This repository has been archived by the owner on Nov 22, 2022. It is now read-only.

docs: Fix a few typos #1720

Open · wants to merge 1 commit into main
@@ -43,7 +43,7 @@ def from_config(cls, config: Config):
Args:
config (Config): Configuration object specifying all the
parameters of IntWeightedMultiCategoryEmbedding.
- num_intput_features: Number of input features in forward.
+ num_input_features: Number of input features in forward.

Returns:
type: An instance of IntWeightedMultiCategoryEmbedding.
@@ -106,9 +106,9 @@ def __init__(
}
)

- self.num_intput_features = len(feature_buckets)
+ self.num_input_features = len(feature_buckets)
input_dim = (
- self.num_intput_features * embedding_dim
+ self.num_input_features * embedding_dim
if self.pooling_type == "none"
else embedding_dim
)
@@ -128,7 +128,7 @@ def get_output_dim(self):
return self.mlp_layer_dims[-1]

if self.pooling_type == "none":
- return self.num_intput_features * self.embedding_dim
+ return self.num_input_features * self.embedding_dim
elif self.pooling_type == "mean":
return self.embedding_dim
elif self.pooling_type == "max":
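The rename above also feeds the width computation: with pooling_type == "none", per-feature embeddings are concatenated, so the output width scales with num_input_features; with "mean" or "max" pooling it stays at embedding_dim. A minimal standalone sketch of that relationship (a hypothetical helper, not the IntWeightedMultiCategoryEmbedding class itself; the real get_output_dim also returns mlp_layer_dims[-1] first when an MLP is configured, which this sketch omits):

```python
def output_dim(num_input_features: int, embedding_dim: int, pooling_type: str) -> int:
    """Sketch of how output width depends on pooling, mirroring the hunks above."""
    if pooling_type == "none":
        # No pooling: one embedding per feature, concatenated side by side.
        return num_input_features * embedding_dim
    if pooling_type in ("mean", "max"):
        # Pooling collapses the feature axis down to a single embedding.
        return embedding_dim
    raise ValueError(f"unsupported pooling_type: {pooling_type}")


# e.g. 6 features with 32-dim embeddings: 192 when concatenated, 32 when pooled.
assert output_dim(6, 32, "none") == 192
assert output_dim(6, 32, "mean") == 32
```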
4 changes: 2 additions & 2 deletions pytext/torchscript/batchutils.py
@@ -370,7 +370,7 @@ def make_batch_texts(

# TBD: allow model server to specify batch size in goals dictionary
# the right batchsize depends on the target architecture and should
- # be passed via the goals config doctionary
+ # be passed via the goals config dictionary
max_bs = int(goals.get("batchsize", "4"))
len_mb = len(mega_batch)
num_batches = (len_mb + max_bs - 1) // max_bs
@@ -462,7 +462,7 @@ def make_batch_texts_dense(

# TBD: allow model server to specify batch size in goals dictionary
# the right batchsize depends on the target architecture and should
- # be passed via the goals config doctionary
+ # be passed via the goals config dictionary
max_bs = int(goals.get("batchsize", "4"))
len_mb = len(mega_batch)
num_batches = (len_mb + max_bs - 1) // max_bs
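The comment corrected in these hunks sits next to the batching arithmetic itself: the batch size is read from the goals config dictionary (defaulting to 4) and the mega-batch is split using ceiling division. A self-contained sketch of that logic, assuming a plain list of requests and illustrative names rather than PyText's actual tuple types:

```python
from typing import Dict, List


def split_mega_batch(mega_batch: List[str], goals: Dict[str, str]) -> List[List[str]]:
    """Split a mega-batch into chunks of at most `batchsize` items."""
    # TBD in the original code: let the model server pass the batch size via goals.
    max_bs = int(goals.get("batchsize", "4"))
    len_mb = len(mega_batch)
    # Ceiling division: e.g. 10 items with max_bs = 4 gives 3 batches.
    num_batches = (len_mb + max_bs - 1) // max_bs
    return [mega_batch[i * max_bs : (i + 1) * max_bs] for i in range(num_batches)]


batches = split_mega_batch([f"req{i}" for i in range(10)], {"batchsize": "4"})
assert [len(b) for b in batches] == [4, 4, 2]
```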
10 changes: 5 additions & 5 deletions pytext/torchscript/module.py
@@ -427,7 +427,7 @@ def make_batch(

# TBD: allow model server to specify batch size in goals dictionary
# the right batchsize depends on the target architecture and should
- # be passed via the goals config doctionary
+ # be passed via the goals config dictionary
max_bs = int(goals.get("batchsize", "4"))
len_mb = len(mega_batch)
num_batches = (len_mb + max_bs - 1) // max_bs
@@ -1053,7 +1053,7 @@ def make_batch(

# TBD: allow model server to specify batch size in goals dictionary
# the right batchsize depends on the target architecture and should
- # be passed via the goals config doctionary
+ # be passed via the goals config dictionary
max_bs = int(goals.get("batchsize", "4"))
len_mb = len(mega_batch)
num_batches = (len_mb + max_bs - 1) // max_bs
@@ -1521,7 +1521,7 @@ def make_batch(

# TBD: allow model server to specify batch size in goals dictionary
# the right batchsize depends on the target architecture and should
- # be passed via the goals config doctionary
+ # be passed via the goals config dictionary
max_bs = int(goals.get("batchsize", "4"))
len_mb = len(mega_batch)
num_batches = (len_mb + max_bs - 1) // max_bs
@@ -1933,7 +1933,7 @@ def make_batch(

# TBD: allow model server to specify batch size in goals dictionary
# the right batchsize depends on the target architecture and should
- # be passed via the goals config doctionary
+ # be passed via the goals config dictionary
max_bs = int(goals.get("batchsize", "4"))
len_mb = len(mega_batch)
num_batches = (len_mb + max_bs - 1) // max_bs
@@ -2175,7 +2175,7 @@ def make_batch(

# TBD: allow model server to specify batch size in goals dictionary
# the right batchsize depends on the target architecture and should
- # be passed via the goals config doctionary
+ # be passed via the goals config dictionary
max_bs = int(goals.get("batchsize", "4"))
len_mb = len(mega_batch)
num_batches = (len_mb + max_bs - 1) // max_bs
2 changes: 1 addition & 1 deletion pytext/torchscript/tokenizer/bpe.py
@@ -137,7 +137,7 @@ def bpe_token(self, token: str) -> List[str]:
# We structure the vocabulary to not have ties, but they can come up anyway,
# for instance in cases with repeated tokens or when passing in vocabs not
# created with BPE.load_vocab. In the case of a tie between the value of
- # joined segments, they'll be joined proiritizing the first pair in the
+ # joined segments, they'll be joined prioritizing the first pair in the
# token according to byte order, ie. left in LTR and right in RTL languages.
# For instance, if the vocab contains "aa" but not "aaa", then
# bpe_tokens("aaa") -> ["aa", "a"]. If the vocab contains "ab" and "bc"
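The corrected comment describes the tie-breaking rule for BPE merges: when two candidate pairs have equal value, the first pair in the token (in byte order) wins. A toy, single-step illustration of that rule, assuming lower vocabulary values merge first; this is a sketch, not the repository's bpe_token implementation:

```python
from typing import Dict, List, Optional


def merge_best_pair(segments: List[str], vocab: Dict[str, int]) -> List[str]:
    """Merge the adjacent pair with the lowest vocab value; ties go to the leftmost pair."""
    best_idx: int = -1
    best_val: Optional[int] = None
    for i in range(len(segments) - 1):
        val = vocab.get(segments[i] + segments[i + 1])
        if val is not None and (best_val is None or val < best_val):
            # Strict comparison: on a tie the earlier (leftmost) pair is kept.
            best_idx, best_val = i, val
    if best_idx < 0:
        return segments  # nothing to merge
    merged = segments[best_idx] + segments[best_idx + 1]
    return segments[:best_idx] + [merged] + segments[best_idx + 2 :]


# Mirrors the comment's example: with "aa" in the vocab but not "aaa",
# "aaa" merges the leftmost pair first, giving ["aa", "a"].
assert merge_best_pair(["a", "a", "a"], {"aa": 0}) == ["aa", "a"]
```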