
Commit aff3c24

brettkoonce authored and neerajprad committed
Some spelling corrections in doc strings (pyro-ppl#521)
1 parent 127b7cc commit aff3c24

9 files changed, +11 −11 lines changed


pyro/distributions/dirichlet.py

Lines changed: 1 addition & 1 deletion

@@ -75,7 +75,7 @@ def sample(self):
 
     def batch_log_pdf(self, x):
         """
-        Evaluates log probabity density over one or a batch of samples.
+        Evaluates log probability density over one or a batch of samples.
 
         Each of alpha and x can be either a single value or a batch of values batched along dimension 0.
         If they are both batches, their batch sizes must agree.
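For readers unfamiliar with the batching convention this docstring describes, here is a minimal sketch of the same idea using torch.distributions.Dirichlet (used purely for illustration; the pyro 0.x class exposes this through batch_log_pdf rather than log_prob):

    import torch
    from torch.distributions import Dirichlet

    alpha = torch.ones(4, 3)     # a batch of 4 concentration vectors, each of size 3
    d = Dirichlet(alpha)
    x = d.sample()               # shape [4, 3]: one point on the simplex per batch element
    log_pdf = d.log_prob(x)      # shape [4]: one log density per batch element
    print(log_pdf.shape)         # torch.Size([4])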

pyro/distributions/distribution.py

Lines changed: 1 addition & 1 deletion

@@ -118,7 +118,7 @@ def event_shape(self, x=None, *args, **kwargs):
         """
         The right-hand tensor shape of samples, used for individual events. The
         event dimension(/s) is used to designate random variables that could
-        potentially depend on each other, for instance in the case of dirichlet
+        potentially depend on each other, for instance in the case of Dirichlet
         or the categorical distribution, but could also simply be used for logical
         grouping, for example in the case of a normal distribution with a
         diagonal covariance matrix.
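As a point of reference, the batch/event distinction this docstring describes can be seen with torch.distributions (an illustration only, not pyro's own Distribution base class):

    import torch
    from torch.distributions import Dirichlet, Normal

    d = Dirichlet(torch.ones(2, 5))
    print(d.batch_shape, d.event_shape)   # torch.Size([2]) torch.Size([5]) -- 5 dependent dims per event

    n = Normal(torch.zeros(5), torch.ones(5))
    print(n.batch_shape, n.event_shape)   # torch.Size([5]) torch.Size([]) -- independent dims, no event dims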

pyro/distributions/poisson.py

Lines changed: 1 addition & 1 deletion

@@ -10,7 +10,7 @@
 
 class Poisson(Distribution):
     """
-    Poisson distribution over integers parameterizeds by scale `lambda`.
+    Poisson distribution over integers parameterized by scale `lambda`.
 
     This is often used in conjunction with `torch.nn.Softplus` to ensure the
     `lam` parameter is positive.
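A minimal sketch of the Softplus pattern the docstring refers to, assuming torch.distributions.Poisson as a stand-in for the class in this file:

    import torch
    import torch.nn as nn

    raw_lam = nn.Parameter(torch.randn(1))               # unconstrained, may be negative
    lam = nn.Softplus()(raw_lam)                         # strictly positive rate
    counts = torch.distributions.Poisson(lam).sample()   # non-negative integer draw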

pyro/distributions/util.py

Lines changed: 1 addition & 1 deletion

@@ -91,7 +91,7 @@ def torch_eye(n, m=None, out=None):
     try:
         return torch.eye(n, m, out=out)
     except TypeError:
-        # Only catch errors due to torch.eye() not being availble for cuda tensors.
+        # Only catch errors due to torch.eye() not being available for cuda tensors.
         module = torch.Tensor.__module__ if out is None else type(out).__module__
         if module != 'torch.cuda':
             raise
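The rest of torch_eye is not shown in this hunk; the sketch below only illustrates the general fallback pattern the comment describes (build the identity on the CPU when torch.eye cannot write directly into a cuda out tensor). The helper name eye_like and the CPU-copy fallback are assumptions, not the actual implementation:

    import torch

    def eye_like(n, m=None, out=None):
        m = n if m is None else m
        try:
            return torch.eye(n, m, out=out)
        except TypeError:
            # assumed fallback: construct on CPU, then copy into the (possibly cuda) output
            eye = torch.eye(n, m)
            if out is None:
                return eye
            out.resize_as_(eye).copy_(eye)
            return out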

pyro/infer/search.py

Lines changed: 1 addition & 1 deletion

@@ -29,7 +29,7 @@ def _traces(self, *args, **kwargs):
         is performing exact inference
 
         :returns: Iterator of traces from the posterior.
-        :rtype: Geneator[:class:`pyro.Trace`]
+        :rtype: Generator[:class:`pyro.Trace`]
         """
         # currently only using the standard library queue
         self.queue = Queue()

pyro/nn/auto_reg_nn.py

Lines changed: 2 additions & 2 deletions

@@ -47,15 +47,15 @@ class AutoRegressiveNN(nn.Module):
     :type hidden_dim: int
     :param output_dim_multiplier: the dimensionality of the output is given by input_dim x output_dim_multiplier.
         specifically the shape of the output for a single vector input is [output_dim_multiplier, input_dim].
-        for any i, j in range(0, output_dim_multiplier) the subset of outputs [i, :] has identifical
+        for any i, j in range(0, output_dim_multiplier) the subset of outputs [i, :] has identical
         autoregressive structure to [j, :]. defaults to `1`
     :type output_dim_multiplier: int
     :param mask_encoding: a torch Tensor that controls the autoregressive structure (see reference). by default
         this is chosen at random.
     :type mask_encoding: torch.LongTensor
     :param permutation: an optional permutation that is applied to the inputs and controls the order of the
         autoregressive factorization. in particular for the identity permutation the autoregressive structure
-        is such that the jacobian is upper triangular. by default this is chosen at random.
+        is such that the Jacobian is upper triangular. by default this is chosen at random.
     :type permutation: torch.LongTensor
     """
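To make the triangular-Jacobian remark concrete, here is a hedged sketch (not the AutoRegressiveNN implementation): a linear map whose weights are masked to be strictly upper triangular yields outputs where y[i] depends only on inputs x[j] with j > i, so its Jacobian is upper triangular:

    import torch

    input_dim = 4
    mask = torch.triu(torch.ones(input_dim, input_dim), diagonal=1)   # strictly upper-triangular mask
    weight = torch.randn(input_dim, input_dim)

    f = lambda v: (mask * weight) @ v                                 # masked linear map
    x = torch.randn(input_dim)
    jacobian = torch.autograd.functional.jacobian(f, x)
    print(jacobian)   # strictly upper triangular: dy[i]/dx[j] == 0 whenever j <= i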

pyro/poutine/lift_poutine.py

Lines changed: 1 addition & 1 deletion

@@ -29,7 +29,7 @@ def __init__(self, fn, prior):
 
     def _prepare_site(self, msg):
         """
-        Sets flags of params that will be overriden so they are not
+        Sets flags of params that will be overridden so they are not
         reexecuted in the stack and not added to the param store.
         """
         name = msg["name"]

pyro/poutine/poutine.py

Lines changed: 1 addition & 1 deletion

@@ -98,7 +98,7 @@ def __exit__(self, exc_type, exc_value, traceback):
         They are all None unless the body of the with statement raised an exception.
         """
         if exc_type is None:  # callee or enclosed block returned successfully
-            # if the callee or enclosed block returned successfuly,
+            # if the callee or enclosed block returned successfully,
             # this poutine should be on the bottom of the stack.
             # If so, remove it from the stack.
             # if not, raise a ValueError because something really weird happened.
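The stack discipline these comments describe follows a common context-manager pattern; the generic sketch below only illustrates that pattern (the names _STACK and Handler are made up, not pyro internals):

    _STACK = []

    class Handler(object):
        def __enter__(self):
            _STACK.append(self)
            return self

        def __exit__(self, exc_type, exc_value, traceback):
            if exc_type is None:
                # on a clean exit this handler should be at the end of the stack
                if _STACK and _STACK[-1] is self:
                    _STACK.pop()
                else:
                    raise ValueError("handler exited out of order")
            # returning None propagates any exception raised inside the with block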

pyro/util.py

Lines changed: 2 additions & 2 deletions

@@ -223,7 +223,7 @@ def enum_extend(trace, msg, num_samples=None):
     inside_iarange = any(frame.vectorized for frame in msg["map_data_stack"])
     if is_batched and not inside_iarange:
         raise ValueError(
-            "Tried to enumerate a batched pyro.sample site '{}' outiside of a pyro.iarange. "
+            "Tried to enumerate a batched pyro.sample site '{}' outside of a pyro.iarange. "
             "To fix, either enclose in a pyro.iarange, or avoid batching.".format(msg["name"]))
 
     extended_traces = []

@@ -389,6 +389,6 @@ def check_model_guide_match(model_trace, guide_trace):
 def deep_getattr(obj, name):
     """
     Python getattr() for arbitrarily deep attributes
-    Throws an AttirbuteError if bad attribute
+    Throws an AttributeError if bad attribute
     """
     return functools.reduce(getattr, name.split("."), obj)
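A small usage sketch of the functools.reduce pattern shown in deep_getattr (the Outer/Inner classes are hypothetical):

    import functools

    def deep_getattr(obj, name):
        return functools.reduce(getattr, name.split("."), obj)

    class Inner(object):
        value = 42

    class Outer(object):
        inner = Inner()

    print(deep_getattr(Outer(), "inner.value"))   # 42
    # deep_getattr(Outer(), "inner.missing")      # raises AttributeError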
