
Commit 659f177

replacing pytensor-devs for aesara-devs (#6817)
* replacing pytensor-devs for aesara-devs
* fix typo
* Update docs/source/learn/core_notebooks/pymc_pytensor.ipynb: change the pytensor hyperlink to "pymc-devs/pytensor"

Co-authored-by: larryshamalama <[email protected]>
1 parent f67ff8b commit 659f177

File tree

6 files changed: +76 −11 lines


docs/source/learn/core_notebooks/pymc_pytensor.ipynb

Lines changed: 71 additions & 6 deletions
Large diffs are not rendered by default.

pymc/distributions/shape_utils.py

Lines changed: 1 addition & 1 deletion
@@ -407,7 +407,7 @@ def change_specify_shape_size(op, ss, new_size, expand) -> TensorVariable:
     if ndim_supp > 0:
         new_shapes[-ndim_supp:] = shapes[-ndim_supp:]
 
-    # specify_shape has a wrong signature https://github.com/pytensor-devs/pytensor/issues/1164
+    # specify_shape has a wrong signature https://github.com/aesara-devs/aesara/issues/1164
     return pt.specify_shape(new_var, new_shapes)  # type: ignore
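The comment in this hunk refers to pytensor.tensor.specify_shape, which pins a static shape onto a symbolic variable at graph level. A minimal usage sketch, not part of this commit and assuming a current PyTensor install:

import pytensor.tensor as pt

x = pt.vector("x")              # symbolic vector with unknown length
x3 = pt.specify_shape(x, (3,))  # assert in the graph that x has length 3
print(x3.type.shape)            # (3,) -- the static shape is now recorded on the type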

pymc/logprob/rewriting.py

Lines changed: 1 addition & 1 deletion
@@ -418,7 +418,7 @@ def construct_ir_fgraph(
     For instance, some `Op`s will be lifted through `MeasurableVariable`\s in
     this IR, and the resulting graphs will not be computationally sound,
     because they wouldn't produce independent samples when the original graph
-    would. See https://github.com/pytensor-devs/aeppl/pull/78.
+    would. See https://github.com/aesara-devs/aeppl/pull/78.
 
     Returns
     -------

pymc/sampling/jax.py

Lines changed: 1 addition & 1 deletion
@@ -115,7 +115,7 @@ def get_jaxified_graph(
     # JAX sequential optimizer without warnings. We made sure there
     # are no mutable input variables, so we only need to check for
     # "destroyers". This should be automatically handled by PyTensor
-    # once https://github.com/pytensor-devs/pytensor/issues/637 is fixed.
+    # once https://github.com/aesara-devs/aesara/issues/637 is fixed.
     fgraph.attach_feature(
         Supervisor(
             input

tests/distributions/test_distribution.py

Lines changed: 1 addition & 1 deletion
@@ -701,7 +701,7 @@ def diracdelta_rng_fn(self, size, c):
 
     @pytest.mark.parametrize("floatX", ["float32", "float64"])
     @pytest.mark.xfail(
-        sys.platform == "win32", reason="https://github.com/pytensor-devs/pytensor/issues/871"
+        sys.platform == "win32", reason="https://github.com/aesara-devs/aesara/issues/871"
     )
     def test_dtype(self, floatX):
         with pytensor.config.change_flags(floatX=floatX):
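The decorator touched here is pytest's conditional xfail: the test is expected to fail only when the first argument evaluates to true, with the linked issue recorded as the reason. A self-contained sketch of the same pattern, not taken from this commit:

import sys

import pytest

@pytest.mark.xfail(
    sys.platform == "win32",  # only expect failure on Windows
    reason="https://github.com/aesara-devs/aesara/issues/871",
)
def test_example():
    assert 1 + 1 == 2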

tests/sampling/test_jax.py

Lines changed: 1 addition & 1 deletion
@@ -134,7 +134,7 @@ def test_deterministic_samples(sampler):
 
 def test_get_jaxified_graph():
     # Check that jaxifying a graph does not emit the Supervisor Warning. This test can
-    # be removed once https://github.com/pytensor-devs/pytensor/issues/637 is sorted.
+    # be removed once https://github.com/aesara-devs/aesara/issues/637 is sorted.
     x = pt.scalar("x")
     y = pt.exp(x)
     with warnings.catch_warnings():
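The truncated test body above relies on the standard pattern of promoting warnings to errors inside warnings.catch_warnings(), so that an unexpected Supervisor warning fails the test. A stand-alone sketch of that pattern, with an illustrative helper name not taken from this diff:

import warnings

def assert_no_warnings(fn, *args, **kwargs):
    # Promote every warning raised inside this block to an error; if fn()
    # emits a warning, the call raises and the surrounding test fails.
    with warnings.catch_warnings():
        warnings.simplefilter("error")
        return fn(*args, **kwargs)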
