Skip to content

Commit

Permalink
Bump Pytensor dependency
Browse files — browse the repository at this point in the history
  • Loading branch information
ricardoV94 authored and twiecki committed Apr 12, 2023
1 parent 5d68bf3 commit 2a324bc
Show file tree
Hide file tree
Showing 13 changed files with 69 additions and 69 deletions.
2 changes: 1 addition & 1 deletion conda-envs/environment-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ dependencies:
- numpy>=1.15.0
- pandas>=0.24.0
- pip
- pytensor=2.10.1
- pytensor>=2.11.0,<2.12
- python-graphviz
- networkx
- scipy>=1.4.1
Expand Down
2 changes: 1 addition & 1 deletion conda-envs/environment-docs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ dependencies:
- numpy>=1.15.0
- pandas>=0.24.0
- pip
- pytensor=2.9.1
- pytensor>=2.11.0,<2.12
- python-graphviz
- scipy>=1.4.1
- typing-extensions>=3.7.4
Expand Down
2 changes: 1 addition & 1 deletion conda-envs/environment-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ dependencies:
- numpy>=1.15.0
- pandas>=0.24.0
- pip
- pytensor=2.10.1
- pytensor>=2.11.0,<2.12
- python-graphviz
- networkx
- scipy>=1.4.1
Expand Down
2 changes: 1 addition & 1 deletion conda-envs/windows-environment-dev.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ dependencies:
- numpy>=1.15.0
- pandas>=0.24.0
- pip
- pytensor=2.10.1
- pytensor>=2.11.0,<2.12
- python-graphviz
- networkx
- scipy>=1.4.1
Expand Down
2 changes: 1 addition & 1 deletion conda-envs/windows-environment-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ dependencies:
- numpy>=1.15.0
- pandas>=0.24.0
- pip
- pytensor=2.10.1
- pytensor>=2.11.0,<2.12
- python-graphviz
- networkx
- scipy>=1.4.1
Expand Down
4 changes: 2 additions & 2 deletions pymc/distributions/continuous.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,7 +235,7 @@ def get_tau_sigma(tau=None, sigma=None):
else:
if isinstance(sigma, Variable):
# Keep tau negative, if sigma was negative, so that it will fail when used
tau = (sigma**-2.0) * pt.sgn(sigma)
tau = (sigma**-2.0) * pt.sign(sigma)
else:
sigma_ = np.asarray(sigma)
if np.any(sigma_ <= 0):
Expand All @@ -248,7 +248,7 @@ def get_tau_sigma(tau=None, sigma=None):
else:
if isinstance(tau, Variable):
# Keep sigma negative, if tau was negative, so that it will fail when used
sigma = pt.abs(tau) ** (-0.5) * pt.sgn(tau)
sigma = pt.abs(tau) ** (-0.5) * pt.sign(tau)
else:
tau_ = np.asarray(tau)
if np.any(tau_ <= 0):
Expand Down
2 changes: 1 addition & 1 deletion pymc/distributions/dist_math.py
Original file line number Diff line number Diff line change
Expand Up @@ -370,7 +370,7 @@ def impl(self, x):
def grad(self, inp, grads):
(x,) = inp
(gz,) = grads
return (gz * (i1e_scalar(x) - pytensor.scalar.sgn(x) * i0e_scalar(x)),)
return (gz * (i1e_scalar(x) - pytensor.scalar.sign(x) * i0e_scalar(x)),)


i0e_scalar = I0e(upgrade_to_float_no_complex, name="i0e")
Expand Down
4 changes: 2 additions & 2 deletions pymc/distributions/multivariate.py
Original file line number Diff line number Diff line change
Expand Up @@ -2059,8 +2059,8 @@ def make_node(self, rng, size, dtype, mu, W, alpha, tau):
sparse = isinstance(W, pytensor.sparse.SparseVariable)
msg = "W must be a symmetric adjacency matrix."
if sparse:
abs_diff = pytensor.sparse.basic.mul(pytensor.sparse.basic.sgn(W - W.T), W - W.T)
W = Assert(msg)(W, pt.isclose(pytensor.sparse.basic.sp_sum(abs_diff), 0))
abs_diff = pytensor.sparse.basic.mul(pytensor.sparse.sign(W - W.T), W - W.T)
W = Assert(msg)(W, pt.isclose(pytensor.sparse.sp_sum(abs_diff), 0))
else:
W = Assert(msg)(W, pt.allclose(W, W.T))

Expand Down
2 changes: 1 addition & 1 deletion pymc/logprob/transforms.py
Original file line number Diff line number Diff line change
Expand Up @@ -608,7 +608,7 @@ def find_measurable_transforms(fgraph: FunctionGraph, node: Node) -> Optional[Li
return None
try:
(power,) = other_inputs
power = pt.get_scalar_constant_value(power).item()
power = pt.get_underlying_scalar_constant_value(power).item()
# Power needs to be a constant
except NotScalarConstantError:
return None
Expand Down
2 changes: 1 addition & 1 deletion pymc/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -1443,7 +1443,7 @@ def make_obs_var(
# values, and another for the non-missing values.

antimask_idx = (~mask).nonzero()
nonmissing_data = pt.as_tensor_variable(data[antimask_idx])
nonmissing_data = pt.as_tensor_variable(data[antimask_idx].data)
unmasked_rv_var = rv_var[antimask_idx]
unmasked_rv_var = unmasked_rv_var.owner.clone().default_output()

Expand Down
2 changes: 1 addition & 1 deletion requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ numpydoc
pandas>=0.24.0
polyagamma
pre-commit>=2.8.0
pytensor==2.10.1
pytensor>=2.11.0,<2.12
pytest-cov>=2.5
pytest>=3.0
scipy>=1.4.1
Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,6 @@ cloudpickle
fastprogress>=0.2.0
numpy>=1.15.0
pandas>=0.24.0
pytensor==2.10.1
pytensor>=2.11.0,<2.12
scipy>=1.4.1
typing-extensions>=3.7.4
110 changes: 55 additions & 55 deletions tests/test_printing.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,63 +132,63 @@ def setup_class(self):
self.formats = [("plain", True), ("plain", False), ("latex", True), ("latex", False)]
self.expected = {
("plain", True): [
r"alpha ~ N(0, 10)",
r"sigma ~ N**+(0, 1)",
r"alpha ~ Normal(0, 10)",
r"sigma ~ HalfNormal(0, 1)",
r"mu ~ Deterministic(f(beta, alpha))",
r"beta ~ N(0, 10)",
r"Z ~ N(f(), f())",
r"nb_with_p_n ~ NB(10, nbp)",
r"zip ~ MarginalMixture(f(), DiracDelta(0), Pois(5))",
r"w ~ Dir(<constant>)",
r"beta ~ Normal(0, 10)",
r"Z ~ MultivariateNormal(f(), f())",
r"nb_with_p_n ~ NegativeBinomial(10, nbp)",
r"zip ~ MarginalMixture(f(), DiracDelta(0), Poisson(5))",
r"w ~ Dirichlet(<constant>)",
(
r"nested_mix ~ MarginalMixture(w, "
r"MarginalMixture(f(), DiracDelta(0), Pois(5)), "
r"Censored(Bern(0.5), -1, 1))"
r"MarginalMixture(f(), DiracDelta(0), Poisson(5)), "
r"Censored(Bernoulli(0.5), -1, 1))"
),
r"Y_obs ~ N(mu, sigma)",
r"Y_obs ~ Normal(mu, sigma)",
r"pot ~ Potential(f(beta, alpha))",
],
("plain", False): [
r"alpha ~ N",
r"sigma ~ N**+",
r"alpha ~ Normal",
r"sigma ~ HalfNormal",
r"mu ~ Deterministic",
r"beta ~ N",
r"Z ~ N",
r"nb_with_p_n ~ NB",
r"beta ~ Normal",
r"Z ~ MultivariateNormal",
r"nb_with_p_n ~ NegativeBinomial",
r"zip ~ MarginalMixture",
r"w ~ Dir",
r"w ~ Dirichlet",
r"nested_mix ~ MarginalMixture",
r"Y_obs ~ N",
r"Y_obs ~ Normal",
r"pot ~ Potential",
],
("latex", True): [
r"$\text{alpha} \sim \operatorname{N}(0,~10)$",
r"$\text{sigma} \sim \operatorname{N^{+}}(0,~1)$",
r"$\text{alpha} \sim \operatorname{Normal}(0,~10)$",
r"$\text{sigma} \sim \operatorname{HalfNormal}(0,~1)$",
r"$\text{mu} \sim \operatorname{Deterministic}(f(\text{beta},~\text{alpha}))$",
r"$\text{beta} \sim \operatorname{N}(0,~10)$",
r"$\text{Z} \sim \operatorname{N}(f(),~f())$",
r"$\text{nb_with_p_n} \sim \operatorname{NB}(10,~\text{nbp})$",
r"$\text{zip} \sim \operatorname{MarginalMixture}(f(),~\operatorname{DiracDelta}(0),~\operatorname{Pois}(5))$",
r"$\text{w} \sim \operatorname{Dir}(\text{<constant>})$",
r"$\text{beta} \sim \operatorname{Normal}(0,~10)$",
r"$\text{Z} \sim \operatorname{MultivariateNormal}(f(),~f())$",
r"$\text{nb_with_p_n} \sim \operatorname{NegativeBinomial}(10,~\text{nbp})$",
r"$\text{zip} \sim \operatorname{MarginalMixture}(f(),~\operatorname{DiracDelta}(0),~\operatorname{Poisson}(5))$",
r"$\text{w} \sim \operatorname{Dirichlet}(\text{<constant>})$",
(
r"$\text{nested_mix} \sim \operatorname{MarginalMixture}(\text{w},"
r"~\operatorname{MarginalMixture}(f(),~\operatorname{DiracDelta}(0),~\operatorname{Pois}(5)),"
r"~\operatorname{Censored}(\operatorname{Bern}(0.5),~-1,~1))$"
r"~\operatorname{MarginalMixture}(f(),~\operatorname{DiracDelta}(0),~\operatorname{Poisson}(5)),"
r"~\operatorname{Censored}(\operatorname{Bernoulli}(0.5),~-1,~1))$"
),
r"$\text{Y_obs} \sim \operatorname{N}(\text{mu},~\text{sigma})$",
r"$\text{Y_obs} \sim \operatorname{Normal}(\text{mu},~\text{sigma})$",
r"$\text{pot} \sim \operatorname{Potential}(f(\text{beta},~\text{alpha}))$",
],
("latex", False): [
r"$\text{alpha} \sim \operatorname{N}$",
r"$\text{sigma} \sim \operatorname{N^{+}}$",
r"$\text{alpha} \sim \operatorname{Normal}$",
r"$\text{sigma} \sim \operatorname{HalfNormal}$",
r"$\text{mu} \sim \operatorname{Deterministic}$",
r"$\text{beta} \sim \operatorname{N}$",
r"$\text{Z} \sim \operatorname{N}$",
r"$\text{nb_with_p_n} \sim \operatorname{NB}$",
r"$\text{beta} \sim \operatorname{Normal}$",
r"$\text{Z} \sim \operatorname{MultivariateNormal}$",
r"$\text{nb_with_p_n} \sim \operatorname{NegativeBinomial}$",
r"$\text{zip} \sim \operatorname{MarginalMixture}$",
r"$\text{w} \sim \operatorname{Dir}$",
r"$\text{w} \sim \operatorname{Dirichlet}$",
r"$\text{nested_mix} \sim \operatorname{MarginalMixture}$",
r"$\text{Y_obs} \sim \operatorname{N}$",
r"$\text{Y_obs} \sim \operatorname{Normal}$",
r"$\text{pot} \sim \operatorname{Potential}$",
],
}
Expand All @@ -210,28 +210,28 @@ def setup_class(self):
self.formats = [("plain", True), ("plain", False), ("latex", True), ("latex", False)]
self.expected = {
("plain", True): [
r"a ~ N(2, 1)",
r"b ~ N(<shared>, 1)",
r"c ~ N(2, 1)",
r"d ~ N(<constant>, 1)",
r"a ~ Normal(2, 1)",
r"b ~ Normal(<shared>, 1)",
r"c ~ Normal(2, 1)",
r"d ~ Normal(<constant>, 1)",
],
("plain", False): [
r"a ~ N",
r"b ~ N",
r"c ~ N",
r"d ~ N",
r"a ~ Normal",
r"b ~ Normal",
r"c ~ Normal",
r"d ~ Normal",
],
("latex", True): [
r"$\text{a} \sim \operatorname{N}(2,~1)$",
r"$\text{b} \sim \operatorname{N}(\text{<shared>},~1)$",
r"$\text{c} \sim \operatorname{N}(2,~1)$",
r"$\text{d} \sim \operatorname{N}(\text{<constant>},~1)$",
r"$\text{a} \sim \operatorname{Normal}(2,~1)$",
r"$\text{b} \sim \operatorname{Normal}(\text{<shared>},~1)$",
r"$\text{c} \sim \operatorname{Normal}(2,~1)$",
r"$\text{d} \sim \operatorname{Normal}(\text{<constant>},~1)$",
],
("latex", False): [
r"$\text{a} \sim \operatorname{N}$",
r"$\text{b} \sim \operatorname{N}$",
r"$\text{c} \sim \operatorname{N}$",
r"$\text{d} \sim \operatorname{N}$",
r"$\text{a} \sim \operatorname{Normal}$",
r"$\text{b} \sim \operatorname{Normal}$",
r"$\text{c} \sim \operatorname{Normal}$",
r"$\text{d} \sim \operatorname{Normal}$",
],
}

Expand All @@ -249,9 +249,9 @@ def test_model_latex_repr_three_levels_model():
expected = [
"$$",
"\\begin{array}{rcl}",
"\\text{mu} &\\sim & \\operatorname{N}(0,~5)\\\\\\text{sigma} &\\sim & "
"\\operatorname{C^{+}}(0,~2.5)\\\\\\text{censored_normal} &\\sim & "
"\\operatorname{Censored}(\\operatorname{N}(\\text{mu},~\\text{sigma}),~-2,~2)",
"\\text{mu} &\\sim & \\operatorname{Normal}(0,~5)\\\\\\text{sigma} &\\sim & "
"\\operatorname{HalfCauchy}(0,~2.5)\\\\\\text{censored_normal} &\\sim & "
"\\operatorname{Censored}(\\operatorname{Normal}(\\text{mu},~\\text{sigma}),~-2,~2)",
"\\end{array}",
"$$",
]
Expand All @@ -268,8 +268,8 @@ def test_model_latex_repr_mixture_model():
"$$",
"\\begin{array}{rcl}",
"\\text{w} &\\sim & "
"\\operatorname{Dir}(\\text{<constant>})\\\\\\text{mix} &\\sim & "
"\\operatorname{MarginalMixture}(\\text{w},~\\operatorname{N}(0,~5),~\\operatorname{StudentT}(7,~0,~1))",
"\\operatorname{Dirichlet}(\\text{<constant>})\\\\\\text{mix} &\\sim & "
"\\operatorname{MarginalMixture}(\\text{w},~\\operatorname{Normal}(0,~5),~\\operatorname{StudentT}(7,~0,~1))",
"\\end{array}",
"$$",
]
Expand Down

0 comments on commit 2a324bc

Please sign in to comment.