
Exponential scale default to 1.0 #7604


Merged (2 commits, Dec 5, 2024)
11 changes: 5 additions & 6 deletions pymc/distributions/continuous.py
@@ -1373,13 +1373,12 @@ class Exponential(PositiveContinuous):
     rv_op = exponential

     @classmethod
-    def dist(cls, lam=None, scale=None, *args, **kwargs):
-        if lam is not None and scale is not None:
+    def dist(cls, lam=None, *, scale=None, **kwargs):
+        if lam is None and scale is None:
+            scale = 1.0
+        elif lam is not None and scale is not None:
             raise ValueError("Incompatible parametrization. Can't specify both lam and scale.")
-        elif lam is None and scale is None:
-            raise ValueError("Incompatible parametrization. Must specify either lam or scale.")
-
-        if scale is None:
+        elif lam is not None:
             scale = pt.reciprocal(lam)

         scale = pt.as_tensor_variable(scale)
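For reference, a minimal usage sketch of the behavior this change introduces (a hedged example, assuming a PyMC build that includes this patch; the equivalences follow from scale = 1/lam):

import pymc as pm

# Omitting both parameters now falls back to scale=1.0 instead of raising.
rv_default = pm.Exponential.dist()          # scale defaults to 1.0
rv_scale = pm.Exponential.dist(scale=1.0)   # equivalent
rv_lam = pm.Exponential.dist(lam=1.0)       # equivalent, since scale = 1/lam

# Specifying both parametrizations still raises.
try:
    pm.Exponential.dist(lam=0.5, scale=5)
except ValueError as err:
    print(err)  # Incompatible parametrization. Can't specify both lam and scale.

Note that scale is now keyword-only in dist(), so a positional argument such as pm.Exponential.dist(2.0) continues to be interpreted as lam.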
24 changes: 12 additions & 12 deletions pymc/step_methods/hmc/nuts.py
@@ -242,24 +242,24 @@ def competence(var, has_grad):

 class _Tree:
     __slots__ = (
-        "ndim",
-        "integrator",
-        "start",
-        "step_size",
         "Emax",
-        "start_energy",
-        "rng",
-        "left",
-        "right",
-        "proposal",
         "depth",
-        "log_size",
+        "floatX",
+        "integrator",
+        "left",
         "log_accept_sum",
+        "log_size",
+        "max_energy_change",
         "mean_tree_accept",
         "n_proposals",
+        "ndim",
         "p_sum",
-        "max_energy_change",
-        "floatX",
+        "proposal",
+        "right",
+        "rng",
+        "start",
+        "start_energy",
+        "step_size",
     )

     def __init__(
21 changes: 12 additions & 9 deletions tests/distributions/test_continuous.py
@@ -461,15 +461,6 @@ def test_exponential(self):
             lambda q, lam: st.expon.ppf(q, loc=0, scale=1 / lam),
         )

-    def test_exponential_wrong_arguments(self):
-        msg = "Incompatible parametrization. Can't specify both lam and scale"
-        with pytest.raises(ValueError, match=msg):
-            pm.Exponential.dist(lam=0.5, scale=5)
-
-        msg = "Incompatible parametrization. Must specify either lam or scale"
-        with pytest.raises(ValueError, match=msg):
-            pm.Exponential.dist()
-
     def test_laplace(self):
         check_logp(
             pm.Laplace,
@@ -2274,8 +2265,20 @@ class TestExponential(BaseTestDistributionRandom):
     checks_to_run = [
         "check_pymc_params_match_rv_op",
         "check_pymc_draws_match_reference",
+        "check_both_lam_scale_raises",
+        "check_default_scale",
     ]

+    def check_both_lam_scale_raises(self):
+        msg = "Incompatible parametrization. Can't specify both lam and scale"
+        with pytest.raises(ValueError, match=msg):
+            pm.Exponential.dist(lam=0.5, scale=5)
+
+    def check_default_scale(self):
+        rv = self.pymc_dist.dist()
+        [scale] = rv.owner.op.dist_params(rv.owner)
+        assert scale.data == 1.0
+

 class TestExponentialScale(BaseTestDistributionRandom):
     pymc_dist = pm.Exponential
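As a standalone sanity check of the new default outside the test harness (a sketch mirroring check_default_scale above; the mean comparison only assumes that an Exponential with scale=1.0 has expectation 1):

import pymc as pm

rv = pm.Exponential.dist()                    # no parameters: scale defaults to 1.0
[scale] = rv.owner.op.dist_params(rv.owner)   # same inspection check_default_scale performs
assert scale.data == 1.0

samples = pm.draw(rv, draws=10_000, random_seed=42)
print(samples.mean())  # should be close to 1.0, since E[X] = scale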