Bug Fix for 2909 #2979

Merged 1 commit on May 18, 2018
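
For context, the snippet below mirrors the regression test added at the end of this diff. The core of the change is that the requested `size` is now forwarded into `draw_values` (and from there into `_draw_value`), so a distribution whose parameter is itself a random variable, like `b` below with `p = a`, is drawn with knowledge of how many samples the caller asked for. This is a sketch of the scenario behind #2909 rather than a verbatim reproduction; the shapes and means noted in the comments are taken from the new test.

import pymc3 as pm

# Mirrors the regression test added in pymc3/tests/test_distributions_random.py.
with pm.Model():
    a = pm.Uniform('a', lower=0, upper=1, shape=10)
    b = pm.Binomial('b', n=1, p=a, shape=10)  # p is itself a random variable

# With size threaded through draw_values, prior draws of the requested size
# work for both variables and carry a leading sample dimension of 10000.
uniform_means = a.random(size=10000).mean(axis=0)   # each of the 10 means is close to 0.5
binomial_means = b.random(size=10000).mean(axis=0)  # 10 element-wise means, as checked in the test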
38 changes: 19 additions & 19 deletions pymc3/distributions/continuous.py
@@ -163,7 +163,7 @@ def __init__(self, lower=0, upper=1, transform='interval',

def random(self, point=None, size=None):
lower, upper = draw_values([self.lower, self.upper],
point=point)
point=point, size=size)
return generate_samples(stats.uniform.rvs, loc=lower,
scale=upper - lower,
dist_shape=self.shape,
@@ -748,7 +748,7 @@ def __init__(self, lam, *args, **kwargs):
assert_negative_support(lam, 'lam', 'Exponential')

def random(self, point=None, size=None):
lam = draw_values([self.lam], point=point)[0]
lam = draw_values([self.lam], point=point, size=size)[0]
return generate_samples(np.random.exponential, scale=1. / lam,
dist_shape=self.shape,
size=size)
@@ -817,7 +817,7 @@ def __init__(self, mu, b, *args, **kwargs):
assert_negative_support(b, 'b', 'Laplace')

def random(self, point=None, size=None):
mu, b = draw_values([self.mu, self.b], point=point)
mu, b = draw_values([self.mu, self.b], point=point, size=size)
return generate_samples(np.random.laplace, mu, b,
dist_shape=self.shape,
size=size)
@@ -921,7 +921,7 @@ def _random(self, mu, tau, size=None):
return np.exp(mu + (tau**-0.5) * samples)

def random(self, point=None, size=None):
mu, tau = draw_values([self.mu, self.tau], point=point)
mu, tau = draw_values([self.mu, self.tau], point=point, size=size)
return generate_samples(self._random, mu, tau,
dist_shape=self.shape,
size=size)
@@ -1023,7 +1023,7 @@ def __init__(self, nu, mu=0, lam=None, sd=None, *args, **kwargs):

def random(self, point=None, size=None):
nu, mu, lam = draw_values([self.nu, self.mu, self.lam],
point=point)
point=point, size=size)
return generate_samples(stats.t.rvs, nu, loc=mu, scale=lam**-0.5,
dist_shape=self.shape,
size=size)
@@ -1121,7 +1121,7 @@ def _random(self, alpha, m, size=None):

def random(self, point=None, size=None):
alpha, m = draw_values([self.alpha, self.m],
point=point)
point=point, size=size)
return generate_samples(self._random, alpha, m,
dist_shape=self.shape,
size=size)
@@ -1202,7 +1202,7 @@ def _random(self, alpha, beta, size=None):

def random(self, point=None, size=None):
alpha, beta = draw_values([self.alpha, self.beta],
point=point)
point=point, size=size)
return generate_samples(self._random, alpha, beta,
dist_shape=self.shape,
size=size)
@@ -1276,7 +1276,7 @@ def _random(self, beta, size=None):
return beta * np.abs(np.tan(np.pi * (u - 0.5)))

def random(self, point=None, size=None):
beta = draw_values([self.beta], point=point)[0]
beta = draw_values([self.beta], point=point, size=size)[0]
return generate_samples(self._random, beta,
dist_shape=self.shape,
size=size)
@@ -1381,7 +1381,7 @@ def get_alpha_beta(self, alpha=None, beta=None, mu=None, sd=None):

def random(self, point=None, size=None):
alpha, beta = draw_values([self.alpha, self.beta],
point=point)
point=point, size=size)
return generate_samples(stats.gamma.rvs, alpha, scale=1. / beta,
dist_shape=self.shape,
size=size)
@@ -1474,7 +1474,7 @@ def _calculate_mean(self):

def random(self, point=None, size=None):
alpha, beta = draw_values([self.alpha, self.beta],
point=point)
point=point, size=size)
return generate_samples(stats.invgamma.rvs, a=alpha, scale=beta,
dist_shape=self.shape,
size=size)
@@ -1610,7 +1610,7 @@ def __init__(self, alpha, beta, *args, **kwargs):

def random(self, point=None, size=None):
alpha, beta = draw_values([self.alpha, self.beta],
point=point)
point=point, size=size)

def _random(a, b, size=None):
return b * (-np.log(np.random.uniform(size=size)))**(1 / a)
@@ -1708,7 +1708,7 @@ def __init__(self, nu=1, sd=None, lam=None, *args, **kwargs):
assert_negative_support(nu, 'nu', 'HalfStudentT')

def random(self, point=None, size=None):
nu, sd = draw_values([self.nu, self.sd], point=point)
nu, sd = draw_values([self.nu, self.sd], point=point, size=size)
return np.abs(generate_samples(stats.t.rvs, nu, loc=0, scale=sd,
dist_shape=self.shape,
size=size))
@@ -1813,7 +1813,7 @@ def __init__(self, mu, sigma, nu, *args, **kwargs):

def random(self, point=None, size=None):
mu, sigma, nu = draw_values([self.mu, self.sigma, self.nu],
point=point)
point=point, size=size)

def _random(mu, sigma, nu, size=None):
return (np.random.normal(mu, sigma, size=size)
@@ -1905,7 +1905,7 @@ def __init__(self, mu=0.0, kappa=None, transform='circular',

def random(self, point=None, size=None):
mu, kappa = draw_values([self.mu, self.kappa],
point=point)
point=point, size=size)
return generate_samples(stats.vonmises.rvs, loc=mu, kappa=kappa,
dist_shape=self.shape,
size=size)
@@ -2002,7 +2002,7 @@ def __init__(self, mu=0.0, sd=None, tau=None, alpha=1, *args, **kwargs):

def random(self, point=None, size=None):
mu, tau, _, alpha = draw_values(
[self.mu, self.tau, self.sd, self.alpha], point=point)
[self.mu, self.tau, self.sd, self.alpha], point=point, size=size)
return generate_samples(stats.skewnorm.rvs,
a=alpha, loc=mu, scale=tau**-0.5,
dist_shape=self.shape,
@@ -2095,7 +2095,7 @@ def __init__(self, lower=0, upper=1, c=0.5,

def random(self, point=None, size=None):
c, lower, upper = draw_values([self.c, self.lower, self.upper],
point=point)
point=point, size=size)
return generate_samples(stats.triang.rvs, c=c-lower, loc=lower, scale=upper-lower,
size=size, dist_shape=self.shape, random_state=None)

@@ -2178,7 +2178,7 @@ def __init__(self, mu=0, beta=1.0, **kwargs):
super(Gumbel, self).__init__(**kwargs)

def random(self, point=None, size=None):
mu, sd = draw_values([self.mu, self.beta], point=point)
mu, sd = draw_values([self.mu, self.beta], point=point, size=size)
return generate_samples(stats.gumbel_r.rvs, loc=mu, scale=sd,
dist_shape=self.shape,
size=size)
@@ -2257,7 +2257,7 @@ def logp(self, value):
-(value - mu) / s - tt.log(s) - 2 * tt.log1p(tt.exp(-(value - mu) / s)), s > 0)

def random(self, point=None, size=None):
mu, s = draw_values([self.mu, self.s], point=point)
mu, s = draw_values([self.mu, self.s], point=point, size=size)

return generate_samples(
stats.logistic.rvs,
@@ -2333,7 +2333,7 @@ def __init__(self, mu=0, sd=None, tau=None, **kwargs):
super(LogitNormal, self).__init__(**kwargs)

def random(self, point=None, size=None):
mu, _, sd = draw_values([self.mu, self.tau, self.sd], point=point)
mu, _, sd = draw_values([self.mu, self.tau, self.sd], point=point, size=size)
return expit(generate_samples(stats.norm.rvs, loc=mu, scale=sd, dist_shape=self.shape,
size=size))

2 changes: 1 addition & 1 deletion pymc3/distributions/discrete.py
@@ -66,7 +66,7 @@ def __init__(self, n, p, *args, **kwargs):
self.mode = tt.cast(tround(n * p), self.dtype)

def random(self, point=None, size=None):
n, p = draw_values([self.n, self.p], point=point)
n, p = draw_values([self.n, self.p], point=point, size=size)
return generate_samples(stats.binom.rvs, n=n, p=p,
dist_shape=self.shape,
size=size)
16 changes: 9 additions & 7 deletions pymc3/distributions/distribution.py
@@ -210,7 +210,7 @@ def random(self, *args, **kwargs):



def draw_values(params, point=None):
def draw_values(params, point=None, size=None):
"""
Draw (fix) parameter values. Handles a number of cases:

@@ -251,10 +251,10 @@ def draw_values(params, point=None):
named_nodes_children[k] = nnc[k]
else:
named_nodes_children[k].update(nnc[k])

# Init givens and the stack of nodes to try to `_draw_value` from
givens = {}
stored = set([]) # Some nodes
stored = set([]) # Some nodes
stack = list(leaf_nodes.values()) # A queue would be more appropriate
while stack:
next_ = stack.pop(0)
@@ -269,7 +269,7 @@ def draw_values(params, point=None):
# we can skip it. Furthermore, if this node was treated as a
# TensorVariable that should be compiled by theano in
# _compile_theano_function, it would raise a `TypeError:
# ('Constants not allowed in param list', ...)` for
# ('Constants not allowed in param list', ...)` for
# TensorConstant, and a `TypeError: Cannot use a shared
# variable (...) as explicit input` for SharedVariable.
stored.add(next_.name)
@@ -285,7 +285,7 @@ def draw_values(params, point=None):
# have the random method
givens[next_.name] = (next_, _draw_value(next_,
point=point,
givens=temp_givens))
givens=temp_givens, size=size))
stored.add(next_.name)
except theano.gof.fg.MissingInputError:
# The node failed, so we must add the node's parents to
@@ -297,7 +297,7 @@ def draw_values(params, point=None):
node not in params])
values = []
for param in params:
values.append(_draw_value(param, point=point, givens=givens.values()))
values.append(_draw_value(param, point=point, givens=givens.values(), size=size))
return values


@@ -326,7 +326,7 @@ def _compile_theano_function(param, vars, givens=None):
allow_input_downcast=True)


def _draw_value(param, point=None, givens=None):
def _draw_value(param, point=None, givens=None, size=None):
"""Draw a random value from a distribution or return a constant.

Parameters
@@ -342,6 +342,8 @@ def _draw_value(param, point=None, givens=None):
givens : dict, optional
A dictionary from theano variables to their values. These values
are used to evaluate `param` if it is a theano variable.
size : int, optional
Number of samples
"""
if isinstance(param, numbers.Number):
return param
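
To make the control flow concrete, here is a simplified sketch of how `size` now travels from a distribution's `random` method through `draw_values` into `_draw_value`. The names mirror the real functions, but the bodies are illustrative stand-ins, not the library's implementation (which also handles theano variables, givens, and point dictionaries as the diff above shows).

def _draw_value(param, point=None, givens=None, size=None):
    # Sketch: constants pass straight through; anything with a random()
    # method is sampled with the caller's size, so a parameter that is
    # itself a random variable is drawn with the requested sample size.
    if isinstance(param, (int, float)):
        return param
    return param.random(point=point, size=size)

def draw_values(params, point=None, size=None):
    # Sketch: every requested parameter is drawn with the same size,
    # which is the new keyword added by this PR.
    return [_draw_value(p, point=point, size=size) for p in params]

Under this sketch, each distribution's `random(point, size)` simply calls `draw_values([...], point=point, size=size)`, which is the one-line change repeated across continuous.py and discrete.py above.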
14 changes: 14 additions & 0 deletions pymc3/tests/test_distributions_random.py
@@ -97,6 +97,20 @@ def test_random_sample_returns_nd_array(self):
assert isinstance(mu, np.ndarray)
assert isinstance(tau, np.ndarray)

def test_random_sample_returns_correctly(self):
# Based on what we discovered in #GH2909
with pm.Model():
a = pm.Uniform('a', lower=0, upper=1, shape=10)
b = pm.Binomial('b', n=1, p=a, shape=10)
array_of_uniform = a.random(size=10000).mean(axis=0)
array_of_binomial = b.random(size=10000).mean(axis=0)
npt.assert_allclose(array_of_uniform, [0.49886929, 0.49949713, 0.49946077, 0.49922606, 0.49927498, 0.50003914,
0.49980687, 0.50180495, 0.500905, 0.50035121], rtol=1e-2, atol=0)
npt.assert_allclose(array_of_binomial, [0.7232, 0.131 , 0.9457, 0.8279, 0.2911, 0.8686, 0.57 , 0.9184,
0.8177, 0.1625], rtol=1e-2, atol=0)
assert isinstance(array_of_binomial, np.ndarray)
assert isinstance(array_of_uniform, np.ndarray)


class BaseTestCases(object):
class BaseTestCase(SeededTest):
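
One note on the new test: it compares element-wise means of 10,000 prior draws against hard-coded arrays with rtol=1e-2, so the expected values presumably depend on the fixed seed supplied by the test suite's seeding machinery (the neighbouring classes inherit from SeededTest); running the draws without that seed would produce different numbers. The test can be selected with `pytest -k test_random_sample_returns_correctly`.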