pm.sample function doesn't take 'njobs' as an argument
vineeshvs opened this issue · comments
vineeshvs commented
Hi,
Great tutorial.
The section where pm.sample is called looks like the following.
Context for the model
with pm.Model() as normal_model:
# The prior for the model parameters will be a normal distribution
family = pm.glm.families.Normal()
# Creating the model requires a formula and data (and optionally a family)
pm.GLM.from_formula(formula, data = X_train, family = family)
# Perform Markov Chain Monte Carlo sampling
normal_trace = pm.sample(draws=2000, chains = 2, tune = 500, njobs=-1)
It gives me an error as given below
---------------------------------------------------------------------------
TypeError Traceback (most recent call last)
<ipython-input-108-f2e8efe4a0b2> in <module>
9
10 # Perform Markov Chain Monte Carlo sampling
---> 11 normal_trace = pm.sample(draws=2000, chains = 2, tune = 500, njobs=-1)
/usr/local/lib/python3.6/dist-packages/pymc3/sampling.py in sample(draws, step, init, n_init, start, trace, chain_idx, chains, cores, tune, progressbar, model, random_seed, discard_tuned_samples, compute_convergence_checks, **kwargs)
394 start_, step = init_nuts(init=init, chains=chains, n_init=n_init,
395 model=model, random_seed=random_seed,
--> 396 progressbar=progressbar, **kwargs)
397 if start is None:
398 start = start_
/usr/local/lib/python3.6/dist-packages/pymc3/sampling.py in init_nuts(init, chains, n_init, model, random_seed, progressbar, **kwargs)
1513 'Unknown initializer: {}.'.format(init))
1514
-> 1515 step = pm.NUTS(potential=potential, model=model, **kwargs)
1516
1517 return start, step
/usr/local/lib/python3.6/dist-packages/pymc3/step_methods/hmc/nuts.py in __init__(self, vars, max_treedepth, early_max_treedepth, **kwargs)
150 `pm.sample` to the desired number of tuning steps.
151 """
--> 152 super().__init__(vars, **kwargs)
153
154 self.max_treedepth = max_treedepth
/usr/local/lib/python3.6/dist-packages/pymc3/step_methods/hmc/base_hmc.py in __init__(self, vars, scaling, step_scale, is_cov, model, blocked, potential, dtype, Emax, target_accept, gamma, k, t0, adapt_step_size, step_rand, **theano_kwargs)
70 vars = inputvars(vars)
71
---> 72 super().__init__(vars, blocked=blocked, model=model, dtype=dtype, **theano_kwargs)
73
74 self.adapt_step_size = adapt_step_size
/usr/local/lib/python3.6/dist-packages/pymc3/step_methods/arraystep.py in __init__(self, vars, model, blocked, dtype, **theano_kwargs)
226
227 func = model.logp_dlogp_function(
--> 228 vars, dtype=dtype, **theano_kwargs)
229
230 # handle edge case discovered in #2948
/usr/local/lib/python3.6/dist-packages/pymc3/model.py in logp_dlogp_function(self, grad_vars, **kwargs)
721 varnames = [var.name for var in grad_vars]
722 extra_vars = [var for var in self.free_RVs if var.name not in varnames]
--> 723 return ValueGradFunction(self.logpt, grad_vars, extra_vars, **kwargs)
724
725 @property
/usr/local/lib/python3.6/dist-packages/pymc3/model.py in __init__(self, cost, grad_vars, extra_vars, dtype, casting, **kwargs)
460
461 self._theano_function = theano.function(
--> 462 inputs, [self._cost_joined, grad], givens=givens, **kwargs)
463
464 def set_extra_values(self, extra_vars):
TypeError: function() got an unexpected keyword argument 'njobs'