@@ -36,8 +36,34 @@ class Mixture(Distribution):
     w : array of floats
         w >= 0 and w <= 1
         the mixture weights
-    comp_dists : multidimensional PyMC3 distribution or iterable of one-dimensional PyMC3 distributions
-        the component distributions :math:`f_1, \ldots, f_n`
+    comp_dists : multidimensional PyMC3 distribution (e.g. `pm.Poisson.dist(...)`)
+        or iterable of one-dimensional PyMC3 distributions
+        the component distributions :math:`f_1, \ldots, f_n`
+
+    Example
+    -------
+    # 2-Mixture Poisson distribution
+    with pm.Model() as model:
+        lam = pm.Exponential('lam', lam=1, shape=(2,))  # `shape=(2,)` indicates two mixture components.
+
+        # As we just need the logp, rather than adding a RV to the model, we call .dist()
+        components = pm.Poisson.dist(mu=lam, shape=(2,))
+
+        w = pm.Dirichlet('w', a=np.array([1, 1]))  # two mixture component weights.
+
+        like = pm.Mixture('like', w=w, comp_dists=components, observed=data)
+
+    # 2-Mixture Poisson using iterable of distributions.
+    with pm.Model() as model:
+        lam1 = pm.Exponential('lam1', lam=1)
+        lam2 = pm.Exponential('lam2', lam=1)
+
+        pois1 = pm.Poisson.dist(mu=lam1)
+        pois2 = pm.Poisson.dist(mu=lam2)
+
+        w = pm.Dirichlet('w', a=np.array([1, 1]))
+
+        like = pm.Mixture('like', w=w, comp_dists=[pois1, pois2], observed=data)
     """
     def __init__(self, w, comp_dists, *args, **kwargs):
         shape = kwargs.pop('shape', ())
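
Below is a minimal end-to-end sketch of the first example in the new docstring, with the pieces the snippet leaves implicit (the numpy/pymc3 imports, a synthetic `data` array, and a sampling call) filled in as assumptions for illustration:

    import numpy as np
    import pymc3 as pm

    # Hypothetical count data drawn from two Poisson rates (2 and 10).
    data = np.concatenate([np.random.poisson(2, size=500),
                           np.random.poisson(10, size=500)])

    with pm.Model() as model:
        lam = pm.Exponential('lam', lam=1, shape=(2,))    # two component rates
        components = pm.Poisson.dist(mu=lam, shape=(2,))  # .dist() gives the logp without adding a RV
        w = pm.Dirichlet('w', a=np.array([1, 1]))         # mixture weights
        like = pm.Mixture('like', w=w, comp_dists=components, observed=data)
        trace = pm.sample(1000)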
@@ -165,7 +191,7 @@ class NormalMixture(Mixture):
         the component standard deviations
     tau : array of floats
         the component precisions
-
+
     Note: You only have to pass in sd or tau, but not both.
     """
     def __init__(self, w, mu, *args, **kwargs):
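
To illustrate the "sd or tau, but not both" note, here is a sketch of NormalMixture usage passing sd only; the data, priors, and variable names are assumptions chosen for illustration:

    import numpy as np
    import pymc3 as pm

    # Hypothetical data: draws from two normals centred at -3 and 3.
    data = np.concatenate([np.random.normal(-3, 1, size=500),
                           np.random.normal(3, 1, size=500)])

    with pm.Model() as model:
        mu = pm.Normal('mu', mu=0, sd=10, shape=(2,))  # component means
        sd = pm.HalfNormal('sd', sd=10, shape=(2,))    # component standard deviations
        w = pm.Dirichlet('w', a=np.array([1, 1]))      # mixture weights
        obs = pm.NormalMixture('obs', w=w, mu=mu, sd=sd, observed=data)  # pass sd= or tau=, not both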