# Assumed imports (not in the original snippet); BayesPy's nodes module is
# aliased as `nds` to match the code below.
import numpy as np
from bayespy import nodes as nds
from bayespy.inference import VB


# Input:
#   N          = number of data vectors
#   D          = dimensionality of each data vector
#   n_krnl     = number of mixture components (kernels)
#   covariance = 'full' for full covariance matrices, otherwise diagonal
# The function name and signature are assumed; the original gives only the body.
def build_gmm(N, D, n_krnl, covariance='full'):
    # Dirichlet prior on the mixing proportions
    P = nds.Dirichlet(1e-5*np.ones(n_krnl), name='P')
    # Cluster assignments: one categorical over the n_krnl components per data vector
    I = nds.Categorical(P, plates=(N,), name='I')
    if covariance == 'full':
        # n_krnl D-dimensional component means
        mu = nds.Gaussian(np.zeros(D), 1e-5*np.identity(D), plates=(n_krnl,), name='mu')
        # n_krnl D x D component precision matrices (inverse covariances)
        Lambda = nds.Wishart(D, 1e-5*np.identity(D), plates=(n_krnl,), name='Lambda')
        Y = nds.Mixture(I, nds.Gaussian, mu, Lambda, plates=(N,), name='Y')
    else:
        print('diagonal')
        # n_krnl D-dimensional component means with elementwise (ARD) priors;
        # the prior precision is a scalar here, not a D x D identity matrix
        mu = nds.GaussianARD(np.zeros(D), 1e-5, shape=(D,), plates=(n_krnl,), name='mu')
        # n_krnl x D independent precisions (inverse variances)
        Lambda = nds.Gamma(1e-3, 1e-3, plates=(n_krnl, D), name='Lambda')
        Y = nds.Mixture(I, nds.GaussianARD, mu, Lambda, plates=(N,), name='Y')
    # Random initialization of the assignments breaks symmetry between components
    I.initialize_from_random()
    return VB(Y, mu, Lambda, I, P)
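

# Usage sketch (assumed, not part of the original code): build the model,
# observe synthetic data on the mixture node, and run VB updates. The helper
# name `build_gmm`, the synthetic data, and the iteration count are
# illustrative assumptions; `observe` and `VB.update` are standard BayesPy calls,
# and nodes are looked up from the VB object by their `name`.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    data = rng.standard_normal((500, 3))      # N=500 synthetic 3-D vectors
    Q = build_gmm(N=500, D=3, n_krnl=4, covariance='full')
    Q['Y'].observe(data)                      # attach the observations to the mixture node
    Q.update(repeat=100)                      # iterate VB until convergence or 100 steps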