mixture.py
# Create a mixture of Gaussians and fit it with EM (expectation-maximization).
from numpy import *
from scipy.stats import norm
import pylab as PL

class Mix:
    def __init__(self, w, mu, sigma):
        self.mu = mu           # mixture means
        self._dim = size(mu)   # univariate for now
        self.sigma = sigma     # mixture variances
        self.k = size(w)       # number of components
        self.w = w

    def dataGenerate(self, num):
        # Draw roughly w[j]*num samples from each component; sigma holds
        # variances, so the normal scale parameter is its square root.
        out = zeros(0)
        for j in range(self.k):
            n_j = int(round(self.w[j] * num))
            out = concatenate((out, norm.rvs(size=n_j, loc=self.mu[j],
                                             scale=sqrt(self.sigma[j]))))
        return out

    def logLikelihood(self, data):
        # log L = sum_n log sum_j w_j N(x_n | mu_j, sigma_j)
        out = 0.0
        for i in range(size(data)):
            p = 0.0   # renamed from `norm` to avoid shadowing scipy's norm
            for j in range(self.k):
                p += self.normLikelihood(data[i], j)
            out += log(p)
        return out

    def normLikelihood(self, xn, j):
        # Weighted component density w_j * N(x_n | mu_j, sigma_j).
        err = xn - self.mu[j]
        return self.w[j] * norm.pdf(err, loc=0, scale=sqrt(self.sigma[j]))

class EM:
    def __init__(self, data, mix):
        self._data = data
        self._dsize = size(data)
        self.mix = mix

    def e_step(self):
        # Responsibilities: posterior[i, j] = P(component j | x_i).
        posterior = zeros((self._dsize, self.mix.k))
        for i in range(self._dsize):
            for j in range(self.mix.k):
                posterior[i, j] = self.mix.normLikelihood(self._data[i], j)
            posterior[i, :] /= sum(posterior[i, :])
        return posterior

    def m_step(self, partial):
        # Re-estimate weights, means and variances from the responsibilities.
        mu_n = zeros(self.mix.k)
        sigma_n = zeros(self.mix.k)
        w_n = zeros(self.mix.k)
        nk = sum(partial, 0)   # effective number of points per component
        for j in range(self.mix.k):
            w_n[j] = nk[j] / self._dsize
            mu_n[j] = dot(partial[:, j], self._data) / nk[j]
            err = self._data - mu_n[j]
            sigma_n[j] = dot(partial[:, j], err ** 2.0) / nk[j]
        self.mix.mu = mu_n
        self.mix.sigma = sigma_n
        self.mix.w = w_n
        print('Variance : ', sigma_n)
        print('Weights  : ', w_n)
        print('Mean     : ', mu_n)
        return self.mix.logLikelihood(self._data)

    def run(self, n_iter):
        ll = zeros(n_iter)
        for i in range(n_iter):
            ll[i] = self.m_step(self.e_step())
            print('Iteration : ', i, ', Log-Likelihood : ', ll[i])
        return ll

# --- main ---
# True Gaussian mixture model
mu = array([1.0, 10.0])
sigma = array([1.0, 1.0])
w = array([.2, .8])
mix_t = Mix(w, mu, sigma)

# Generate data and score it under the true model
data = mix_t.dataGenerate(100)
ll = mix_t.logLikelihood(data)
print('Log likelihood, true model    : ', ll)

# Random initial estimate (variances bounded away from zero for stability)
mu_i = random.uniform(0, 10, 2)
sigma_i = random.uniform(0.1, 2, 2)
w_i = zeros(2)
w_i[0] = random.uniform(0, 1)
w_i[1] = 1 - w_i[0]
mix_i = Mix(w_i, mu_i, sigma_i)
ll2 = mix_i.logLikelihood(data)
print('Log likelihood, initial model : ', ll2)

# Run EM
em = EM(data, mix_i)
err = em.run(10)
print('True Weight      : ', mix_t.w)
print('True Mean        : ', mix_t.mu)
print('True Sigma       : ', mix_t.sigma)
print('Estimated Weight : ', em.mix.w)
print('Estimated Mean   : ', em.mix.mu)
print('Estimated Sigma  : ', em.mix.sigma)

PL.hist(data)
PL.show()
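If scikit-learn happens to be available, the EM estimates above can be sanity-checked against its reference implementation. This is a minimal sketch, not part of the original script: it assumes scikit-learn is installed and reuses the `data` array produced above; `GaussianMixture` wants a 2-D (n_samples, n_features) input, and with the default covariance type its `covariances_` hold variances, directly comparable to `sigma` here.

# Optional cross-check against scikit-learn (assumed installed; reuses `data`).
from sklearn.mixture import GaussianMixture

X = data.reshape(-1, 1)                      # sklearn expects (n_samples, n_features)
gm = GaussianMixture(n_components=2).fit(X)
print('sklearn Weight : ', gm.weights_)
print('sklearn Mean   : ', gm.means_.ravel())
print('sklearn Sigma  : ', gm.covariances_.ravel())  # variances for 1-D data

Label switching is expected: the component order sklearn recovers need not match the order of the hand-rolled estimates, so compare the sorted parameter pairs.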