function [bics,modelout,model,Z,clabs] = mbclust(data,maxclus);

% Model-based clustering - entire process
%
%   [BICS,BESTMODEL,ALLMODELS,Z,CLABS] = MBCLUST(DATA,MAXCLUS);
%
% This performs the entire model-based clustering procedure for a data set.
% It fits only the 9 basic models, with unequal and unknown priors, and
% returns the BESTMODEL corresponding to the highest BIC.
%
% See the HELP on MBCFINMIX for information on the model types.
%
% The output variable BICS contains the values of the BIC for
% each model (row) and number of clusters (col). The output variable 
% CLABS contains the class labels for the input data according to the 
% optimal clustering given by BIC.
%
% The output variable Z contains the cluster structure from the 
% agglomerative model-based clustering. The matrix Z can be used 
% in the DENDROGRAM function or the RECTPLOT plotting function.
% 
% The output variable ALLMODELS is a structure containing all of the models.
% ALLMODELS(I) corresponds to the I-th model type (1-9), and ALLMODELS(I).CLUS(J)
% holds the fitted model with J clusters.
%
% The input variable MAXCLUS denotes the maximum number of clusters to
% check for.
%
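% Example (a minimal usage sketch; X is assumed to be an n-by-d data matrix,
% and 6 is an arbitrary choice for MAXCLUS):
%
%   [bics,bestmodel,allmodels,Z,clabs] = mbclust(X,6);
%   plot(1:6,bics')      % compare the BIC curves for the 9 models
%   dendrogram(Z)        % view the agglomerative clustering structure
%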

%   Model-based Clustering Toolbox, January 2003
%   Revised June 2004 to add all 9 models to this.

% model(i) indicates the i-th model type.
% clus(i) indicates that there are i clusters in the model.

warning off

[n,d] = size(data);
bics = zeros(9,maxclus);     % Each row is a BIC for a model. Each col is a BIC for # clusters.

% Initialize the structure using all data points.
% This is the information for the one term/cluster model.
for i = 1:9
	model(i).clus(1).pies = 1;
	model(i).clus(1).mus = mean(data)';
	model(i).clus(1).vars = varupm(data,ones(n,1),1,mean(data)',i);
    bics(i,1) = bic(data,1,model(i).clus(1).mus,model(i).clus(1).vars,i);
end

if nargin == 3	% NOTE: unreachable with the two-input signature above; AMDE initialization path retained
	disp('Getting the Adaptive Mixtures initial partition.')
	% Find an initial partition using AMDE.
	[pies,mus,vars,nterms] = amde(data,100);
	disp('Getting the agglomerative model based clustering structure')
	% Do the agglomerative model based clustering using mclust.
	Z = amdemclust(data,pies,mus,vars);
else
	disp('Getting the agglomerative model based clustering structure')
	Z = agmbclust(data);
end
	
% Based on the initialization of AMBC, get the models.

for m = 2:maxclus		% Loop over the different number of clusters.
	
	% m represents the number of clusters/terms in the model.
	
	% Find the cluster labels for the number of clusters.
	labs = cluster(Z,m);
	
    % Loop over the 9 different model types.
    for i = 1:9 
        musin = zeros(d,m);     % each column is a term.     
        piesin = zeros(1,m); 
        % Find all of the points belonging to each cluster. 
        for j = 1:m
            ind = find(labs==j);
            musin(:,j) = mean(data(ind,:))';
            piesin(j) = length(ind)/n;
            varsin(:,:,j) = varupm(data(ind,:),ones(length(ind),1),1,musin(:,j),i);
        end % j loop
        % get the finite mixture only if the previous one did not diverge
        tmp = length(model(i).clus);
        if ~isempty(model(i).clus(tmp).mus)     % then get the model
            disp(['Getting the finite mixture estimate for model ' int2str(i) ', ' int2str(m) ' clusters.'])
            [model(i).clus(m).pies,model(i).clus(m).mus,model(i).clus(m).vars] = mbcfinmix(data,musin,varsin,piesin,i);
            if ~isempty(model(i).clus(m).mus)
                bics(i,m) = bic(data,model(i).clus(m).pies,model(i).clus(m).mus,model(i).clus(m).vars,i);
            else
                bics(i,m) = NaN;   % mark a diverged fit with NaN
            end
        else
            bics(i,m) = NaN;
        end
        
    end  % i model type loop
        
end   % for m loop

% Once we have the BIC for each model, get the class labels according to
% the highest BIC.
[maxbic,maxi] = max(bics(:));
[mi,mj] = ind2sub(size(bics),maxi);

disp(['Maximum BIC is ' num2str(maxbic) '. Model number ' int2str(mi) '. Number of clusters is ' int2str(mj)])

% get the best model.
pies = model(mi).clus(mj).pies;
mus = model(mi).clus(mj).mus;
vars = model(mi).clus(mj).vars;

clabs = zeros(1,n);
for i = 1:n     
    posterior = postm(data(i,:)',pies,mus,vars);
    [v, clabs(i)] = max(posterior);     % classify it with the highest posterior prob.
end

modelout.pies = pies;
modelout.mus = mus;
modelout.vars = vars;

warning on
%%%%%%%%%%%%%%%%%%%%%%%%%%
%	INITIALIZE VARS
%%%%%%%%%%%%%%%%%%%%%%%%%%
function varm = varupm(data,posterior,mix_cof,mu,model)
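% Given the data, an n-by-c matrix of posterior weights, the mixing
% coefficients, and the d-by-c matrix of term means, return the d-by-d-by-c
% array of covariance matrix estimates for the requested model type (1-9).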
[nn,c]=size(posterior);
[n,d]=size(data);
switch model
case 1
    % lambda*I
    % Spherical family.
    % diagonal equal covariance matrices
    % first find the full one.
    W_k = zeros(d,d,c);
    for i=1:c
        cen_data=data-ones(n,1)*mu(:,i)';
        mat=cen_data'*diag(posterior(:,i))*cen_data;
        W_k(:,:,i)=mat;
    end
    % common covariance is the sum of these individual ones.
    W = sum(W_k,3);
    lambda = trace(W)/(n*d);
    varmt = lambda*eye(d);
    varm = zeros(d,d,c);
    for i = 1:c
        varm(:,:,i) = varmt;
    end
case 2
    % lambda_k*I
    % Spherical family.
    % diagonal, unequal covariance matrices
    % first find the full one.
    varm = zeros(d,d,c);
    nk = mix_cof*n; % one for each term. See equation 13-15, C&G
    for i=1:c
        cen_data=data-ones(n,1)*mu(:,i)';
        Wk=cen_data'*diag(posterior(:,i))*cen_data;
        lambda = trace(Wk)/(d*nk(i));
        varm(:,:,i) = lambda*eye(d);
    end
case 3
    % lambda*B
    % Added April, 2003
    % Diagonal family. Equal covariances. Fixed volume and shape.
    % first find the full one.
    W_k = zeros(d,d,c);
    for i=1:c
        cen_data=data-ones(n,1)*mu(:,i)';
        mat=cen_data'*diag(posterior(:,i))*cen_data;
        W_k(:,:,i)=mat;
    end
    % common covariance is the sum of these individual ones.
    W = sum(W_k,3);
    dw = diag(W);
    detW = det(diag(dw))^(1/d);
    B = diag(dw)/detW;    % the matrix B is the diagonal of these
    lambda = detW/n;
    % put same covariance in each term
    varm = zeros(d,d,c);
    mt = lambda*B;
    for i = 1:c
        varm(:,:,i) = mt;
    end
case 4
    % lambda*B_k
    % Added April, 2003
    % Diagonal family. Unequal shapes, same volume.
    B_k = zeros(d,d,c);
    tmp = 0;    % to calculate the lambda
    for i=1:c
        cen_data=data-ones(n,1)*mu(:,i)';
        Wk=cen_data'*diag(posterior(:,i))*cen_data;
        dWk = diag(Wk);
        detW = det(diag(dWk))^(1/d);
        tmp = tmp + detW;
        B_k(:,:,i) = diag(dWk)/detW;
    end
    % Now get the new matrices based on each individual term.
    varm = zeros(d,d,c);
    lambda = tmp/n;
    for i = 1:c
        varm(:,:,i) = lambda*B_k(:,:,i);
    end    
case 5
    % lambda_k*B_k
    % Added April 2003
    % Diagonal family. Unequal shapes, unequal volume.
    varm = zeros(d,d,c);
    nk = mix_cof*n; % one for each term. See equation 13-15, C&G
    for i=1:c
        cen_data=data-ones(n,1)*mu(:,i)';
        Wk=cen_data'*diag(posterior(:,i))*cen_data;
        dWk = diag(Wk);
        detW = det(diag(dWk))^(1/d);
        lambdak = detW/nk(i);
        Bk = diag(dWk)/detW;
        varm(:,:,i) = Bk*lambdak;
    end
case 6
    % lambda*D*A*D'
    % Full covariance matrix, equal across terms
    % Same volume, shape, orientation
    W_k = zeros(d,d,c);
    for i=1:c
        cen_data=data-ones(n,1)*mu(:,i)';
        mat=cen_data'*diag(posterior(:,i))*cen_data;
        W_k(:,:,i)=mat;
    end
    % common covariance is the sum of these individual ones.
    varmt = sum(W_k,3);
    % put same covariance in each term
    varm = zeros(d,d,c);
    for i = 1:c
        varm(:,:,i) = varmt/n;
    end
case 7
    % lambda*D_k*A*(D_k)'
    % Added April 2003
    % Full covariance matrix.
    % same volume and shape, different orientation
    varm = zeros(d,d,c);
    omegak = zeros(d,d,c);
    dk = zeros(d,d,c);
    for i=1:c
        cen_data=data-ones(n,1)*mu(:,i)';
        Wk=cen_data'*diag(posterior(:,i))*cen_data;
        [dk(:,:,i), omegak(:,:,i)] = eig(Wk);
        % reorder so the eigenvalues are in decreasing order
        [es,index] = sort(diag(omegak(:,:,i)));
        index = flipud(index(:));  % sorts descending
        dk(:,:,i) = dk(:,index,i);
        omt = diag(omegak(:,:,i));
        omts = omt(index);
        omegak(:,:,i) = diag(omts);
    end
    A = sum(omegak,3);
    detA = det(A)^(1/d);
    A = A/detA;
    lambda = detA/n;
    for i = 1:c
        varm(:,:,i) = lambda*dk(:,:,i)*A*dk(:,:,i)';
    end
case 8
    % lambda*D_k*A_k*(D_k)'
    % Added April 2003
    % Full covariance matrix.
    % same volume, different shape and orientation.
    C_k = zeros(d,d,c);
    tmp = 0;
    for i=1:c
        cen_data=data-ones(n,1)*mu(:,i)';
        Wk=cen_data'*diag(posterior(:,i))*cen_data;
        detWk = det(Wk)^(1/d);
        C_k(:,:,i) = Wk/detWk;
        tmp = tmp + detWk;
    end
    varm = zeros(d,d,c);
    lambda = tmp/n;
    for i = 1:c
        varm(:,:,i) = lambda*C_k(:,:,i);
    end

case 9
    % lambda_k*D_k*A_k*(D_k)'
    % this is the unconstrained version
    % variable shape, volume, and orientation
    W_k = zeros(d,d,c);
    for i=1:c
        cen_data=data-ones(n,1)*mu(:,i)';
        mat=cen_data'*diag(posterior(:,i))*cen_data;
        W_k(:,:,i)=mat./(mix_cof(i)*n);
    end
    varm = W_k;
otherwise
    error(['You entered ' int2str(model) ' for the model. Values must be an integer from 1 to 9.'])
end

%%%%%%%%%%%%%%%%%%%%  FUNCTION - BIC %%%%%%%%%%%%%%%%%%%%%%%%

function val = bic(data,pies,mus,vars,model)

% function val = bic(data,pies,mus,vars,model)
% This function returns the BIC criterion for 
% evaluating a finite mixture model obtained from
% the EM algorithm. This is an approximation to
% twice the Bayes factor.

% References: Fraley and Raftery (1998), "How many clusters? Which clustering
% method? Answers via model-based cluster analysis."
% The parameter counts follow the table in Celeux and Govaert (1995).
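%
% The criterion computed below is  BIC = 2*loglike - m*log(n),  where m is
% the number of free parameters in the model and n is the sample size.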

[n,d] = size(data);
c = length(pies);	% number of terms.
alpha = c*d + c-1;  % number of parameters in means and pies.
beta = d*(d+1)/2;   % number of parameters in cov matrix - full.

% Now find the number of independent parameters in the model.
switch model
case 1
	m = d*c + c;
case 2
	m = 2*c + d*c-1;
case 3
    m = alpha + d;
case 4
    m = alpha + c*d - c +1;
case 5
    m = alpha + d*c;
case 6
	m = c-1 + d*c + d*(d+1)/2;
case 7
    m = alpha + c*beta - (c -1)*d;
case 8
    m = alpha + c*beta - (c-1);
case 9
	m = c-1 + d*c + c*d*(d+1)/2;
otherwise
	error('Model not recognized')
end

loglike = likelihood(data,mus,vars,pies);

val = 2*loglike - m*log(n);
	
%%%%%%%%%%%%%%  FUNCTION TO EVALUATE LIKELIHOOD %%%%%%%%%%%%%%
function like = likelihood(data,mu,var_mat,mix_cof)  
% Return the log-likelihood of the data under the finite mixture model.
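% For each observation x_j this evaluates the mixture density
%    f(x_j) = sum_i mix_cof(i) * N(x_j; mu(:,i), var_mat(:,:,i))
% and returns  sum_j log f(x_j).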
[n,d]=size(data);
[d,c]=size(mu);
tmplike = 0;
for i=1:c	
    % Find the value of the mixture at each data point and for each term.
    tmplike = tmplike + mix_cof(i)*evalnorm(data,mu(:,i)',var_mat(:,:,i));
end
% The log-likelihood is the sum of the logs of the mixture densities.
like = sum(log(tmplike));


%%%%%%%%%%%%%%%  FUNCTION EVALNORM %%%%%%%%%%%%%
function prob = evalnorm(x,mu,cov_mat);
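% Evaluate the d-dimensional multivariate normal density with mean MU (a row
% vector) and covariance COV_MAT at each row of X.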
[n,d]=size(x);
prob = zeros(n,1);
a=(2*pi)^(d/2)*sqrt(det(cov_mat));
covi = inv(cov_mat);
for i = 1:n
	xc = x(i,:)-mu;
	arg=xc*covi*xc';
	prob(i)=exp((-.5)*arg);
end
prob=prob/a;

%%%%%%%%%%%%FUNCTION - POSTM %%%%%%%%%%%%%%%%%%%%%%
function posterior = postm(x,pies,mus,vars)
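% Compute the posterior probability that the observation X (a d-by-1 vector)
% belongs to each mixture term: mixing proportion times component density,
% normalized to sum to one.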
nterms = length(pies);
totprob=0;
posterior=zeros(1,nterms);
for i=1:nterms	%loop to find total prob in denominator (hand, pg 37)
  posterior(i)=pies(i)*evalnorm(x',mus(:,i)',vars(:,:,i));
  totprob=totprob+posterior(i);
end
posterior=posterior/totprob;
