亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? nnarmax2.c

?? matlab實現神經網絡程序集合
?? C
?? 第 1 頁 / 共 2 頁
字號:
/*
 *     INCLUDE HEADERS
 */
#include <stdio.h>
#include <math.h>
#include <time.h>
#include "mex.h"
#include "matrix2.h"
#include "nnmisc.h"

/*
 * Forward declaration of the NNARMAX2 trainer.
 *
 * Outputs (written through pointers):
 *   NSSEvecpp - vector of normalized SSE values, one per iteration
 *   iter      - number of iterations actually performed
 *   lam       - final Levenberg-Marquardt parameter
 * Inputs:
 *   NetDef, NN, W1, W2, trparms, skip, Y, U - network definition, lag
 *   structure, initial weights, training parameters, samples to skip,
 *   and the observed output/input data (see the definition below).
 */
void nnarmax2(matrix**, int*, double*, matrix*, matrix*, matrix*, matrix*, matrix*,\
 int, matrix*, matrix*);



/*********************************************************************************
 *                                                                               *
 *    NNARMAX2                                                                   *
 *    --------                                                                   *
 *                                                                               *
 *    This is a CMEX-version of the Matlab function nnarmax2.                    *
 *    Type 'help nnarmax2' from Matlab for information on                        *
 *    how to call this function.                                                 *
 *                                                                               *
 *                                                                               *
 *    Programmed by: Magnus Norgaard                                             *
 *    LastEditDate : sep. 04, 1995                                               *
 *                                                                               *
 *********************************************************************************/
void nnarmax2(matrix **NSSEvecpp, int *iter, double *lam,\
	matrix *NetDef, matrix *NN, matrix *W1, matrix *W2, matrix *trparms,\
	int skip, matrix *Y, matrix *U)
{
/*
----------------------------------------------------------------------------------- 
---------------              VARIABLE DECLARATIONS                    ------------- 
----------------------------------------------------------------------------------- 
*/ 
register i, j, k, t; 
int max_iter, outputs, N, Nout, layers, dummy, hidden, inputs, iteration; 
int parameters1, parameters2, parameters, reduced, index1, ii, jj; 
int lhids, hhids, louts, houts, index11;
int Ndat, N2, na, nc, nu, nab, nabc, nmax, index5, dummy2;
double stop_crit, lambda, SSE, SSE_new, NSSE, NSSE_new, L, tmp1, sum, dummy3; 
char dw; 
matrix *L_hidden, *H_hidden, *L_output, *H_output, *h1, *h2, *y1, *y2; 
matrix *E, *E_new, *W1_new, *W2_new, *PHI, *D, *Dtmp; 
matrix *NSSEvec, *miter, *tmp, *Htmp, *R, *W1tmp; 
matrix *theta, *thtmp, *theta_index, *theta_red, *theta_red_new, *PSI, *G, *H, *h; 
matrix *all, *index0, *index7, *onesvec, *tmp0, *tmp2, *tmp3, *index, *index2;
matrix *nb, *nk, *dy2dy1, *dy2de, *dy1de, *dy2de_vec, *Y2, *dummy1;
struct tm *c; 
time_t lt; 
 
 
/* 
----------------------------------------------------------------------------------- 
---------------             NETWORK INITIALIZATIONS                   ------------- 
----------------------------------------------------------------------------------- 
 */
Ndat      = getcols(Y);                  /* # of data                            */
na        = vget(NN,0);                  /* Past predictions used as inputs      */
nu	  = getrows(U);			 /* # of input signals                   */ 
nc        = vget(NN,nu+1);               /* Past prediction errors used as input */
if(nu!=0){
	nb = mmake(1,nu);                /* Past controls used as inputs	 */
            subvec(nb,NN,1,nu);
	nk = mmake(1,nu);                /* Time delays                          */
	    subvec(nk,NN,nu+2,2*nu+1);
}
nmax      = na;		                 /* Oldest signal used as input          */
if(nmax<nc) nmax=nc;
for(k=0;k<nu;k++){
  i=rvget(nb,k)+rvget(nk,k)-1;
  if(nmax<i) nmax=i;
}
N         = Ndat - nmax;                 /* Size of training set                 */
N2        = N-skip;
nab       = na; 			 /* na+nb                                */
for(k=0;k<nu;k++) nab=nab+rvget(nb,k);
nabc      = nab+nc;			 /* na+nb+nc                             */
Y2        = mmake(1,N);                  /* Observed outputs used for training   */
hidden    = getcols(NetDef);             /* # of hidden units                    */
inputs    = nabc;                 	 /* Number of inputs to network          */
outputs   = 1;		                 /* Always one outputs                   */
Nout      = N*outputs;                   /* N*outputs                            */
L_hidden  = neuvector(NetDef,1,'L');     /* Location of linear hidden units      */
H_hidden  = neuvector(NetDef,1,'H');     /* Location of tanh hidden units        */ 
L_output  = neuvector(NetDef,2,'L');     /* Location of linear output units      */ 
H_output  = neuvector(NetDef,2,'H');     /* Location of tanh output units        */ 
lhids     = getrows(L_hidden);           /* # of linear hidden units             */ 
hhids     = getrows(H_hidden);           /* # of tanh hidden units               */ 
louts     = getrows(L_output);           /* # of linear output units             */  
houts     = getrows(H_output);           /* # of tanh output units               */
miter     = mmake(1,1);                  /* Temp element                         */ 
h1        = mmake(hidden,1);             /* Argument to hidden layer act. fcts   */ 
h2        = mmake(outputs,1);            /* Argument to hidden layer act. fcts   */ 
onesvec   = mmake(1,N);                  /* Vector of all ones                   */
minitx(onesvec,1.0); 
y1        = mmake(hidden+1,N);           /* Hidden layer outputs                 */
minit(y1); 
mat2mat(y1,hidden,0,onesvec);            /* Add a row of ones (bias to outputs)  */ 
y2        = mmake(outputs,N);            /* Output layer output                  */ 
minit(y2);
E         = mmake(outputs,N);            /* Prediction error matrix              */
E_new     = mmake(outputs,N);            /* A priori E                           */ 
index     = mmake(hidden,1);             /* Index vector outputs*(hidden+1)+...  */
for(k=0;k<hidden;k++) cvput(index,k,(double)(outputs*(hidden+1)+k*(inputs+1))); 
index2    = mmake(N,1);                  /* Index vector (0:N-1)*outputs         */
for(k=0;k<N;k++) cvput(index2,k,(double)k*outputs); 
iteration = 1;                           /* Initialize iteration counter         */
dw        = 1;                           /* Flag telling that the weights are new*/ 
parameters1= hidden*(inputs+1);          /* # of input-to-hidden weights         */
parameters2= outputs*(hidden+1);         /* # of hidden-to-output weights        */ 
parameters = parameters1+parameters2;    /* Total # of weights                   */ 

/*
 >>>>>>>>>>>>>>>>>>>>  CONSTRUCT THE REGRESSION MATRIX PHI   <<<<<<<<<<<<<<<<<<<<<
 */
PHI = mmake(nabc+1,N);      	         /* Matrix of input vectors (incl. bias) */
minit(PHI);
mat2mat(PHI,nabc,0,onesvec);
for(k=0;k<na;k++){
	for(i=0;i<Ndat-nmax;i++) mput(PHI,k,i,vget(Y,i+nmax-k-1));
}
index5 = na;                             /* Insert controls in PHI               */
for(i=0;i<nu;i++){
	for(k=0;k<vget(nb,i);k++){
		for(j=0;j<Ndat-nmax;j++){
			mput(PHI,index5+k,j,mget(U,i,nmax+j-k-vget(nk,i)));
		}
	}
	index5=index5+vget(nb,i);
}
for(t=0;t<N;t++) rvput(Y2,t,rvget(Y,t+nmax));

/*
 >>>>>>>>>>>>>>>>>  INITIALIZE WEIGHTS WITH NNARX IF NECESSARY   <<<<<<<<<<<<<<<<<<
 */
if(getrows(W2)==0){
	W2->row=1;
	mrand(W1); smul(W1,W1,0.025);
   	mrand(W2); smul(W2,W2,0.5);
   	W1tmp = mmake(hidden,nab+1);
   	mrand(W1tmp); smul(W1tmp,W1tmp,0.5);
	PHI->row=nab;
	marqc(&dummy1, &dummy2, &dummy3, NetDef, W1tmp, W2, PHI, Y2, trparms);
	PHI->row=inputs+1;
   	mat2mat(W1,0,0,W1tmp);
	mfree(dummy1); mfree(W1tmp);
}

W1_new     = mmake(hidden,inputs+1);     /* A priori updated W1                  */
W2_new     = mmake(outputs,hidden+1);    /* A priori updated W2                  */ 
theta      = mmake(parameters,1);        /* Vector containing all weights        */ 
m2vreshape(theta,0,W2); 
m2vreshape(theta,parameters2,W1); 
thtmp      = mnofind(theta,0.0);         /* Find non-zero entries in theta       */ 
reduced    = getrows(thtmp);             /* # of non-zero elements               */ 
theta_index = mmake(reduced,1);          /* Indices to weights <> 0              */ 
submat(theta_index,thtmp,0,reduced-1,0,0); 
theta_red = mmake(reduced,1);            /* Reduced parameter vector             */ 
for(i=0;i<reduced;i++)                   /* theta_red = theta(theta_index)       */ 
  cvput(theta_red,i,cvget(theta,(int)cvget(theta_index,i))); 
theta_red_new = mmake(reduced,1);        /* A priori update of parameters        */ 
dy2de     = mmake(nc,N);                 /* Der. of output wrt. past pred. errors*/
dy1de     = mmake(hidden,nc);            /* Der.of hid. outp. wrt. past pred. err*/
dy2dy1    = mmake(1,hidden);             /* Der. of outp. wrt. hidden outp.      */
dy2de_vec = mmake(1,nc);		 /* For temp. results   		 */
PSI       = mmake(parameters,Nout);      /* Der. of each output wrt. each weight */
G         = mmake(reduced,1);            /* Gradient vector                      */ 
H         = mmake(reduced,reduced);      /* Hessian matrix                       */ 
R         = mmake(reduced,reduced);      /* Mean square error G-N Hessian        */
Htmp      = mmake(reduced,reduced);      /* Matrix used by the linear sys solver */ 
h         = mmake(reduced,1);            /* Update vector                        */ 
all       = mmake(N,1);                  /* Index vector (0:N-1)                 */ 
for(k=0;k<N;k++) cvput(all,k,(double)k); 
index0    = mmake(1,1);                  /* Index vector (0)                     */ 
put_val(index0,0,0,0); 
index7    = mmake(parameters,1);         /* Index vector (0:parameters-1)        */ 
for(k=0;k<parameters;k++) cvput(index7,k,(double)k); 
if (hhids>0) tmp0 = mmake(hhids,N);      /* Used to construct PSI                */
else tmp0 = mmake(1,1);
tmp2      = mmake(1,N);                  /* Used to construct PSI                */ 
tmp3      = mmake(1,N);                  /* Used to construct PSI                */ 
max_iter  = vget(trparms,0);             /* Max. no. iterations                  */
stop_crit = vget(trparms,1);             /* Error bound                          */ 
lambda    = vget(trparms,2);             /* Levenberg-Marquardt parameter        */ 
D         = mmake(reduced,1);            /* Initialize vector cont. weight decays*/ 
Dtmp      = mmake(parameters,1); 
if(length(trparms)==4)                   /* Scalar weight decay parameters       */ 
  for(i=0;i<reduced;i++) cvput(D,i,rvget(trparms,3)); 
else if(length(trparms)==5)              /* Two weight decay parameters          */ 
{ 
  for(i=0;i<parameters2;i++) cvput(Dtmp,i,rvget(trparms,3)); 
  for(i=parameters2;i<parameters;i++) cvput(Dtmp,i,rvget(trparms,4)); 
  mcopyi(D,theta_index,index0,Dtmp,index7,index0); 
} 
else{                                    /* Individual weight decays             */ 
  for(i=0;i<reduced;i++) cvput(D,i,rvget(trparms,3+i));    
}
NSSE      = stop_crit+1;                 /* Intialize cost function              */
NSSEvec = mmake(max_iter,1);             /* Vector containing normailzed SSEs    */ 
minit(NSSEvec); 


/*
----------------------------------------------------------------------------------- 
---------------                    TRAIN NETWORK                      ------------- 
----------------------------------------------------------------------------------- 
*/
lt = time(NULL); 
c  = localtime(&lt); 

/* Clear screen on HP systems.
Uncomment the following line and comment the subsequent one */
/*printf("\x1BH\x1BJNetwork training started at %.8s\n\n",asctime(c)+11);*/

printf("\nNetwork training started at %.8s\n\n",asctime(c)+11);


/* 
 >>>>>>>>>>>>>>       Compute network output y2(theta)          <<<<<<<<<<<<<<<  
*/
for(t=0;t<N;t++){
	mvmul(h1,W1,PHI,t);
	vtanh(y1,H_hidden,t,h1,H_hidden,0);
	vcopyi(y1,L_hidden,t,h1,L_hidden,0);

	mvmul(h2,W2,y1,t);
	vtanh(y2,H_output,t,h2,H_output,0);
	vcopyi(y2,L_output,t,h2,L_output,0);
	
	rvput(E,t,rvget(Y2,t)-rvget(y2,t));          /* Prediction error        */	
	j=nc;
	if(N-t-1<nc) j=N-t-1;
	for(i=1;i<=j;i++){
		put_val(PHI,nab+i-1,t+i,rvget(E,t));
	}
}
for(SSE=0,t=skip;t<N;t++) SSE+=rvget(E,t)*rvget(E,t);/* Sum of squared errors   */
for(tmp1=0,i=0;i<reduced;i++) tmp1+=cvget(theta_red,i)*cvget(theta_red,i)*cvget(D,i); 
NSSE = (SSE+tmp1)/(2*N2);                            /* Value of cost function  */

while (iteration<=max_iter && NSSE>stop_crit && lambda<1e7)
{
  if(dw==1)
  {
/*
 >>>>>>>>>>>>>>>>>>>>>>>>>>>   COMPUTE THE PSI MATRIX   <<<<<<<<<<<<<<<<<<<<<<<<<<
 (The derivative of each network output (y2) with respect to each weight)
*/
/* Some intermidiate computations */
    for(j=0;j<hhids;j++)
    {
      jj = (int)cvget(H_hidden,j);
      for(k=0;k<N;k++)
	put_val(tmp0,j,k,1-get_val(y1,jj,k)*get_val(y1,jj,k));
    }

/*   ==========   Elements corresponding to the linear output units   ===========*/
    for(i=0; i<louts; i++)
    {
      ii = (int)cvget(L_output,i);

      /***  The part of PSI corresponding to hidden-to-output layer weights ***/
      index1 = ii * (hidden+1);
      psi1(PSI, index1, index2, ii, y1);
      /************************************************************************/

      /**** The part of PSI corresponding to input-to-hidden layer weights ****/
      for(j=0; j<lhids; j++)
      {
	jj = (int)cvget(L_hidden,j);
        psi2(PSI, (int)cvget(index,jj), index2, ii, get_val(W2,ii,jj), PHI);
      }

      for(j=0; j<hhids;j++)
      {
        jj = (int)cvget(H_hidden,j);
	psi3(tmp3, tmp0, j, get_val(W2,ii,jj));
	psi4(PSI, (int)cvget(index,jj), index2, ii, tmp3, PHI);
      }
      /************************************************************************/    
    }


    /* ===========  Elements corresponding to the tanh output units   ===========*/
    for(i=0; i<houts; i++)
    {
      ii = (int)cvget(H_output,i);
      index1 = ii * (hidden + 1);
      for(k=0; k<N; k++)
	put_val(tmp2,0,k,1-get_val(y2,ii,k)*get_val(y2,ii,k));

      /* -- The part of PSI corresponding to hidden-to-output layer weights --*/
      psi4(PSI, index1, index2, ii, tmp2, y1);
      /* ---------------------------------------------------------------------*/
    
      /* -- The part of PSI corresponding to input-to-hidden layer weights ---*/
      for(j=0; j<lhids; j++)
      {
        jj = (int)cvget(L_hidden,j);
	smul(tmp3, tmp2, get_val(W2,ii,jj));
	psi4(PSI, (int)cvget(index,jj), index2, ii, tmp3, PHI);
      }
      
      for(j=0; j<hhids; j++)
      {
      	jj = (int)cvget(H_hidden,j);
	psi3(tmp3, tmp0, j, get_val(W2,ii,jj));
        psi5(PSI, (int)cvget(index,jj), index2, ii, tmp3, tmp2, PHI);
      }
      /* ---------------------------------------------------------------------*/
    }
      

    /* 

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
韩日精品视频一区| www.在线成人| 亚洲乱码一区二区三区在线观看| 欧美手机在线视频| 成人小视频在线| 日本午夜一本久久久综合| 亚洲少妇30p| 久久久久久亚洲综合影院红桃| 91久久精品国产91性色tv| 国产成人亚洲综合a∨猫咪| 香港成人在线视频| 亚洲欧美日韩国产手机在线| 久久久精品天堂| 欧美一级国产精品| 欧美日韩电影一区| 在线观看免费一区| 91在线免费看| 波多野结衣中文字幕一区| 国产一区二区不卡老阿姨| 久久精品99久久久| 奇米888四色在线精品| 亚洲国产毛片aaaaa无费看| 国产精品美女视频| 欧美国产精品v| 久久久久久**毛片大全| 日韩欧美你懂的| 制服.丝袜.亚洲.中文.综合| 欧美三级视频在线观看| 91欧美一区二区| 99久久综合狠狠综合久久| 国产高清不卡一区二区| 国产99一区视频免费| 国产盗摄一区二区| 国产河南妇女毛片精品久久久| 国产一区视频网站| 国产一区二区视频在线播放| 国产一区二区在线电影| 国产成人超碰人人澡人人澡| 国产精品亚洲视频| 成人h版在线观看| www.99精品| av不卡在线播放| 91久久一区二区| 欧美日韩视频一区二区| 欧美色偷偷大香| 欧美精品久久久久久久多人混战 | proumb性欧美在线观看| 国产一区二区久久| 国产成人午夜片在线观看高清观看| 国产精品一级黄| 99久久国产综合精品麻豆| 色综合中文字幕| 欧美年轻男男videosbes| 91麻豆精品91久久久久同性| 欧美一级高清大全免费观看| 精品av久久707| 国产精品久久影院| 亚洲亚洲精品在线观看| 日韩电影免费在线观看网站| 极品美女销魂一区二区三区免费| 国产在线一区观看| 成人a级免费电影| 欧美午夜片在线看| 精品国产a毛片| 中文字幕亚洲一区二区av在线| 亚洲国产日韩在线一区模特 | 色综合色综合色综合色综合色综合| 99精品国产99久久久久久白柏| 91久久线看在观草草青青| 在线播放/欧美激情| 久久久99精品免费观看不卡| 国产精品久久久久一区二区三区| 一区二区三区av电影| 日本美女视频一区二区| 丰满亚洲少妇av| 欧美性猛交一区二区三区精品| 精品国产乱码久久久久久1区2区| 欧美激情在线免费观看| 亚洲国产三级在线| 国产经典欧美精品| 欧美日韩国产在线播放网站| 久久久.com| 日韩av一二三| 99re成人精品视频| 日韩一区二区三区av| 中文字幕乱码亚洲精品一区| 性做久久久久久| 波多野结衣在线一区| 欧美美女一区二区| 中文字幕在线观看不卡| 蜜臀久久99精品久久久画质超高清| 成人短视频下载| 日韩一级黄色大片| 亚洲综合男人的天堂| 国产在线精品国自产拍免费| 欧美在线播放高清精品| 久久久av毛片精品| 亚洲一级片在线观看| 成人av网址在线| 精品国产乱码久久久久久1区2区 | 国产精品久久网站| 久久国产精品无码网站| 欧美色网站导航| 日韩一区欧美一区| 国产福利不卡视频| 日韩欧美一区中文| 亚洲午夜视频在线| 91香蕉视频污| 中文字幕电影一区| 国产一区二区三区免费看| 欧美一级xxx| 天堂久久久久va久久久久| 色综合咪咪久久| 一色屋精品亚洲香蕉网站| 国产精品一区二区果冻传媒| 欧美成人vr18sexvr| 香蕉影视欧美成人| 欧美亚洲免费在线一区| 亚洲免费高清视频在线| gogo大胆日本视频一区| 国产欧美日本一区二区三区| 国产尤物一区二区| 久久久亚洲午夜电影| 久久99精品视频| 欧美大尺度电影在线| 日本中文字幕一区二区视频 | 日韩欧美一级特黄在线播放| 午夜视频在线观看一区| 欧美日韩1区2区| 无码av免费一区二区三区试看| 欧美制服丝袜第一页| 亚洲一区精品在线| 欧美日韩精品免费| 日本sm残虐另类| 欧美一级视频精品观看| 免费看欧美美女黄的网站| 日韩欧美黄色影院| 国模大尺度一区二区三区| 精品国产免费人成在线观看| 国产一区二区不卡在线 | 亚洲免费观看高清完整版在线观看熊 | 亚洲老妇xxxxxx| 色播五月激情综合网| 亚洲一区av在线| 欧美一区二区视频在线观看| 日本视频在线一区| 久久久亚洲高清| 成人久久视频在线观看| 亚洲欧美日韩电影| 欧美调教femdomvk| 日本不卡一二三区黄网| 精品福利一区二区三区 | 国产麻豆成人传媒免费观看| 久久久久久久精| 91免费看视频| 日韩高清一区在线| 国产午夜亚洲精品羞羞网站| 91蜜桃视频在线| 日韩电影在线免费| 亚洲国产精品t66y| 欧美日韩在线直播| 国产自产视频一区二区三区| 国产欧美一区二区精品婷婷| 色天天综合色天天久久| 天堂蜜桃91精品| 中文字幕精品在线不卡| 
欧美视频一区二区三区四区| 麻豆国产精品777777在线| 国产精品护士白丝一区av| 欧美日韩一区在线观看| 国产一区二区三区精品欧美日韩一区二区三区| 国产目拍亚洲精品99久久精品| 91蝌蚪porny成人天涯| 三级精品在线观看| 国产精品污网站| 欧美精品一二三四| 成人久久视频在线观看| 石原莉奈一区二区三区在线观看| 国产色产综合色产在线视频 | 欧美激情资源网| 欧美人狂配大交3d怪物一区| 国产精品一线二线三线| 亚洲国产成人tv| 国产偷国产偷精品高清尤物| 在线观看视频一区二区欧美日韩| 国产在线播放一区二区三区| 一区二区三区精品久久久| 精品国产乱码久久久久久老虎| 色先锋aa成人| 国产成人亚洲精品狼色在线| 日韩国产欧美在线播放| 亚洲你懂的在线视频| 久久男人中文字幕资源站| 欧美三级在线视频| 97久久精品人人澡人人爽| 久久国产福利国产秒拍| 亚洲国产毛片aaaaa无费看| 国产精品乱人伦一区二区| 日韩精品资源二区在线| 欧美视频第二页|