亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關於我們
? 蟲蟲下載站

?? svm_learn.c

?? 馬克斯普朗克提供的機(jī)器學(xué)習(xí)程序包
?? C
?? 第 1 頁 / 共 5 頁
字號:
/* ------------------------------------------------------------------
 * NOTE(review): This chunk is a scrape of SVMlight's svm_learn.c with
 * the original line breaks collapsed; the code below is preserved
 * byte-for-byte and only this header comment was added.
 *
 * svm_learn_regression()
 *   Trains an epsilon-SVR model from (docs, value) pairs by reducing
 *   the regression problem to a classification-style QP: it allocates
 *   a doubled working set of 2*totdoc examples — each training vector
 *   appears once with label +1 (stored at index i) and once with
 *   label -1 (mirrored at index 2*totdoc-1-i), with c[] holding the
 *   target value for both copies — then calls
 *   optimize_to_convergence() on the doubled problem.
 *
 *   Ownership/memory: docs_org keeps the caller's array; the doubled
 *   docs/label/c buffers plus all per-example work arrays (a, lin,
 *   inconsistent, unlabeled, a_fullset, xi_fullset,
 *   learn_parm->svm_cost) are malloc'd here and freed before return.
 *   model->supvec/alpha/index are allocated here and handed to the
 *   caller inside *model (caller frees). Before returning, each
 *   model->supvec[i] is remapped from the temporary doubled array back
 *   into docs_org (indices >= totdoc/2 fold back via totdoc-j-1), so
 *   the returned model does not dangle on the freed temporaries.
 *
 *   Defaults: if learn_parm->svm_c == 0, C is set to
 *   1/(r_delta_avg^2) from estimate_r_delta_average(). The kernel
 *   cache is disabled (NULL) for LINEAR kernels since caching buys
 *   nothing there.
 *
 *   NOTE(review): example_length = estimate_sphere(model,kernel_parm)
 *   is computed but never printed/used — the "longest example vector"
 *   line prints length_of_longest_document_vector() instead; appears
 *   to be leftover from the upstream source, not introduced by the
 *   scrape.
 *
 * optimize_to_convergence()
 *   Core chunking/working-set SMO-style optimizer. Its definition is
 *   TRUNCATED at the end of this chunk (page 1 of 5 of the scrape):
 *   the text below ends mid-call to
 *   select_next_qp_subproblem_grad_cache(). Left untouched — the
 *   remainder lives in the following pages.
 * ------------------------------------------------------------------ */
/* Learns an SVM regression model based on the training data in   docs/label. The resulting model is returned in the structure   model. */void svm_learn_regression(DOC *docs, double *value, long int totdoc, 			  long int totwords, LEARN_PARM *learn_parm, 			  KERNEL_PARM *kernel_parm, 			  KERNEL_CACHE *kernel_cache, MODEL *model)     /* docs:        Training vectors (x-part) */     /* class:       Training value (y-part) */     /* totdoc:      Number of examples in docs/label */     /* totwords:    Number of features (i.e. highest feature index) */     /* learn_parm:  Learning paramenters */     /* kernel_parm: Kernel paramenters */     /* kernel_cache:Initialized Cache of size 2*totdoc */     /* model:       Returns learning result (assumed empty before called) */{  long *inconsistent,i,j;  long inconsistentnum;  long upsupvecnum;  double loss,model_length,example_length;  double maxdiff,*lin,*a,*c;  long runtime_start,runtime_end;  long iterations;  long *unlabeled;  double r_delta_sq=0,r_delta,r_delta_avg;  double *xi_fullset; /* buffer for storing xi on full sample in loo */  double *a_fullset;  /* buffer for storing alpha on full sample in loo */  TIMING timing_profile;  SHRINK_STATE shrink_state;  DOC *docs_org;  long *label;  /* set up regression problem in standard form */  docs_org=docs;  docs = (DOC *)my_malloc(sizeof(DOC)*2*totdoc);  label = (long *)my_malloc(sizeof(long)*2*totdoc);  c = (double *)my_malloc(sizeof(double)*2*totdoc);  for(i=0;i<totdoc;i++) {       docs[i]=docs_org[i];    docs[i].docnum=i;    label[i]=+1;    c[i]=value[i];    docs[2*totdoc-1-i]=docs_org[i];    docs[2*totdoc-1-i].docnum=2*totdoc-1-i;    label[2*totdoc-1-i]=-1;    c[2*totdoc-1-i]=value[i];  }  totdoc*=2;  runtime_start=get_runtime();  timing_profile.time_kernel=0;  timing_profile.time_opti=0;  timing_profile.time_shrink=0;  timing_profile.time_update=0;  timing_profile.time_model=0;  timing_profile.time_check=0;  timing_profile.time_select=0;  kernel_cache_statistic=0;  
learn_parm->totwords=totwords;  /* make sure -n value is reasonable */  if((learn_parm->svm_newvarsinqp < 2)      || (learn_parm->svm_newvarsinqp > learn_parm->svm_maxqpsize)) {    learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;  }  init_shrink_state(&shrink_state,totdoc,(long)20000);  inconsistent = (long *)my_malloc(sizeof(long)*totdoc);  unlabeled = (long *)my_malloc(sizeof(long)*totdoc);  a = (double *)my_malloc(sizeof(double)*totdoc);  a_fullset = (double *)my_malloc(sizeof(double)*totdoc);  xi_fullset = (double *)my_malloc(sizeof(double)*totdoc);  lin = (double *)my_malloc(sizeof(double)*totdoc);  learn_parm->svm_cost = (double *)my_malloc(sizeof(double)*totdoc);  model->supvec = (DOC **)my_malloc(sizeof(DOC *)*(totdoc+2));  model->alpha = (double *)my_malloc(sizeof(double)*(totdoc+2));  model->index = (long *)my_malloc(sizeof(long)*(totdoc+2));  model->at_upper_bound=0;  model->b=0;	         model->supvec[0]=0;  /* element 0 reserved and empty for now */  model->alpha[0]=0;  model->lin_weights=NULL;  model->totwords=totwords;  model->totdoc=totdoc;  model->kernel_parm=(*kernel_parm);  model->sv_num=1;  model->loo_error=-1;  model->loo_recall=-1;  model->loo_precision=-1;  model->xa_error=-1;  model->xa_recall=-1;  model->xa_precision=-1;  inconsistentnum=0;  r_delta=estimate_r_delta(docs,totdoc,kernel_parm);  r_delta_sq=r_delta*r_delta;  r_delta_avg=estimate_r_delta_average(docs,totdoc,kernel_parm);  if(learn_parm->svm_c == 0.0) {  /* default value for C */    learn_parm->svm_c=1.0/(r_delta_avg*r_delta_avg);    if(verbosity>=1)       printf("Setting default regularization parameter C=%.4f\n",	     learn_parm->svm_c);  }  for(i=0;i<totdoc;i++) {    /* various inits */    inconsistent[i]=0;    a[i]=0;    lin[i]=0;    unlabeled[i]=0;    if(label[i] > 0) {      learn_parm->svm_cost[i]=learn_parm->svm_c*learn_parm->svm_costratio;    }    else if(label[i] < 0) {      learn_parm->svm_cost[i]=learn_parm->svm_c;    }  }  /* caching makes no sense for linear 
kernel */  if(kernel_parm->kernel_type == LINEAR) {    kernel_cache = NULL;     }   if(verbosity==1) {    printf("Optimizing"); fflush(stdout);  }  /* train the svm */  iterations=optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,				     kernel_parm,kernel_cache,&shrink_state,model,				     inconsistent,unlabeled,a,lin,c,&timing_profile,				     &maxdiff,(long)-1,				     (long)1);    if(verbosity>=1) {    if(verbosity==1) printf("done. (%ld iterations)\n",iterations);    printf("Optimization finished (maxdiff=%.5f).\n",maxdiff);     runtime_end=get_runtime();    if(verbosity>=2) {      printf("Runtime in cpu-seconds: %.2f (%.2f%% for kernel/%.2f%% for optimizer/%.2f%% for final/%.2f%% for update/%.2f%% for model/%.2f%% for check/%.2f%% for select)\n",        ((float)runtime_end-(float)runtime_start)/100.0,        (100.0*timing_profile.time_kernel)/(float)(runtime_end-runtime_start),	(100.0*timing_profile.time_opti)/(float)(runtime_end-runtime_start),	(100.0*timing_profile.time_shrink)/(float)(runtime_end-runtime_start),        (100.0*timing_profile.time_update)/(float)(runtime_end-runtime_start),        (100.0*timing_profile.time_model)/(float)(runtime_end-runtime_start),        (100.0*timing_profile.time_check)/(float)(runtime_end-runtime_start),        (100.0*timing_profile.time_select)/(float)(runtime_end-runtime_start));    }    else {      printf("Runtime in cpu-seconds: %.2f\n",	     (runtime_end-runtime_start)/100.0);    }    if(learn_parm->remove_inconsistent) {	        inconsistentnum=0;      for(i=0;i<totdoc;i++) 	if(inconsistent[i]) 	  inconsistentnum++;      printf("Number of SV: %ld (plus %ld inconsistent examples)\n",	     model->sv_num-1,inconsistentnum);    }    else {      upsupvecnum=0;      for(i=1;i<model->sv_num;i++) {	if(fabs(model->alpha[i]) >= 	   (learn_parm->svm_cost[(model->supvec[i])->docnum]-	    learn_parm->epsilon_a)) 	  upsupvecnum++;      }      printf("Number of SV: %ld (including %ld at upper bound)\n",	     
model->sv_num-1,upsupvecnum);    }        if((verbosity>=1) && (!learn_parm->skip_final_opt_check)) {      loss=0;      model_length=0;       for(i=0;i<totdoc;i++) {	if((lin[i]-model->b)*(double)label[i] < (-learn_parm->eps+(double)label[i]*c[i])-learn_parm->epsilon_crit)	  loss+=-learn_parm->eps+(double)label[i]*c[i]-(lin[i]-model->b)*(double)label[i];	model_length+=a[i]*label[i]*lin[i];      }      model_length=sqrt(model_length);      fprintf(stdout,"L1 loss: loss=%.5f\n",loss);      fprintf(stdout,"Norm of weight vector: |w|=%.5f\n",model_length);      example_length=estimate_sphere(model,kernel_parm);       fprintf(stdout,"Norm of longest example vector: |x|=%.5f\n",	      length_of_longest_document_vector(docs,totdoc,kernel_parm));    }    if(verbosity>=1) {      printf("Number of kernel evaluations: %ld\n",kernel_cache_statistic);    }  }      if(learn_parm->alphafile[0])    write_alphas(learn_parm->alphafile,a,label,totdoc);  /* this makes sure the model we return does not contain pointers to the      temporary documents */  for(i=1;i<model->sv_num;i++) {     j=model->supvec[i]->docnum;    if(j >= (totdoc/2)) {      j=totdoc-j-1;    }    model->supvec[i]=&(docs_org[j]);  }    shrink_state_cleanup(&shrink_state);  free(docs);  free(label);  free(inconsistent);  free(unlabeled);  free(c);  free(a);  free(a_fullset);  free(xi_fullset);  free(lin);  free(learn_parm->svm_cost);}long optimize_to_convergence(DOC *docs, long int *label, long int totdoc, 			     long int totwords, LEARN_PARM *learn_parm, 			     KERNEL_PARM *kernel_parm, 			     KERNEL_CACHE *kernel_cache, 			     SHRINK_STATE *shrink_state, MODEL *model, 			     long int *inconsistent, long int *unlabeled, 			     double *a, double *lin, double *c, 			     TIMING *timing_profile, double *maxdiff, 			     long int heldout, long int retrain)     /* docs: Training vectors (x-part) */     /* label: Training labels/value (y-part, zero if test example for			      transduction) */     /* totdoc: Number of 
examples in docs/label */     /* totwords: Number of features (i.e. highest feature index) */     /* laern_parm: Learning paramenters */     /* kernel_parm: Kernel paramenters */     /* kernel_cache: Initialized/partly filled Cache */     /* shrink_state: State of active variables */     /* model: Returns learning result */     /* inconsistent: examples thrown out as inconstistent */     /* unlabeled: test examples for transduction */     /* a: alphas */     /* lin: linear component of gradient */     /* c: upper bounds on alphas */     /* maxdiff: returns maximum violation of KT-conditions */     /* heldout: marks held-out example for leave-one-out (or -1) */     /* retrain: selects training mode (1=regular / 2=holdout) */{  long *chosen,*key,i,j,jj,*last_suboptimal_at,noshrink;  long inconsistentnum,choosenum,already_chosen=0,iteration;  long misclassified,supvecnum=0,*active2dnum,inactivenum;  long *working2dnum,*selexam;  long activenum;  double criterion,eq;  double *a_old;  long t0=0,t1=0,t2=0,t3=0,t4=0,t5=0,t6=0; /* timing */  long transductcycle;  long transduction;  double epsilon_crit_org;   double *selcrit;  /* buffer for sorting */          CFLOAT *aicache;  /* buffer to keep one row of hessian */  double *weights;  /* buffer for weight vector in linear case */  QP qp;            /* buffer for one quadratic program */  epsilon_crit_org=learn_parm->epsilon_crit; /* save org */  if(kernel_parm->kernel_type == LINEAR) {    learn_parm->epsilon_crit=2.0;    kernel_cache=NULL;   /* caching makes no sense for linear kernel */  }   learn_parm->epsilon_shrink=2;  (*maxdiff)=1;  learn_parm->totwords=totwords;  chosen = (long *)my_malloc(sizeof(long)*totdoc);  last_suboptimal_at = (long *)my_malloc(sizeof(long)*totdoc);  key = (long *)my_malloc(sizeof(long)*(totdoc+11));   selcrit = (double *)my_malloc(sizeof(double)*totdoc);  selexam = (long *)my_malloc(sizeof(long)*totdoc);  a_old = (double *)my_malloc(sizeof(double)*totdoc);  aicache = (CFLOAT 
*)my_malloc(sizeof(CFLOAT)*totdoc);  working2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));  active2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));  qp.opt_ce = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);  qp.opt_ce0 = (double *)my_malloc(sizeof(double));  qp.opt_g = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize				 *learn_parm->svm_maxqpsize);  qp.opt_g0 = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);  qp.opt_xinit = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);  qp.opt_low=(double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);  qp.opt_up=(double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);  weights=(double *)my_malloc(sizeof(double)*(totwords+1));  choosenum=0;  inconsistentnum=0;  transductcycle=0;  transduction=0;  if(!retrain) retrain=1;  iteration=1;  if(kernel_cache) {    kernel_cache->time=iteration;  /* for lru cache */    kernel_cache_reset_lru(kernel_cache);  }  for(i=0;i<totdoc;i++) {    /* various inits */    chosen[i]=0;    a_old[i]=a[i];    last_suboptimal_at[i]=1;    if(inconsistent[i])       inconsistentnum++;    if(unlabeled[i]) {      transduction=1;    }  }  activenum=compute_index(shrink_state->active,totdoc,active2dnum);  inactivenum=totdoc-activenum;  clear_index(working2dnum);                            /* repeat this loop until we have convergence */  for(;retrain;iteration++) {    if(kernel_cache)      kernel_cache->time=iteration;  /* for lru cache */    if(verbosity>=2) {      printf(	"Iteration %ld: ",iteration); fflush(stdout);    }    else if(verbosity==1) {      printf("."); fflush(stdout);    }    if(verbosity>=2) t0=get_runtime();    if(verbosity>=3) {      printf("\nSelecting working set... 
"); fflush(stdout);     }    if(learn_parm->svm_newvarsinqp>learn_parm->svm_maxqpsize)       learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;    i=0;    for(jj=0;(j=working2dnum[jj])>=0;jj++) { /* clear working set */      if((chosen[j]>=(learn_parm->svm_maxqpsize/		      minl(learn_parm->svm_maxqpsize,			   learn_parm->svm_newvarsinqp))) 	 || (inconsistent[j])	 || (j == heldout)) {	chosen[j]=0; 	choosenum--;       }      else {	chosen[j]++;	working2dnum[i++]=j;      }    }    working2dnum[i]=-1;    if(retrain == 2) {      choosenum=0;      for(jj=0;(j=working2dnum[jj])>=0;jj++) { /* fully clear working set */	chosen[j]=0;       }      clear_index(working2dnum);      for(i=0;i<totdoc;i++) { /* set inconsistent examples to zero (-i 1) */	if((inconsistent[i] || (heldout==i)) && (a[i] != 0.0)) {	  chosen[i]=99999;	  choosenum++;	  a[i]=0;	}      }      if(learn_parm->biased_hyperplane) {	eq=0;	for(i=0;i<totdoc;i++) { /* make sure we fulfill equality constraint */	  eq+=a[i]*label[i];	}	for(i=0;(i<totdoc) && (fabs(eq) > learn_parm->epsilon_a);i++) {	  if((eq*label[i] > 0) && (a[i] > 0)) {	    chosen[i]=88888;	    choosenum++;	    if((eq*label[i]) > a[i]) {	      eq-=(a[i]*label[i]);	      a[i]=0;	    }	    else {	      a[i]-=(eq*label[i]);	      eq=0;	    }	  }	}      }      compute_index(chosen,totdoc,working2dnum);    }    else {      /* select working set according to steepest gradient */      if(iteration % 101) {        already_chosen=0;	if((minl(learn_parm->svm_newvarsinqp,		 learn_parm->svm_maxqpsize-choosenum)>=4) 	   && (kernel_parm->kernel_type != LINEAR)) {	  /* select part of the working set from cache */	  already_chosen=select_next_qp_subproblem_grad_cache(			      label,unlabeled,a,lin,c,totdoc,			      (long)(minl(learn_parm->svm_maxqpsize-choosenum,					  learn_parm->svm_newvarsinqp)

?? 快捷鍵說明

複製代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
色综合天天狠狠| 欧美亚洲综合网| 国产精品每日更新| 国产成人夜色高潮福利影视| 欧美在线观看视频一区二区 | 91老司机福利 在线| 性感美女极品91精品| 日韩精品在线一区二区| 成人美女在线视频| 日韩高清在线不卡| 欧美极品aⅴ影院| 欧美精品久久天天躁| 成人av影视在线观看| 人人精品人人爱| 亚洲一区二区三区中文字幕| 精品久久久三级丝袜| caoporm超碰国产精品| 日韩精品免费视频人成| 久久综合狠狠综合久久激情| 成人性生交大片| 日韩主播视频在线| 国产午夜亚洲精品羞羞网站| 欧美午夜精品久久久久久超碰 | 在线精品视频免费播放| 精品一区二区三区在线观看| 国产精品短视频| 欧美电影免费观看高清完整版在线观看| 精品一区二区日韩| 亚欧色一区w666天堂| 亚洲天堂av老司机| 精品国产91洋老外米糕| 91国在线观看| 日韩一区二区视频| 色诱亚洲精品久久久久久| 久久99最新地址| 久久99精品久久久| 美女网站色91| 狠狠色狠狠色综合系列| 国产日产欧美一区二区三区| 日韩欧美aaaaaa| 久久综合九色综合97婷婷 | 色综合视频一区二区三区高清| 国产在线一区二区综合免费视频| 日韩av一级电影| 亚洲美女偷拍久久| 中文字幕一区二区三区在线不卡 | 成人毛片在线观看| 成人sese在线| 91国产成人在线| 91精品国产色综合久久不卡蜜臀| 色婷婷精品大视频在线蜜桃视频| 久久99精品国产| aaa亚洲精品一二三区| 在线精品视频免费播放| 欧美丰满少妇xxxbbb| 精品va天堂亚洲国产| 国产精品污污网站在线观看| 亚洲精品一卡二卡| 韩国女主播一区| 日本道在线观看一区二区| 日韩精品一区二区三区蜜臀| 国产精品嫩草99a| 亚洲免费观看高清完整版在线观看熊| 国产精品色在线观看| 国产精品久久久久久妇女6080| 久久久亚洲高清| 亚洲一区二区视频在线观看| 免费在线观看成人| 床上的激情91.| 欧美日韩一本到| 久久精品一区二区三区不卡| 国产精品综合一区二区三区| 亚洲精品免费在线播放| 久热成人在线视频| 92国产精品观看| 久久久高清一区二区三区| 亚洲国产欧美一区二区三区丁香婷| 免费欧美日韩国产三级电影| 水野朝阳av一区二区三区| 国产一区二三区好的| 欧美三级日韩三级国产三级| 久久久电影一区二区三区| 午夜伦理一区二区| 国产美女主播视频一区| 欧美日韩国产综合视频在线观看 | 91麻豆福利精品推荐| 久久精品视频一区二区| 午夜欧美2019年伦理| 在线一区二区三区四区五区| 久久精品一级爱片| 免费观看久久久4p| 91精品国产综合久久久久| 一区二区视频在线看| 一本色道亚洲精品aⅴ| 亚洲色图制服诱惑| 色综合久久久久网| 亚洲最大的成人av| 日本韩国精品在线| 亚洲午夜电影在线观看| 在线观看网站黄不卡| 天天做天天摸天天爽国产一区| 欧美日韩一卡二卡| 石原莉奈一区二区三区在线观看| 欧美亚洲国产一区二区三区va| 自拍偷自拍亚洲精品播放| 94-欧美-setu| 国产精品亚洲视频| 夜夜嗨av一区二区三区| 日韩欧美aaaaaa| 日本精品一区二区三区高清| 亚洲电影你懂得| 欧美精品一区二区三区高清aⅴ| 成人黄色国产精品网站大全在线免费观看 | 欧美军同video69gay| 蜜臀av在线播放一区二区三区| 欧美一区二区视频在线观看2022 | 精品盗摄一区二区三区| 欧美日韩精品一区二区三区 | 欧美亚洲高清一区二区三区不卡| 国产综合久久久久久鬼色| 水蜜桃久久夜色精品一区的特点| 国产精品亲子伦对白| 欧美一区二区在线观看| 成人晚上爱看视频| 国产精品亚洲午夜一区二区三区| 国产精品自在在线| 成人激情文学综合网| 粉嫩在线一区二区三区视频| 成人深夜视频在线观看| 99re视频精品| 欧美性一二三区| 制服.丝袜.亚洲.中文.综合| 欧美亚洲国产bt| 精品国产亚洲一区二区三区在线观看| 日韩三级精品电影久久久| 国产片一区二区| 亚洲精品久久久久久国产精华液| 亚洲日本va午夜在线影院| 亚洲一区在线观看视频| 久久99国产精品尤物| 成人的网站免费观看| 日本精品视频一区二区| 欧美成人性战久久| 欧美日韩一区成人| 久久超碰97人人做人人爱| 粉嫩蜜臀av国产精品网站| 在线观看视频91| 国产精品蜜臀av| 精品一区二区综合| 欧美亚洲精品一区| 中文字幕不卡三区| 日日噜噜夜夜狠狠视频欧美人| 国产成人欧美日韩在线电影| 欧美久久一二区| 亚洲天堂久久久久久久| 国产一区二区三区四区五区美女| 欧美日韩一级黄| 亚洲mv在线观看| 在线观看国产一区二区| 国产精品久99| av动漫一区二区| 
国产精品久久久久久久久免费桃花 | 亚洲一区二区三区在线看| 国产91在线看| 国产欧美一区二区三区网站| 激情丁香综合五月| 久久久美女毛片| 亚洲一区在线免费观看| 日本韩国一区二区| 国产精品午夜春色av| 国产综合久久久久影院| www精品美女久久久tv| 久久99最新地址| 久久久久久综合| 91丨九色丨国产丨porny| 亚洲精品成人少妇| 色综合久久久网| 久久精品国产一区二区三| 国产精品美女久久福利网站| 色婷婷久久久久swag精品 | 中文字幕av一区二区三区| 国产精品一区三区| 亚洲最大的成人av| 日韩视频123| 99国产欧美久久久精品| 一区二区三区在线免费视频| 欧美人与z0zoxxxx视频| 成人免费毛片片v| 性做久久久久久久免费看| 国产精品素人视频| 欧美精品1区2区3区| 蜜臀av一区二区三区| 欧美成人女星排名| 日韩一区二区不卡| 欧美午夜精品免费| a级精品国产片在线观看| 国产伦精品一区二区三区视频青涩 | 色综合久久久网| 国产精品综合二区| 国产真实乱子伦精品视频|