svm_learn.c

This is the source package of SVM-light (the support vector machine implementation) that I recently obtained. The package is mainly used for text classification.

Language: C
Page 1 of 5
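For orientation: SVM-light is normally driven from the command line for text-classification tasks, reading training examples in its sparse <target> <feature>:<value> format. A minimal run looks roughly like the sketch below; the file names and the -c value are purely illustrative.

    # train.dat: one example per line, e.g.  +1 1:0.43 3:0.12 9284:0.2  # positive document
    svm_learn -c 1.0 train.dat model.dat           # train; -c sets the error/margin trade-off
    svm_classify test.dat model.dat predictions    # write one decision value per test example

The listing below begins partway through the training routine (final bookkeeping for shared slacks) and continues into optimize_to_convergence, the main optimization loop.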
    (void)compute_index(index,totdoc,index2dnum);
    slack=(double *)my_malloc(sizeof(double)*(maxslackid+1));
    alphaslack=(double *)my_malloc(sizeof(double)*(maxslackid+1));
    for(i=0;i<=maxslackid;i++) {    /* init shared slacks */
      slack[i]=0;
      alphaslack[i]=0;
    }
    compute_shared_slacks(docs,label,a,lin,c,index2dnum,learn_parm,
			  slack,alphaslack);
    loss=0;
    model->at_upper_bound=0;
    svsetnum=0;
    for(i=0;i<=maxslackid;i++) {    /* create full index */
      loss+=slack[i];
      if(alphaslack[i] > (learn_parm->svm_c - learn_parm->epsilon_a)) 
	model->at_upper_bound++;
      if(alphaslack[i] > learn_parm->epsilon_a)
	svsetnum++;
    }
    free(index);
    free(index2dnum);
    free(slack);
    free(alphaslack);
  }
  
  if((verbosity>=1) && (!learn_parm->skip_final_opt_check)) {
    if(learn_parm->sharedslack) {
      printf("Number of SV: %ld\n",
	     model->sv_num-1);
      printf("Number of non-zero slack variables: %ld (out of %ld)\n",
	     model->at_upper_bound,svsetnum);
      fprintf(stdout,"L1 loss: loss=%.5f\n",loss);
    }
    else {
      upsupvecnum=0;
      for(i=1;i<model->sv_num;i++) {
	if(fabs(model->alpha[i]) >= 
	   (learn_parm->svm_cost[(model->supvec[i])->docnum]-
	    learn_parm->epsilon_a)) 
	  upsupvecnum++;
      }
      printf("Number of SV: %ld (including %ld at upper bound)\n",
	     model->sv_num-1,upsupvecnum);
      fprintf(stdout,"L1 loss: loss=%.5f\n",loss);
    }
    example_length=estimate_sphere(model,kernel_parm); 
    fprintf(stdout,"Norm of longest example vector: |x|=%.5f\n",
	    length_of_longest_document_vector(docs,totdoc,kernel_parm));
  }
  if(verbosity>=1) {
    printf("Number of kernel evaluations: %ld\n",kernel_cache_statistic);
  }
    
  if(alpha) {
    for(i=0;i<totdoc;i++) {    /* copy final alphas */
      alpha[i]=a[i];
    }
  }
 
  if(learn_parm->alphafile[0])
    write_alphas(learn_parm->alphafile,a,label,totdoc);
  
  shrink_state_cleanup(&shrink_state);
  free(label);
  free(unlabeled);
  free(inconsistent);
  free(c);
  free(a);
  free(lin);
  free(learn_parm->svm_cost);
}


long optimize_to_convergence(DOC **docs, long int *label, long int totdoc, 
			     long int totwords, LEARN_PARM *learn_parm, 
			     KERNEL_PARM *kernel_parm, 
			     KERNEL_CACHE *kernel_cache, 
			     SHRINK_STATE *shrink_state, MODEL *model, 
			     long int *inconsistent, long int *unlabeled, 
			     double *a, double *lin, double *c, 
			     TIMING *timing_profile, double *maxdiff, 
			     long int heldout, long int retrain)
     /* docs: Training vectors (x-part) */
     /* label: Training labels/values (y-part, zero if test example for transduction) */
     /* totdoc: Number of examples in docs/label */
     /* totwords: Number of features (i.e. highest feature index) */
     /* learn_parm: Learning parameters */
     /* kernel_parm: Kernel parameters */
     /* kernel_cache: Initialized/partly filled Cache, if using a kernel. 
                      NULL if linear. */
     /* shrink_state: State of active variables */
     /* model: Returns learning result */
     /* inconsistent: examples thrown out as inconsistent */
     /* unlabeled: test examples for transduction */
     /* a: alphas */
     /* lin: linear component of gradient */
     /* c: right hand side of inequalities (margin) */
     /* maxdiff: returns maximum violation of KT-conditions */
     /* heldout: marks held-out example for leave-one-out (or -1) */
     /* retrain: selects training mode (1=regular / 2=holdout) */
{
  long *chosen,*key,i,j,jj,*last_suboptimal_at,noshrink;
  long inconsistentnum,choosenum,already_chosen=0,iteration;
  long misclassified,supvecnum=0,*active2dnum,inactivenum;
  long *working2dnum,*selexam;
  long activenum;
  double criterion,eq;
  double *a_old;
  long t0=0,t1=0,t2=0,t3=0,t4=0,t5=0,t6=0; /* timing */
  long transductcycle;
  long transduction;
  double epsilon_crit_org; 
  double bestmaxdiff;
  long   bestmaxdiffiter,terminate;

  double *selcrit;  /* buffer for sorting */        
  CFLOAT *aicache;  /* buffer to keep one row of hessian */
  double *weights;  /* buffer for weight vector in linear case */
  QP qp;            /* buffer for one quadratic program */

  epsilon_crit_org=learn_parm->epsilon_crit; /* save org */
  if(kernel_parm->kernel_type == LINEAR) {
    learn_parm->epsilon_crit=2.0;
    kernel_cache=NULL;   /* caching makes no sense for linear kernel */
  } 
  learn_parm->epsilon_shrink=2;
  (*maxdiff)=1;

  learn_parm->totwords=totwords;

  /* allocate working-set bookkeeping, selection buffers, and storage for one
     QP subproblem of at most svm_maxqpsize variables */
  chosen = (long *)my_malloc(sizeof(long)*totdoc);
  last_suboptimal_at = (long *)my_malloc(sizeof(long)*totdoc);
  key = (long *)my_malloc(sizeof(long)*(totdoc+11)); 
  selcrit = (double *)my_malloc(sizeof(double)*totdoc);
  selexam = (long *)my_malloc(sizeof(long)*totdoc);
  a_old = (double *)my_malloc(sizeof(double)*totdoc);
  aicache = (CFLOAT *)my_malloc(sizeof(CFLOAT)*totdoc);
  working2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));
  active2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));
  qp.opt_ce = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);
  qp.opt_ce0 = (double *)my_malloc(sizeof(double));
  qp.opt_g = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize
				 *learn_parm->svm_maxqpsize);
  qp.opt_g0 = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);
  qp.opt_xinit = (double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);
  qp.opt_low=(double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);
  qp.opt_up=(double *)my_malloc(sizeof(double)*learn_parm->svm_maxqpsize);
  weights=(double *)my_malloc(sizeof(double)*(totwords+1));

  choosenum=0;
  inconsistentnum=0;
  transductcycle=0;
  transduction=0;
  if(!retrain) retrain=1;
  iteration=1;
  bestmaxdiffiter=1;
  bestmaxdiff=999999999;
  terminate=0;

  if(kernel_cache) {
    kernel_cache->time=iteration;  /* for lru cache */
    kernel_cache_reset_lru(kernel_cache);
  }

  for(i=0;i<totdoc;i++) {    /* various inits */
    chosen[i]=0;
    a_old[i]=a[i];
    last_suboptimal_at[i]=1;
    if(inconsistent[i]) 
      inconsistentnum++;
    if(unlabeled[i]) {
      transduction=1;
    }
  }
  activenum=compute_index(shrink_state->active,totdoc,active2dnum);
  inactivenum=totdoc-activenum;
  clear_index(working2dnum);

                            /* repeat this loop until we have convergence */
  for(;retrain && (!terminate);iteration++) {

    if(kernel_cache)
      kernel_cache->time=iteration;  /* for lru cache */
    if(verbosity>=2) {
      printf(
	"Iteration %ld: ",iteration); fflush(stdout);
    }
    else if(verbosity==1) {
      printf("."); fflush(stdout);
    }

    if(verbosity>=2) t0=get_runtime();
    if(verbosity>=3) {
      printf("\nSelecting working set... "); fflush(stdout); 
    }

    /* never request more new variables per iteration than fit into one QP subproblem */
    if(learn_parm->svm_newvarsinqp>learn_parm->svm_maxqpsize) 
      learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;

    i=0;
    for(jj=0;(j=working2dnum[jj])>=0;jj++) { /* clear working set */
      if((chosen[j]>=(learn_parm->svm_maxqpsize/
		      minl(learn_parm->svm_maxqpsize,
			   learn_parm->svm_newvarsinqp))) 
	 || (inconsistent[j])
	 || (j == heldout)) {
	chosen[j]=0; 
	choosenum--; 
      }
      else {
	chosen[j]++;
	working2dnum[i++]=j;
      }
    }
    working2dnum[i]=-1;

    if(retrain == 2) {
      choosenum=0;
      for(jj=0;(j=working2dnum[jj])>=0;jj++) { /* fully clear working set */
	chosen[j]=0; 
      }
      clear_index(working2dnum);
      for(i=0;i<totdoc;i++) { /* set inconsistent examples to zero (-i 1) */
	if((inconsistent[i] || (heldout==i)) && (a[i] != 0.0)) {
	  chosen[i]=99999;
	  choosenum++;
	  a[i]=0;
	}
      }
      if(learn_parm->biased_hyperplane) {
	eq=0;
	for(i=0;i<totdoc;i++) { /* make sure we fulfill equality constraint */
	  eq+=a[i]*label[i];
	}
	for(i=0;(i<totdoc) && (fabs(eq) > learn_parm->epsilon_a);i++) {
	  if((eq*label[i] > 0) && (a[i] > 0)) {
	    chosen[i]=88888;
	    choosenum++;
	    if((eq*label[i]) > a[i]) {
	      eq-=(a[i]*label[i]);
	      a[i]=0;
	    }
	    else {
	      a[i]-=(eq*label[i]);
	      eq=0;
	    }
	  }
	}
      }
      compute_index(chosen,totdoc,working2dnum);
    }
    else {      /* select working set according to steepest gradient */
      if(iteration % 101) {
        already_chosen=0;
	if((minl(learn_parm->svm_newvarsinqp,
		 learn_parm->svm_maxqpsize-choosenum)>=4) 
	   && (kernel_parm->kernel_type != LINEAR)) {
	  /* select part of the working set from cache */
	  already_chosen=select_next_qp_subproblem_grad(
			      label,unlabeled,a,lin,c,totdoc,
			      (long)(minl(learn_parm->svm_maxqpsize-choosenum,
					  learn_parm->svm_newvarsinqp)
				     /2),
			      learn_parm,inconsistent,active2dnum,
			      working2dnum,selcrit,selexam,kernel_cache,1,
			      key,chosen);
	  choosenum+=already_chosen;
	}
	choosenum+=select_next_qp_subproblem_grad(
                              label,unlabeled,a,lin,c,totdoc,
                              minl(learn_parm->svm_maxqpsize-choosenum,
				   learn_parm->svm_newvarsinqp-already_chosen),
                              learn_parm,inconsistent,active2dnum,
			      working2dnum,selcrit,selexam,kernel_cache,0,key,
			      chosen);
      }
      else { /* once in a while, select a somewhat random working set
		to break out of infinite loops caused by numerical
		inaccuracies in the core qp-solver */
	choosenum+=select_next_qp_subproblem_rand(
                              label,unlabeled,a,lin,c,totdoc,
                              minl(learn_parm->svm_maxqpsize-choosenum,
				   learn_parm->svm_newvarsinqp),
                              learn_parm,inconsistent,active2dnum,
			      working2dnum,selcrit,selexam,kernel_cache,key,
			      chosen,iteration);
      }
    }

    if(verbosity>=2) {
      printf(" %ld vectors chosen\n",choosenum); fflush(stdout); 
    }

    if(verbosity>=2) t1=get_runtime();

    if(kernel_cache) 
      cache_multiple_kernel_rows(kernel_cache,docs,working2dnum,
				 choosenum,kernel_parm); 
    
    if(verbosity>=2) t2=get_runtime();
    if(retrain != 2) {
      optimize_svm(docs,label,unlabeled,inconsistent,0.0,chosen,active2dnum,
		   model,totdoc,working2dnum,choosenum,a,lin,c,learn_parm,
		   aicache,kernel_parm,&qp,&epsilon_crit_org);
    }

    if(verbosity>=2) t3=get_runtime();
    update_linear_component(docs,label,active2dnum,a,a_old,working2dnum,totdoc,
			    totwords,kernel_parm,kernel_cache,lin,aicache,
			    weights);

    if(verbosity>=2) t4=get_runtime();
    supvecnum=calculate_svm_model(docs,label,unlabeled,lin,a,a_old,c,
		                  learn_parm,working2dnum,active2dnum,model);

    if(verbosity>=2) t5=get_runtime();

    /* The following computation of the objective function works only */
    /* relative to the active variables */
    if(verbosity>=3) {
      criterion=compute_objective_function(a,lin,c,learn_parm->eps,label,
		                           active2dnum);
      printf("Objective function (over active variables): %.16f\n",criterion);
      fflush(stdout); 
    }

    for(jj=0;(i=working2dnum[jj])>=0;jj++) {
      a_old[i]=a[i];
    }

    if(retrain == 2) {  /* reset inconsistent unlabeled examples */
      for(i=0;(i<totdoc);i++) {
	if(inconsistent[i] && unlabeled[i]) {
	  inconsistent[i]=0;
	  label[i]=0;
	}
      }
    }

    retrain=check_optimality(model,label,unlabeled,a,lin,c,totdoc,learn_parm,
			     maxdiff,epsilon_crit_org,&misclassified,
			     inconsistent,active2dnum,last_suboptimal_at,
			     iteration,kernel_parm);

    if(verbosity>=2) {
      t6=get_runtime();
      timing_profile->time_select+=t1-t0;
      timing_profile->time_kernel+=t2-t1;
      timing_profile->time_opti+=t3-t2;
      timing_profile->time_update+=t4-t3;
      timing_profile->time_model+=t5-t4;
      timing_profile->time_check+=t6-t5;
    }

    /* checking whether optimizer got stuck */
    if((*maxdiff) < bestmaxdiff) {
      bestmaxdiff=(*maxdiff);
      bestmaxdiffiter=iteration;
    }
    if(iteration > (bestmaxdiffiter+learn_parm->maxiter)) { 
      /* long time no progress? */
      terminate=1;
      retrain=0;
      if(verbosity>=1) 
	printf("\nWARNING: Relaxing KT-Conditions due to slow progress! Terminating!\n");
