亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? svm_learn.c

?? 關于支持向量機的源代碼 包括算法和說明
?? C
?? 第 1 頁 / 共 5 頁
字號:
	 < 1.0) { 
	/* guaranteed to not produce a leave-one-out error */
	if(verbosity==1) {
	  printf("+"); fflush(stdout); 
	}
      }
      else if(xi_fullset[heldout] > 1.0) {
	/* guaranteed to produce a leave-one-out error */
	loo_count++;
	if(label[heldout] > 0)  loo_count_pos++; else loo_count_neg++;
	if(verbosity==1) {
	  printf("-"); fflush(stdout); 
	}
      }
      else {
	loocomputed++;
	heldout_c=learn_parm->svm_cost[heldout]; /* set upper bound to zero */
	learn_parm->svm_cost[heldout]=0;
	/* make sure heldout example is not currently  */
	/* shrunk away. Assumes that lin is up to date! */
	shrink_state.active[heldout]=1;  
	if(verbosity>=2) 
	  printf("\nLeave-One-Out test on example %ld\n",heldout);
	if(verbosity>=1) {
	  printf("(?[%ld]",heldout); fflush(stdout); 
	}
	
	optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,
				kernel_parm,
				kernel_cache,&shrink_state,model,inconsistent,unlabeled,
				a,lin,c,&timing_profile,
				&maxdiff,heldout,(long)2);

	/* printf("%.20f\n",(lin[heldout]-model->b)*(double)label[heldout]); */

	if(((lin[heldout]-model->b)*(double)label[heldout]) <= 0.0) { 
	  loo_count++;                            /* there was a loo-error */
	  if(label[heldout] > 0)  loo_count_pos++; else loo_count_neg++;
	  if(verbosity>=1) {
	    printf("-)"); fflush(stdout); 
	  }
	}
	else {
	  if(verbosity>=1) {
	    printf("+)"); fflush(stdout); 
	  }
	}
	/* now we need to restore the original data set*/
	learn_parm->svm_cost[heldout]=heldout_c; /* restore upper bound */
      }
    } /* end of leave-one-out loop */


    if(verbosity>=1) {
      printf("\nRetrain on full problem"); fflush(stdout); 
    }
    optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,
			    kernel_parm,
			    kernel_cache,&shrink_state,model,inconsistent,unlabeled,
			    a,lin,c,&timing_profile,
			    &maxdiff,(long)-1,(long)1);
    if(verbosity >= 1) 
      printf("done.\n");
    
    
    /* after all leave-one-out computed */
    model->loo_error=100.0*loo_count/(double)totdoc;
    model->loo_recall=(1.0-(double)loo_count_pos/(double)trainpos)*100.0;
    model->loo_precision=(trainpos-loo_count_pos)/
      (double)(trainpos-loo_count_pos+loo_count_neg)*100.0;
    if(verbosity >= 1) {
      fprintf(stdout,"Leave-one-out estimate of the error: error=%.2f%%\n",
	      model->loo_error);
      fprintf(stdout,"Leave-one-out estimate of the recall: recall=%.2f%%\n",
	      model->loo_recall);
      fprintf(stdout,"Leave-one-out estimate of the precision: precision=%.2f%%\n",
	      model->loo_precision);
      fprintf(stdout,"Actual leave-one-outs computed:  %ld (rho=%.2f)\n",
	      loocomputed,learn_parm->rho);
      printf("Runtime for leave-one-out in cpu-seconds: %.2f\n",
	     (double)(get_runtime()-runtime_start_loo)/100.0);
    }
  }
    
  if(learn_parm->alphafile[0])
    write_alphas(learn_parm->alphafile,a,label,totdoc);
  
  shrink_state_cleanup(&shrink_state);
  free(label);
  free(inconsistent);
  free(unlabeled);
  free(c);
  free(a);
  free(a_fullset);
  free(xi_fullset);
  free(lin);
  free(learn_parm->svm_cost);
}


/* Learns an SVM regression model based on the training data in
   docs/value. The regression problem is reduced to a classification
   problem in standard form (each example appears twice, once with
   label +1 and once with label -1, with constraints derived from the
   target value), which is then solved by optimize_to_convergence().
   The resulting model is returned in the structure model. */

void svm_learn_regression(DOC **docs, double *value, long int totdoc, 
			  long int totwords, LEARN_PARM *learn_parm, 
			  KERNEL_PARM *kernel_parm, 
			  KERNEL_CACHE **kernel_cache, MODEL *model)
     /* docs:        Training vectors (x-part) */
     /* value:       Training value (y-part) */
     /* totdoc:      Number of examples in docs/value */
     /* totwords:    Number of features (i.e. highest feature index) */
     /* learn_parm:  Learning paramenters */
     /* kernel_parm: Kernel paramenters */
     /* kernel_cache:Initialized Cache, if using a kernel. NULL if
                     linear. Note that it will be free'd and reassigned */
     /* model:       Returns learning result (assumed empty before called) */
{
  long *inconsistent,i,j;
  long inconsistentnum;
  long upsupvecnum;
  double loss,model_length,example_length;
  double maxdiff,*lin,*a,*c;
  long runtime_start,runtime_end;
  long iterations,kernel_cache_size;
  long *unlabeled;
  double r_delta_sq=0,r_delta,r_delta_avg;
  double *xi_fullset; /* buffer for storing xi on full sample in loo */
  double *a_fullset;  /* buffer for storing alpha on full sample in loo */
  TIMING timing_profile;
  SHRINK_STATE shrink_state;
  DOC **docs_org;
  long *label;

  /* set up regression problem in standard form: duplicate each
     example with labels +1 (index i) and -1 (index 2*totdoc-1-i),
     both constrained by the same target value. The feature vectors
     are shared with the originals (fvec is not copied). */
  docs_org=docs;
  /* BUGFIX: allocate an array of DOC pointers; the original used
     sizeof(DOC) here, over-allocating by a factor of sizeof(DOC)/
     sizeof(DOC *). */
  docs = (DOC **)my_malloc(sizeof(DOC *)*2*totdoc);
  label = (long *)my_malloc(sizeof(long)*2*totdoc);
  c = (double *)my_malloc(sizeof(double)*2*totdoc);
  for(i=0;i<totdoc;i++) {   
    j=2*totdoc-1-i;  /* mirror index for the -1-labeled copy */
    docs[i]=create_example(i,0,0,docs_org[i]->costfactor,docs_org[i]->fvec);
    label[i]=+1;
    c[i]=value[i];
    docs[j]=create_example(j,0,0,docs_org[i]->costfactor,docs_org[i]->fvec);
    label[j]=-1;
    c[j]=value[i];
  }
  totdoc*=2;  /* from here on, work on the doubled problem */

  /* need to get a bigger kernel cache: rebuild it with the same
     megabyte budget but sized for 2*totdoc examples */
  if(*kernel_cache) {
    kernel_cache_size=(*kernel_cache)->buffsize*sizeof(CFLOAT)/(1024*1024);
    kernel_cache_cleanup(*kernel_cache);
    (*kernel_cache)=kernel_cache_init(totdoc,kernel_cache_size);
  }

  runtime_start=get_runtime();
  timing_profile.time_kernel=0;
  timing_profile.time_opti=0;
  timing_profile.time_shrink=0;
  timing_profile.time_update=0;
  timing_profile.time_model=0;
  timing_profile.time_check=0;
  timing_profile.time_select=0;
  kernel_cache_statistic=0;

  learn_parm->totwords=totwords;

  /* make sure -n value is reasonable */
  if((learn_parm->svm_newvarsinqp < 2) 
     || (learn_parm->svm_newvarsinqp > learn_parm->svm_maxqpsize)) {
    learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;
  }

  init_shrink_state(&shrink_state,totdoc,(long)MAXSHRINK);

  inconsistent = (long *)my_malloc(sizeof(long)*totdoc);
  unlabeled = (long *)my_malloc(sizeof(long)*totdoc);
  a = (double *)my_malloc(sizeof(double)*totdoc);
  a_fullset = (double *)my_malloc(sizeof(double)*totdoc);
  xi_fullset = (double *)my_malloc(sizeof(double)*totdoc);
  lin = (double *)my_malloc(sizeof(double)*totdoc);
  learn_parm->svm_cost = (double *)my_malloc(sizeof(double)*totdoc);
  model->supvec = (DOC **)my_malloc(sizeof(DOC *)*(totdoc+2));
  model->alpha = (double *)my_malloc(sizeof(double)*(totdoc+2));
  model->index = (long *)my_malloc(sizeof(long)*(totdoc+2));

  model->at_upper_bound=0;
  model->b=0;	       
  model->supvec[0]=0;  /* element 0 reserved and empty for now */
  model->alpha[0]=0;
  model->lin_weights=NULL;
  model->totwords=totwords;
  model->totdoc=totdoc;
  model->kernel_parm=(*kernel_parm);
  model->sv_num=1;
  model->loo_error=-1;   /* -1 marks "not computed" for the estimates */
  model->loo_recall=-1;
  model->loo_precision=-1;
  model->xa_error=-1;
  model->xa_recall=-1;
  model->xa_precision=-1;
  inconsistentnum=0;

  r_delta=estimate_r_delta(docs,totdoc,kernel_parm);
  r_delta_sq=r_delta*r_delta;

  r_delta_avg=estimate_r_delta_average(docs,totdoc,kernel_parm);
  if(learn_parm->svm_c == 0.0) {  /* default value for C */
    learn_parm->svm_c=1.0/(r_delta_avg*r_delta_avg);
    if(verbosity>=1) 
      printf("Setting default regularization parameter C=%.4f\n",
	     learn_parm->svm_c);
  }

  for(i=0;i<totdoc;i++) {    /* various inits */
    inconsistent[i]=0;
    a[i]=0;
    lin[i]=0;
    unlabeled[i]=0;
    /* per-example upper bound: costratio scales the positive copies */
    if(label[i] > 0) {
      learn_parm->svm_cost[i]=learn_parm->svm_c*learn_parm->svm_costratio*
	docs[i]->costfactor;
    }
    else if(label[i] < 0) {
      learn_parm->svm_cost[i]=learn_parm->svm_c*docs[i]->costfactor;
    }
  }

  /* caching makes no sense for linear kernel */
  if((kernel_parm->kernel_type == LINEAR) && (*kernel_cache)) {
    printf("WARNING: Using a kernel cache for linear case will slow optimization down!\n");
  } 

  if(verbosity==1) {
    printf("Optimizing"); fflush(stdout);
  }

  /* train the svm */
  iterations=optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,
				     kernel_parm,*kernel_cache,&shrink_state,
				     model,inconsistent,unlabeled,a,lin,c,
				     &timing_profile,&maxdiff,(long)-1,
				     (long)1);
  
  if(verbosity>=1) {
    if(verbosity==1) printf("done. (%ld iterations)\n",iterations);

    printf("Optimization finished (maxdiff=%.5f).\n",maxdiff); 

    runtime_end=get_runtime();
    if(verbosity>=2) {
      printf("Runtime in cpu-seconds: %.2f (%.2f%% for kernel/%.2f%% for optimizer/%.2f%% for final/%.2f%% for update/%.2f%% for model/%.2f%% for check/%.2f%% for select)\n",
        ((float)runtime_end-(float)runtime_start)/100.0,
        (100.0*timing_profile.time_kernel)/(float)(runtime_end-runtime_start),
	(100.0*timing_profile.time_opti)/(float)(runtime_end-runtime_start),
	(100.0*timing_profile.time_shrink)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_update)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_model)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_check)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_select)/(float)(runtime_end-runtime_start));
    }
    else {
      printf("Runtime in cpu-seconds: %.2f\n",
	     (runtime_end-runtime_start)/100.0);
    }

    if(learn_parm->remove_inconsistent) {	  
      inconsistentnum=0;
      for(i=0;i<totdoc;i++) 
	if(inconsistent[i]) 
	  inconsistentnum++;
      printf("Number of SV: %ld (plus %ld inconsistent examples)\n",
	     model->sv_num-1,inconsistentnum);
    }
    else {
      /* count support vectors whose alpha sits at the upper bound */
      upsupvecnum=0;
      for(i=1;i<model->sv_num;i++) {
	if(fabs(model->alpha[i]) >= 
	   (learn_parm->svm_cost[(model->supvec[i])->docnum]-
	    learn_parm->epsilon_a)) 
	  upsupvecnum++;
      }
      printf("Number of SV: %ld (including %ld at upper bound)\n",
	     model->sv_num-1,upsupvecnum);
    }
    
    if((verbosity>=1) && (!learn_parm->skip_final_opt_check)) {
      /* compute L1 loss and |w| on the training set */
      loss=0;
      model_length=0; 
      for(i=0;i<totdoc;i++) {
	if((lin[i]-model->b)*(double)label[i] < (-learn_parm->eps+(double)label[i]*c[i])-learn_parm->epsilon_crit)
	  loss+=-learn_parm->eps+(double)label[i]*c[i]-(lin[i]-model->b)*(double)label[i];
	model_length+=a[i]*label[i]*lin[i];
      }
      model_length=sqrt(model_length);
      fprintf(stdout,"L1 loss: loss=%.5f\n",loss);
      fprintf(stdout,"Norm of weight vector: |w|=%.5f\n",model_length);
      /* NOTE(review): example_length is assigned but never used below;
         the call is kept in case estimate_sphere has side effects —
         TODO confirm and drop if it is pure. */
      example_length=estimate_sphere(model,kernel_parm); 
      fprintf(stdout,"Norm of longest example vector: |x|=%.5f\n",
	      length_of_longest_document_vector(docs,totdoc,kernel_parm));
    }
    if(verbosity>=1) {
      printf("Number of kernel evaluations: %ld\n",kernel_cache_statistic);
    }
  }
    
  if(learn_parm->alphafile[0])
    write_alphas(learn_parm->alphafile,a,label,totdoc);

  /* this makes sure the model we return does not contain pointers to the 
     temporary documents: map each support vector index back to the
     original (pre-doubling) example it was copied from */
  for(i=1;i<model->sv_num;i++) { 
    j=model->supvec[i]->docnum;
    if(j >= (totdoc/2)) {
      j=totdoc-j-1;  /* -1-labeled copies were stored mirrored */
    }
    model->supvec[i]=docs_org[j];
  }
  
  shrink_state_cleanup(&shrink_state);
  /* free only the temporary DOC wrappers; the shared feature vectors
     belong to docs_org (deep-free flag 0) */
  for(i=0;i<totdoc;i++)
    free_example(docs[i],0);
  free(docs);
  free(label);
  free(inconsistent);
  free(unlabeled);
  free(c);
  free(a);
  free(a_fullset);
  free(xi_fullset);
  free(lin);
  free(learn_parm->svm_cost);
}

void svm_learn_ranking(DOC **docs, double *rankvalue, long int totdoc, 
		       long int totwords, LEARN_PARM *learn_parm, 
		       KERNEL_PARM *kernel_parm, KERNEL_CACHE **kernel_cache, 
		       MODEL *model)
     /* docs:        Training vectors (x-part) */
     /* rankvalue:   Training target values that determine the ranking */
     /* totdoc:      Number of examples in docs/label */
     /* totwords:    Number of features (i.e. highest feature index) */
     /* learn_parm:  Learning paramenters */
     /* kernel_parm: Kernel paramenters */
     /* kernel_cache:Initialized pointer to Cache of size 1*totdoc, if 
	             using a kernel. NULL if linear. NOTE: Cache is 
                     getting reinitialized in this function */
     /* model:       Returns learning result (assumed empty before called) */
{
  DOC **docdiff;
  long i,j,k,totpair,kernel_cache_size;
  double *target,*alpha,cost;
  long *greater,*lesser;
  MODEL *pairmodel;
  SVECTOR *flow,*fhigh;

  totpair=0;
  for(i=0;i<totdoc;i++) {
    for(j=i+1;j<totdoc;j++) {
      if((docs[i]->queryid==docs[j]->queryid) && (rankvalue[i] != rankvalue[j])) {
	totpair++;
      }
    }
  }

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
欧美丝袜自拍制服另类| 亚洲香蕉伊在人在线观| 欧美日韩免费观看一区三区| 国产自产2019最新不卡| 一区二区三区资源| 久久久久国产精品麻豆ai换脸| 欧美三级中文字幕| 一本色道综合亚洲| 成人小视频免费在线观看| 蜜桃av噜噜一区二区三区小说| 国产欧美日韩一区二区三区在线观看| 欧美性猛片xxxx免费看久爱| 99免费精品在线| 国产成人精品一区二| 麻豆免费看一区二区三区| 亚洲欧洲日韩女同| 亚洲国产精品精华液2区45| 91在线观看一区二区| 国产成人av电影在线| 国产激情91久久精品导航| 久久99国产精品久久99 | 一区免费观看视频| 国产精品美女www爽爽爽| 亚洲欧洲精品一区二区三区| 精品福利视频一区二区三区| 精品久久久久一区| 2014亚洲片线观看视频免费| 国产亚洲人成网站| 久久久国产精华| 中文字幕在线不卡一区 | 国产精品一区一区| a美女胸又www黄视频久久| 99re6这里只有精品视频在线观看| 99re热这里只有精品免费视频 | 韩日av一区二区| 日本欧美一区二区三区乱码| 精品一区二区三区在线视频| 成人午夜又粗又硬又大| 欧美伊人精品成人久久综合97| 在线观看区一区二| 精品国产电影一区二区| 国产精品性做久久久久久| 在线精品亚洲一区二区不卡| 欧美大片在线观看一区| 亚洲日本免费电影| 国产尤物一区二区在线| 欧美主播一区二区三区| 日韩西西人体444www| 亚洲女同一区二区| 成人美女视频在线观看18| 欧美精品视频www在线观看| 久久久噜噜噜久久人人看 | 国产精品情趣视频| 男女男精品视频| 欧美色电影在线| 一个色在线综合| 91在线码无精品| 成人免费在线视频观看| 男男视频亚洲欧美| 欧美精品在线观看一区二区| 国产精品久久久久久久久免费桃花 | 久久久精品人体av艺术| 蜜桃视频一区二区三区| 51精品秘密在线观看| 亚洲国产欧美在线| 欧美视频一区在线观看| 亚洲综合色婷婷| 欧洲日韩一区二区三区| 亚洲精品欧美在线| 9191久久久久久久久久久| 日韩国产欧美在线播放| 欧美一区二区三区白人| 午夜激情一区二区| 91精品国产欧美一区二区18| 亚洲一区在线电影| 欧美日韩电影在线| 精东粉嫩av免费一区二区三区| 久久先锋资源网| av一区二区三区黑人| 一片黄亚洲嫩模| 日韩欧美的一区| 成人小视频免费在线观看| 亚洲欧洲精品一区二区三区不卡| 欧洲精品在线观看| 久久9热精品视频| 亚洲丝袜自拍清纯另类| 91精品久久久久久久91蜜桃| 国产一区中文字幕| 亚洲精品亚洲人成人网| 精品美女被调教视频大全网站| 国产精品亚洲一区二区三区在线 | 亚洲与欧洲av电影| 精品国免费一区二区三区| yourporn久久国产精品| 日韩经典一区二区| 久久99精品久久久| 日韩美女视频一区| 欧美mv和日韩mv的网站| www成人在线观看| 91精选在线观看| 日本韩国精品在线| 福利电影一区二区| 久久精品国产网站| 亚洲图片自拍偷拍| 亚洲欧洲无码一区二区三区| 日韩精品专区在线影院重磅| 91香蕉视频在线| 丰满亚洲少妇av| 极品少妇xxxx精品少妇| 日韩av中文字幕一区二区三区| 亚洲视频免费观看| 18欧美亚洲精品| 国产精品成人一区二区艾草| 久久综合久久99| 欧美成人一区二区三区片免费| 欧美日韩久久久一区| 一本到一区二区三区| av在线免费不卡| 91网站在线播放| 欧美在线视频日韩| 欧美色区777第一页| 欧美三级电影精品| 欧美日韩aaaaaa| 精品久久久久久久人人人人传媒 | 成a人片国产精品| 国产白丝精品91爽爽久久| 懂色一区二区三区免费观看| 成人性视频网站| av亚洲产国偷v产偷v自拍| 91小视频在线| 欧美伊人久久大香线蕉综合69 | 一区二区三区日本| 亚洲永久精品大片| 人妖欧美一区二区| 国产黄人亚洲片| 欧亚洲嫩模精品一区三区| 91精品国产综合久久久久久漫画 | 在线观看亚洲精品视频| 日韩欧美一区二区三区在线| wwww国产精品欧美| 夜夜亚洲天天久久| 青青草97国产精品免费观看 | 精品国产一区久久| 亚洲天天做日日做天天谢日日欢| 亚洲一区二区成人在线观看| 日韩高清一级片| av影院午夜一区| 日韩欧美一级二级| 亚洲综合精品自拍| 不卡一区在线观看| 日韩欧美一级二级三级| 中文字幕成人网| 九一九一国产精品| 制服丝袜在线91| 亚洲午夜视频在线观看| av在线一区二区三区| 国产日韩成人精品| 韩国一区二区在线观看| 欧美日韩在线直播| 一区二区成人在线| 91麻豆免费视频| 一区二区三区91| 91亚洲精品久久久蜜桃网站| 国产亚洲成aⅴ人片在线观看| 日韩高清不卡一区| 
91精品婷婷国产综合久久竹菊| 一区二区久久久久| 色婷婷国产精品综合在线观看| 国产欧美视频一区二区| 国产精品白丝jk黑袜喷水| 欧美不卡一区二区三区四区| 丝袜脚交一区二区| 6080日韩午夜伦伦午夜伦| 亚洲午夜久久久久久久久久久| 99久久精品情趣| 夜夜嗨av一区二区三区四季av| 色综合欧美在线| 亚洲一区在线视频观看| 欧美日韩亚洲丝袜制服| 日韩电影网1区2区| 久久亚洲精品小早川怜子| 国产精品18久久久久久久久久久久| 国产日韩精品一区二区三区在线| 成人免费av网站| 亚洲国产精品天堂| 2024国产精品| aaa欧美大片| 蜜臀久久99精品久久久久宅男 | 亚洲欧美怡红院| 一本到三区不卡视频| 日韩不卡手机在线v区| 亚洲精品一区二区三区精华液 | 亚洲成av人片一区二区梦乃| 日韩亚洲欧美一区| 成人av动漫网站| 日韩二区三区在线观看| **欧美大码日韩| 欧美va亚洲va| 欧美三级三级三级| 成人国产精品免费网站|