亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? svm_learn.c

?? SVM Light的多分類源代碼
?? C
?? 第 1 頁 / 共 5 頁
字號:
	 < 1.0) { 
	/* guaranteed to not produce a leave-one-out error */
	if(verbosity==1) {
	  printf("+"); fflush(stdout); 
	}
      }
      else if(xi_fullset[heldout] > 1.0) {
	/* guaranteed to produce a leave-one-out error */
	loo_count++;
	if(label[heldout] > 0)  loo_count_pos++; else loo_count_neg++;
	if(verbosity==1) {
	  printf("-"); fflush(stdout); 
	}
      }
      else {
	loocomputed++;
	heldout_c=learn_parm->svm_cost[heldout]; /* set upper bound to zero */
	learn_parm->svm_cost[heldout]=0;
	/* make sure heldout example is not currently  */
	/* shrunk away. Assumes that lin is up to date! */
	shrink_state.active[heldout]=1;  
	if(verbosity>=2) 
	  printf("\nLeave-One-Out test on example %ld\n",heldout);
	if(verbosity>=1) {
	  printf("(?[%ld]",heldout); fflush(stdout); 
	}
	
	optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,
				kernel_parm,
				kernel_cache,&shrink_state,model,inconsistent,unlabeled,
				a,lin,c,&timing_profile,
				&maxdiff,heldout,(long)2);

	/* printf("%.20f\n",(lin[heldout]-model->b)*(double)label[heldout]); */

	if(((lin[heldout]-model->b)*(double)label[heldout]) <= 0.0) { 
	  loo_count++;                            /* there was a loo-error */
	  if(label[heldout] > 0)  loo_count_pos++; else loo_count_neg++;
	  if(verbosity>=1) {
	    printf("-)"); fflush(stdout); 
	  }
	}
	else {
	  if(verbosity>=1) {
	    printf("+)"); fflush(stdout); 
	  }
	}
	/* now we need to restore the original data set*/
	learn_parm->svm_cost[heldout]=heldout_c; /* restore upper bound */
      }
    } /* end of leave-one-out loop */


    if(verbosity>=1) {
      printf("\nRetrain on full problem"); fflush(stdout); 
    }
    optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,
			    kernel_parm,
			    kernel_cache,&shrink_state,model,inconsistent,unlabeled,
			    a,lin,c,&timing_profile,
			    &maxdiff,(long)-1,(long)1);
    if(verbosity >= 1) 
      printf("done.\n");
    
    
    /* after all leave-one-out computed */
    model->loo_error=100.0*loo_count/(double)totdoc;
    model->loo_recall=(1.0-(double)loo_count_pos/(double)trainpos)*100.0;
    model->loo_precision=(trainpos-loo_count_pos)/
      (double)(trainpos-loo_count_pos+loo_count_neg)*100.0;
    if(verbosity >= 1) {
      fprintf(stdout,"Leave-one-out estimate of the error: error=%.2f%%\n",
	      model->loo_error);
      fprintf(stdout,"Leave-one-out estimate of the recall: recall=%.2f%%\n",
	      model->loo_recall);
      fprintf(stdout,"Leave-one-out estimate of the precision: precision=%.2f%%\n",
	      model->loo_precision);
      fprintf(stdout,"Actual leave-one-outs computed:  %ld (rho=%.2f)\n",
	      loocomputed,learn_parm->rho);
      printf("Runtime for leave-one-out in cpu-seconds: %.2f\n",
	     (double)(get_runtime()-runtime_start_loo)/100.0);
    }
  }
    
  if(learn_parm->alphafile[0])
    write_alphas(learn_parm->alphafile,a,label,totdoc);
  
  shrink_state_cleanup(&shrink_state);
  free(label);
  free(inconsistent);
  free(unlabeled);
  free(c);
  free(a);
  free(a_fullset);
  free(xi_fullset);
  free(lin);
  free(learn_parm->svm_cost);
}


/* Learns an SVM regression model based on the training data in
   docs/value. The resulting model is returned in the structure
   model. Internally, the regression problem is reduced to a
   classification problem on 2*totdoc examples: each training vector
   appears once with label +1 and once (mirrored at index 2*totdoc-1-i)
   with label -1, sharing the same feature vector. */

void svm_learn_regression(DOC **docs, double *value, long int totdoc, 
			  long int totwords, LEARN_PARM *learn_parm, 
			  KERNEL_PARM *kernel_parm, 
			  KERNEL_CACHE **kernel_cache, MODEL *model)
     /* docs:        Training vectors (x-part) */
     /* value:       Training value (y-part) */
     /* totdoc:      Number of examples in docs/label */
     /* totwords:    Number of features (i.e. highest feature index) */
     /* learn_parm:  Learning parameters */
     /* kernel_parm: Kernel parameters */
     /* kernel_cache:Initialized Cache, if using a kernel. NULL if
                     linear. Note that it will be free'd and reassigned */
     /* model:       Returns learning result (assumed empty before called) */
{
  long *inconsistent,i,j;
  long inconsistentnum;
  long upsupvecnum;
  double loss,model_length,example_length;
  double maxdiff,*lin,*a,*c;
  long runtime_start,runtime_end;
  long iterations,kernel_cache_size;
  long *unlabeled;
  double r_delta_sq=0,r_delta,r_delta_avg;
  double *xi_fullset; /* buffer for storing xi on full sample in loo */
  double *a_fullset;  /* buffer for storing alpha on full sample in loo */
  TIMING timing_profile;
  SHRINK_STATE shrink_state;
  DOC **docs_org;
  long *label;

  /* set up regression problem in standard form: duplicate every
     example with opposite labels and target values shifted by eps */
  docs_org=docs;
  /* BUGFIX: this is an array of DOC pointers, so the element size is
     sizeof(DOC *), not sizeof(DOC) (the original over-allocated) */
  docs = (DOC **)my_malloc(sizeof(DOC *)*2*totdoc);
  label = (long *)my_malloc(sizeof(long)*2*totdoc);
  c = (double *)my_malloc(sizeof(double)*2*totdoc);
  for(i=0;i<totdoc;i++) {   
    j=2*totdoc-1-i;           /* mirror index for the -1 copy */
    docs[i]=create_example(i,0,0,docs_org[i]->costfactor,docs_org[i]->fvec);
    label[i]=+1;
    c[i]=value[i];
    docs[j]=create_example(j,0,0,docs_org[i]->costfactor,docs_org[i]->fvec);
    label[j]=-1;
    c[j]=value[i];
  }
  totdoc*=2;                  /* from here on work on the doubled sample */

  /* need to get a bigger kernel cache (the doubled problem has 2x the
     examples); keep the user's cache size in MB and reinitialize */
  if(*kernel_cache) {
    kernel_cache_size=(*kernel_cache)->buffsize*sizeof(CFLOAT)/(1024*1024);
    kernel_cache_cleanup(*kernel_cache);
    (*kernel_cache)=kernel_cache_init(totdoc,kernel_cache_size);
  }

  runtime_start=get_runtime();
  timing_profile.time_kernel=0;
  timing_profile.time_opti=0;
  timing_profile.time_shrink=0;
  timing_profile.time_update=0;
  timing_profile.time_model=0;
  timing_profile.time_check=0;
  timing_profile.time_select=0;
  kernel_cache_statistic=0;

  learn_parm->totwords=totwords;

  /* make sure -n value is reasonable */
  if((learn_parm->svm_newvarsinqp < 2) 
     || (learn_parm->svm_newvarsinqp > learn_parm->svm_maxqpsize)) {
    learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;
  }

  init_shrink_state(&shrink_state,totdoc,(long)MAXSHRINK);

  inconsistent = (long *)my_malloc(sizeof(long)*totdoc);
  unlabeled = (long *)my_malloc(sizeof(long)*totdoc);
  a = (double *)my_malloc(sizeof(double)*totdoc);
  a_fullset = (double *)my_malloc(sizeof(double)*totdoc);
  xi_fullset = (double *)my_malloc(sizeof(double)*totdoc);
  lin = (double *)my_malloc(sizeof(double)*totdoc);
  learn_parm->svm_cost = (double *)my_malloc(sizeof(double)*totdoc);
  model->supvec = (DOC **)my_malloc(sizeof(DOC *)*(totdoc+2));
  model->alpha = (double *)my_malloc(sizeof(double)*(totdoc+2));
  model->index = (long *)my_malloc(sizeof(long)*(totdoc+2));

  model->at_upper_bound=0;
  model->b=0;	       
  model->supvec[0]=0;  /* element 0 reserved and empty for now */
  model->alpha[0]=0;
  model->lin_weights=NULL;
  model->totwords=totwords;
  model->totdoc=totdoc;
  model->kernel_parm=(*kernel_parm);
  model->sv_num=1;
  model->loo_error=-1;     /* -1 marks "not computed" */
  model->loo_recall=-1;
  model->loo_precision=-1;
  model->xa_error=-1;
  model->xa_recall=-1;
  model->xa_precision=-1;
  inconsistentnum=0;

  r_delta=estimate_r_delta(docs,totdoc,kernel_parm);
  r_delta_sq=r_delta*r_delta;

  r_delta_avg=estimate_r_delta_average(docs,totdoc,kernel_parm);
  if(learn_parm->svm_c == 0.0) {  /* default value for C */
    learn_parm->svm_c=1.0/(r_delta_avg*r_delta_avg);
    if(verbosity>=1) 
      printf("Setting default regularization parameter C=%.4f\n",
	     learn_parm->svm_c);
  }

  for(i=0;i<totdoc;i++) {    /* various inits */
    inconsistent[i]=0;
    a[i]=0;
    lin[i]=0;
    unlabeled[i]=0;
    if(label[i] > 0) {
      /* positive copies get the cost ratio applied (-j option) */
      learn_parm->svm_cost[i]=learn_parm->svm_c*learn_parm->svm_costratio*
	docs[i]->costfactor;
    }
    else if(label[i] < 0) {
      learn_parm->svm_cost[i]=learn_parm->svm_c*docs[i]->costfactor;
    }
  }

  /* caching makes no sense for linear kernel */
  if((kernel_parm->kernel_type == LINEAR) && (*kernel_cache)) {
    printf("WARNING: Using a kernel cache for linear case will slow optimization down!\n");
  } 

  if(verbosity==1) {
    printf("Optimizing"); fflush(stdout);
  }

  /* train the svm */
  iterations=optimize_to_convergence(docs,label,totdoc,totwords,learn_parm,
				     kernel_parm,*kernel_cache,&shrink_state,
				     model,inconsistent,unlabeled,a,lin,c,
				     &timing_profile,&maxdiff,(long)-1,
				     (long)1);
  
  if(verbosity>=1) {
    if(verbosity==1) printf("done. (%ld iterations)\n",iterations);

    printf("Optimization finished (maxdiff=%.5f).\n",maxdiff); 

    runtime_end=get_runtime();
    if(verbosity>=2) {
      /* get_runtime() reports in 1/100 s, hence the /100.0 */
      printf("Runtime in cpu-seconds: %.2f (%.2f%% for kernel/%.2f%% for optimizer/%.2f%% for final/%.2f%% for update/%.2f%% for model/%.2f%% for check/%.2f%% for select)\n",
        ((float)runtime_end-(float)runtime_start)/100.0,
        (100.0*timing_profile.time_kernel)/(float)(runtime_end-runtime_start),
	(100.0*timing_profile.time_opti)/(float)(runtime_end-runtime_start),
	(100.0*timing_profile.time_shrink)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_update)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_model)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_check)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_select)/(float)(runtime_end-runtime_start));
    }
    else {
      printf("Runtime in cpu-seconds: %.2f\n",
	     (runtime_end-runtime_start)/100.0);
    }

    if(learn_parm->remove_inconsistent) {	  
      inconsistentnum=0;
      for(i=0;i<totdoc;i++) 
	if(inconsistent[i]) 
	  inconsistentnum++;
      printf("Number of SV: %ld (plus %ld inconsistent examples)\n",
	     model->sv_num-1,inconsistentnum);
    }
    else {
      /* count SVs whose alpha sits at the upper bound (within eps_a) */
      upsupvecnum=0;
      for(i=1;i<model->sv_num;i++) {
	if(fabs(model->alpha[i]) >= 
	   (learn_parm->svm_cost[(model->supvec[i])->docnum]-
	    learn_parm->epsilon_a)) 
	  upsupvecnum++;
      }
      printf("Number of SV: %ld (including %ld at upper bound)\n",
	     model->sv_num-1,upsupvecnum);
    }
    
    if((verbosity>=1) && (!learn_parm->skip_final_opt_check)) {
      loss=0;
      model_length=0; 
      for(i=0;i<totdoc;i++) {
	/* accumulate eps-insensitive L1 loss over the doubled sample */
	if((lin[i]-model->b)*(double)label[i] < (-learn_parm->eps+(double)label[i]*c[i])-learn_parm->epsilon_crit)
	  loss+=-learn_parm->eps+(double)label[i]*c[i]-(lin[i]-model->b)*(double)label[i];
	model_length+=a[i]*label[i]*lin[i];
      }
      model_length=sqrt(model_length);
      fprintf(stdout,"L1 loss: loss=%.5f\n",loss);
      fprintf(stdout,"Norm of weight vector: |w|=%.5f\n",model_length);
      /* NOTE(review): example_length is computed but the printout below
	 uses length_of_longest_document_vector instead — kept as-is to
	 preserve behavior */
      example_length=estimate_sphere(model,kernel_parm); 
      fprintf(stdout,"Norm of longest example vector: |x|=%.5f\n",
	      length_of_longest_document_vector(docs,totdoc,kernel_parm));
    }
    if(verbosity>=1) {
      printf("Number of kernel evaluations: %ld\n",kernel_cache_statistic);
    }
  }
    
  if(learn_parm->alphafile[0])
    write_alphas(learn_parm->alphafile,a,label,totdoc);

  /* this makes sure the model we return does not contain pointers to the 
     temporary documents: map the mirrored index j (>= totdoc/2) back to
     the original example index and point at docs_org */
  for(i=1;i<model->sv_num;i++) { 
    j=model->supvec[i]->docnum;
    if(j >= (totdoc/2)) {
      j=totdoc-j-1;
    }
    model->supvec[i]=docs_org[j];
  }
  
  shrink_state_cleanup(&shrink_state);
  for(i=0;i<totdoc;i++)
    free_example(docs[i],0);   /* free only the temporary wrappers, not fvecs */
  free(docs);
  free(label);
  free(inconsistent);
  free(unlabeled);
  free(c);
  free(a);
  free(a_fullset);
  free(xi_fullset);
  free(lin);
  free(learn_parm->svm_cost);
}

void svm_learn_ranking(DOC **docs, double *rankvalue, long int totdoc, 
		       long int totwords, LEARN_PARM *learn_parm, 
		       KERNEL_PARM *kernel_parm, KERNEL_CACHE **kernel_cache, 
		       MODEL *model)
     /* docs:        Training vectors (x-part) */
     /* rankvalue:   Training target values that determine the ranking */
     /* totdoc:      Number of examples in docs/label */
     /* totwords:    Number of features (i.e. highest feature index) */
     /* learn_parm:  Learning parameters */
     /* kernel_parm: Kernel parameters */
     /* kernel_cache:Initialized pointer to Cache of size 1*totdoc, if 
	             using a kernel. NULL if linear. NOTE: Cache is 
                     getting reinitialized in this function */
     /* model:       Returns learning result (assumed empty before called) */
{
  DOC **docdiff;
  long i,j,k,totpair,kernel_cache_size;
  double *target,*alpha,cost;
  long *greater,*lesser;
  MODEL *pairmodel;
  SVECTOR *flow,*fhigh;

  totpair=0;
  for(i=0;i<totdoc;i++) {
    for(j=i+1;j<totdoc;j++) {
      if((docs[i]->queryid==docs[j]->queryid) && (rankvalue[i] != rankvalue[j])) {
	totpair++;
      }
    }
  }

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
a级高清视频欧美日韩| 久久久午夜精品理论片中文字幕| 日韩一区二区三区精品视频| 中文字幕 久热精品 视频在线| 亚洲高清不卡在线观看| 成人黄色电影在线| 精品少妇一区二区三区在线播放| 亚洲日本在线a| 国产成人在线影院| 日韩欧美电影在线| 亚洲国产综合在线| 国产iv一区二区三区| 欧美成人免费网站| 日本伊人色综合网| 欧美丝袜丝交足nylons图片| 国产精品高潮呻吟| 国产精品一区二区无线| 日韩欧美国产麻豆| 奇米在线7777在线精品 | 亚洲免费观看高清完整版在线观看熊| 美国三级日本三级久久99| 日本高清不卡在线观看| 国产精品久久久久桃色tv| 国产精品一区二区视频| 久久综合九色综合97婷婷| 麻豆精品视频在线观看| 91精品一区二区三区久久久久久 | 久久91精品久久久久久秒播| 欧美日韩国产精选| 亚洲国产日韩一区二区| 欧亚一区二区三区| 亚洲激情男女视频| 91美女在线观看| 一区二区三区在线不卡| 在线看一区二区| 五月天激情小说综合| 777xxx欧美| 久久精品国产99国产精品| 日韩视频国产视频| 激情av综合网| 欧美国产精品专区| caoporen国产精品视频| 亚洲日本中文字幕区| 欧美三级日本三级少妇99| 婷婷中文字幕综合| 欧美一个色资源| 国产酒店精品激情| 欧美激情综合在线| 色综合中文字幕国产 | 亚洲欧洲日韩综合一区二区| 91免费版pro下载短视频| 亚洲一区二区视频| 884aa四虎影成人精品一区| 老司机午夜精品| 国产精品久久久久久久久免费桃花| 91蜜桃视频在线| 日韩电影在线免费观看| 国产亚洲1区2区3区| 91亚洲男人天堂| 日韩高清不卡在线| 国产欧美日韩在线| 在线精品观看国产| 久草精品在线观看| 国产精品全国免费观看高清| 欧洲一区二区三区免费视频| 麻豆成人91精品二区三区| 国产精品麻豆99久久久久久| 欧美久久久一区| 成人深夜在线观看| 香港成人在线视频| 中文字幕欧美日韩一区| 欧美日韩亚洲综合| 国产91精品一区二区麻豆网站| 亚洲.国产.中文慕字在线| 久久先锋资源网| 欧美日韩一本到| 丁香网亚洲国际| 日韩av网站免费在线| 亚洲色图制服诱惑| 精品国产免费一区二区三区香蕉| 91高清视频在线| 成人av在线资源网站| 久久99最新地址| 亚洲伊人伊色伊影伊综合网| 国产精品另类一区| 日韩视频一区二区| 欧美亚州韩日在线看免费版国语版| 久久激情五月激情| 亚洲小说春色综合另类电影| 国产精品久久久久婷婷| 日韩视频一区二区在线观看| 欧美体内she精视频| 91首页免费视频| 国产成人日日夜夜| 久久99在线观看| 蜜臀av性久久久久蜜臀av麻豆| 亚洲综合男人的天堂| 国产精品成人一区二区三区夜夜夜| 日韩区在线观看| 6080午夜不卡| 欧美精品高清视频| 欧美体内she精高潮| 在线亚洲高清视频| 91女神在线视频| 91亚洲男人天堂| 91网页版在线| 91免费观看国产| 92国产精品观看| 色综合久久久久网| 99视频在线观看一区三区| 成人自拍视频在线观看| 成人毛片视频在线观看| 高清不卡一区二区在线| 丰满亚洲少妇av| 不卡一区在线观看| 97久久超碰国产精品| 91在线一区二区三区| av资源网一区| 欧美伊人久久久久久久久影院 | 韩国成人在线视频| 久久精品国产免费看久久精品| 精品中文字幕一区二区小辣椒| 狂野欧美性猛交blacked| 日本va欧美va精品发布| 麻豆91免费看| 国产一区二区三区综合| 成人午夜短视频| 99麻豆久久久国产精品免费优播| 99re这里只有精品首页| 日本道免费精品一区二区三区| 欧美熟乱第一页| 日韩欧美中文字幕精品| 精品国精品自拍自在线| 久久九九久久九九| 国产精品无码永久免费888| 成人免费一区二区三区视频| 亚洲综合区在线| 久久精品噜噜噜成人av农村| 国产高清成人在线| 99re热视频这里只精品| 欧美日韩一区 二区 三区 久久精品| 日韩一区二区三区四区| 久久亚洲影视婷婷| 中文字幕日韩精品一区| 亚洲午夜久久久久久久久电影院 | 精品1区2区在线观看| 国产欧美日韩亚州综合| 夜夜嗨av一区二区三区网页| 精品一区二区在线看| 91网上在线视频| 欧美xxxxx裸体时装秀| 亚洲欧美综合另类在线卡通| 日韩不卡在线观看日韩不卡视频| 国产成人一区二区精品非洲| 91在线播放网址| 欧美大黄免费观看| 亚洲欧洲精品一区二区精品久久久| 亚洲电影在线播放| 国产91精品免费| 欧美日韩成人综合| 国产精品白丝在线| 日本午夜一本久久久综合| 99精品国产视频| 久久亚洲精精品中文字幕早川悠里| 亚洲最新在线观看| 丰满亚洲少妇av| 
欧美成人欧美edvon| 一区二区高清免费观看影视大全| 国产一区二区三区美女| 欧美日韩亚洲综合一区二区三区| 国产精品另类一区| 精品在线免费视频| 欧美精品日韩一本| 亚洲情趣在线观看| 国产**成人网毛片九色 | 亚洲同性同志一二三专区| 毛片av一区二区三区| 欧美色男人天堂| 综合婷婷亚洲小说| 国产成人午夜精品5599| 日韩精品一区二区三区蜜臀| 亚洲成人av一区二区| 91亚洲男人天堂| 国产精品蜜臀在线观看| 国产高清不卡二三区| 日韩视频在线你懂得| 日韩精品久久理论片| 色天天综合久久久久综合片| 国产精品天美传媒| 国产在线日韩欧美| 日韩欧美一区二区久久婷婷| 日本成人在线不卡视频| 欧美调教femdomvk| 一区二区日韩电影| 在线观看日韩一区| 夜夜精品视频一区二区| 欧美在线不卡一区| 亚洲大片在线观看| 精品视频在线免费观看| 亚洲午夜一区二区|