svm_learn.c

This is the SVM-light source package I recently obtained; it is mainly used for text classification.

Language: C
Page 1 of 5
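  /* The listing resumes inside the ranking-mode training routine (option
     -z p). The loops below turn the ranking problem into a classification
     problem on pairs: for every two documents i,j that belong to the same
     query and have different rank values, one training example is created
     on the difference vector x_i - x_j, with target +1 if i should be
     ranked above j and -1 otherwise. For non-linear kernels the difference
     is kept implicit as an SVECTOR chain with factors +1 and -1. */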

  printf("Constructing %ld rank constraints...",totpair); fflush(stdout);
  docdiff=(DOC **)my_malloc(sizeof(DOC)*totpair);
  target=(double *)my_malloc(sizeof(double)*totpair); 
  greater=(long *)my_malloc(sizeof(long)*totpair); 
  lesser=(long *)my_malloc(sizeof(long)*totpair); 

  k=0;
  for(i=0;i<totdoc;i++) {
    for(j=i+1;j<totdoc;j++) {
      if(docs[i]->queryid == docs[j]->queryid) {
	cost=(docs[i]->costfactor+docs[j]->costfactor)/2.0;
	if(rankvalue[i] > rankvalue[j]) {
	  if(kernel_parm->kernel_type == LINEAR)
	    docdiff[k]=create_example(k,0,0,cost,
				      sub_ss(docs[i]->fvec,docs[j]->fvec));
	  else {
	    flow=copy_svector(docs[j]->fvec);
	    flow->factor=-1.0;
	    flow->next=NULL;
	    fhigh=copy_svector(docs[i]->fvec);
	    fhigh->factor=1.0;
	    fhigh->next=flow;
	    docdiff[k]=create_example(k,0,0,cost,fhigh);
	  }
	  target[k]=1;
	  greater[k]=i;
	  lesser[k]=j;
	  k++;
	}
	else if(rankvalue[i] < rankvalue[j]) {
	  if(kernel_parm->kernel_type == LINEAR)
	    docdiff[k]=create_example(k,0,0,cost,
				      sub_ss(docs[i]->fvec,docs[j]->fvec));
	  else {
	    flow=copy_svector(docs[j]->fvec);
	    flow->factor=-1.0;
	    flow->next=NULL;
	    fhigh=copy_svector(docs[i]->fvec);
	    fhigh->factor=1.0;
	    fhigh->next=flow;
	    docdiff[k]=create_example(k,0,0,cost,fhigh);
	  }
	  target[k]=-1;
	  greater[k]=i;
	  lesser[k]=j;
	  k++;
	}
      }
    }
  }
  printf("done.\n"); fflush(stdout);

  /* need to get a bigger kernel cache */
  if(*kernel_cache) {
    kernel_cache_size=(*kernel_cache)->buffsize*sizeof(CFLOAT)/(1024*1024);
    kernel_cache_cleanup(*kernel_cache);
    (*kernel_cache)=kernel_cache_init(totpair,kernel_cache_size);
  }

  /* must use unbiased hyperplane on difference vectors */
  learn_parm->biased_hyperplane=0;
  pairmodel=(MODEL *)my_malloc(sizeof(MODEL));
  svm_learn_classification(docdiff,target,totpair,totwords,learn_parm,
			   kernel_parm,(*kernel_cache),pairmodel,NULL);

  /* Transfer the result into a more compact model. If you would like
     to output the original model on pairs of documents, see below. */
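  /* The pairwise model expands the weight vector over difference vectors,
         w = sum_k a_k * (x_greater(k) - x_lesser(k)),
     where a_k is the (signed) alpha of pair constraint k. Distributing the
     sum gives an equivalent expansion over the original documents,
         w = sum_i beta_i * x_i,
         beta_i = sum_{k: greater(k)=i} a_k  -  sum_{k: lesser(k)=i} a_k,
     which is what the loops below accumulate into alpha[] and then store
     in the compact model. */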
  alpha=(double *)my_malloc(sizeof(double)*totdoc); 
  for(i=0;i<totdoc;i++) {
    alpha[i]=0;
  }
  for(i=1;i<pairmodel->sv_num;i++) {
    alpha[lesser[(pairmodel->supvec[i])->docnum]]-=pairmodel->alpha[i];
    alpha[greater[(pairmodel->supvec[i])->docnum]]+=pairmodel->alpha[i];
  }
  model->supvec = (DOC **)my_malloc(sizeof(DOC *)*(totdoc+2));
  model->alpha = (double *)my_malloc(sizeof(double)*(totdoc+2));
  model->index = (long *)my_malloc(sizeof(long)*(totdoc+2));
  model->supvec[0]=0;  /* element 0 reserved and empty for now */
  model->alpha[0]=0;
  model->sv_num=1;
  for(i=0;i<totdoc;i++) {
    if(alpha[i]) {
      model->supvec[model->sv_num]=docs[i];
      model->alpha[model->sv_num]=alpha[i];
      model->index[i]=model->sv_num;
      model->sv_num++;
    }
    else {
      model->index[i]=-1;
    }
  }
  model->at_upper_bound=0;
  model->b=0;	       
  model->lin_weights=NULL;
  model->totwords=totwords;
  model->totdoc=totdoc;
  model->kernel_parm=(*kernel_parm);
  model->loo_error=-1;
  model->loo_recall=-1;
  model->loo_precision=-1;
  model->xa_error=-1;
  model->xa_recall=-1;
  model->xa_precision=-1;

  free(alpha);
  free(greater);
  free(lesser);
  free(target);

  /* If you would like to output the original model on pairs of
     documents, replace the following lines with '(*model)=(*pairmodel);' */
  for(i=0;i<totpair;i++)
    free_example(docdiff[i],1);
  free(docdiff);
  free_model(pairmodel,0);
}


/* The following solves a freely defined and given set of
   inequalities. The optimization problem is of the following form:

   min 0.5 w*w + C sum_i C_i \xi_i
   s.t. x_i * w > rhs_i - \xi_i

   This corresponds to the -z o option. */
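/* Illustrative sketch (not part of the original SVM-light sources): one way
   a caller might feed the optimizer below. It sets up the two toy
   constraints  (1,0)*w > 1  and  (0,1)*w > 1  with a linear kernel, using
   create_svector()/create_example() from svm_common.c. learn_parm and
   kernel_parm are assumed to already hold the usual defaults (e.g. as set
   up by the command-line front end in svm_learn_main.c). */
static void example_optimization_call(LEARN_PARM *learn_parm,
				      KERNEL_PARM *kernel_parm)
{
  WORD   words[3];           /* sparse vector, terminated by wnum==0 */
  DOC    *docs[2];
  double rhs[2];
  MODEL  model;

  kernel_parm->kernel_type=LINEAR;   /* no kernel cache needed then */

  /* constraint 0:  x_0=(1,0),  rhs_0=1 */
  words[0].wnum=1; words[0].weight=1.0;
  words[1].wnum=0; words[1].weight=0.0;   /* terminator */
  docs[0]=create_example(0,0,0,1.0,create_svector(words,"",1.0));
  rhs[0]=1.0;

  /* constraint 1:  x_1=(0,1),  rhs_1=1  (words[] is copied, so reuse it) */
  words[0].wnum=2; words[0].weight=1.0;
  docs[1]=create_example(1,0,0,1.0,create_svector(words,"",1.0));
  rhs[1]=1.0;

  /* solve  min 0.5 w*w + C sum_i C_i xi_i  s.t.  x_i*w > rhs_i - xi_i  */
  svm_learn_optimization(docs,rhs,2,2,learn_parm,kernel_parm,NULL,&model,NULL);

  /* model now holds the solution as a support-vector expansion; for a
     sufficiently large C the minimizer is w=(1,1).
     (Cleanup of docs/model is omitted in this sketch.) */
}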

void svm_learn_optimization(DOC **docs, double *rhs, long int
			    totdoc, long int totwords, 
			    LEARN_PARM *learn_parm, 
			    KERNEL_PARM *kernel_parm, 
			    KERNEL_CACHE *kernel_cache, MODEL *model,
			    double *alpha)
     /* docs:        Left-hand side of inequalities (x-part) */
     /* rhs:         Right-hand side of inequalities */
     /* totdoc:      Number of examples in docs/rhs */
     /* totwords:    Number of features (i.e. highest feature index) */
     /* learn_parm:  Learning parameters */
     /* kernel_parm: Kernel parameters */
     /* kernel_cache:Initialized Cache of size 1*totdoc, if using a kernel. 
                     NULL if linear.*/
     /* model:       Returns solution as SV expansion (assumed empty before called) */
     /* alpha:       Start values for the alpha variables or NULL
	             pointer. The new alpha values are returned after 
		     optimization if not NULL. Array must be of size totdoc. */
{
  long i,*label;
  long misclassified,upsupvecnum;
  double loss,model_length,example_length;
  double maxdiff,*lin,*a,*c;
  long runtime_start,runtime_end;
  long iterations,maxslackid,svsetnum;
  long *unlabeled,*inconsistent;
  double r_delta_sq=0,r_delta,r_delta_avg;
  long *index,*index2dnum;
  double *weights,*slack,*alphaslack;
  CFLOAT *aicache;  /* buffer to keep one row of hessian */

  TIMING timing_profile;
  SHRINK_STATE shrink_state;

  runtime_start=get_runtime();
  timing_profile.time_kernel=0;
  timing_profile.time_opti=0;
  timing_profile.time_shrink=0;
  timing_profile.time_update=0;
  timing_profile.time_model=0;
  timing_profile.time_check=0;
  timing_profile.time_select=0;
  kernel_cache_statistic=0;

  learn_parm->totwords=totwords;

  /* make sure -n value is reasonable */
  if((learn_parm->svm_newvarsinqp < 2) 
     || (learn_parm->svm_newvarsinqp > learn_parm->svm_maxqpsize)) {
    learn_parm->svm_newvarsinqp=learn_parm->svm_maxqpsize;
  }

  init_shrink_state(&shrink_state,totdoc,(long)MAXSHRINK);

  label = (long *)my_malloc(sizeof(long)*totdoc);
  unlabeled = (long *)my_malloc(sizeof(long)*totdoc);
  inconsistent = (long *)my_malloc(sizeof(long)*totdoc);
  c = (double *)my_malloc(sizeof(double)*totdoc);
  a = (double *)my_malloc(sizeof(double)*totdoc);
  lin = (double *)my_malloc(sizeof(double)*totdoc);
  learn_parm->svm_cost = (double *)my_malloc(sizeof(double)*totdoc);
  model->supvec = (DOC **)my_malloc(sizeof(DOC *)*(totdoc+2));
  model->alpha = (double *)my_malloc(sizeof(double)*(totdoc+2));
  model->index = (long *)my_malloc(sizeof(long)*(totdoc+2));

  model->at_upper_bound=0;
  model->b=0;	       
  model->supvec[0]=0;  /* element 0 reserved and empty for now */
  model->alpha[0]=0;
  model->lin_weights=NULL;
  model->totwords=totwords;
  model->totdoc=totdoc;
  model->kernel_parm=(*kernel_parm);
  model->sv_num=1;
  model->loo_error=-1;
  model->loo_recall=-1;
  model->loo_precision=-1;
  model->xa_error=-1;
  model->xa_recall=-1;
  model->xa_precision=-1;

  r_delta=estimate_r_delta(docs,totdoc,kernel_parm);
  r_delta_sq=r_delta*r_delta;

  r_delta_avg=estimate_r_delta_average(docs,totdoc,kernel_parm);
  if(learn_parm->svm_c == 0.0) {  /* default value for C */
    learn_parm->svm_c=1.0/(r_delta_avg*r_delta_avg);
    if(verbosity>=1) 
      printf("Setting default regularization parameter C=%.4f\n",
	     learn_parm->svm_c);
  }
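  /* i.e. C defaults to 1/R^2, where R (r_delta_avg) is an estimate of the
     average distance of the training examples from the origin in feature
     space -- roughly the documented SVM-light default of [avg. x*x]^-1. */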

  learn_parm->biased_hyperplane=0; /* learn an unbiased hyperplane */

  learn_parm->eps=0.0;      /* No margin, unless explicitly handcoded
                               in the right-hand side in the training
                               set.  */

  for(i=0;i<totdoc;i++) {    /* various inits */
    docs[i]->docnum=i;
    a[i]=0;
    lin[i]=0;
    c[i]=rhs[i];       /* set right-hand side */
    unlabeled[i]=0;
    inconsistent[i]=0;
    learn_parm->svm_cost[i]=learn_parm->svm_c*learn_parm->svm_costratio*
      docs[i]->costfactor;
    label[i]=1;
  }
  if(learn_parm->sharedslack) /* if shared slacks are used, they must */
    for(i=0;i<totdoc;i++)     /*  be used on every constraint */
      if(!docs[i]->slackid) {
	perror("Error: Missing shared slacks definitions in some of the examples.");
	exit(0);
      }
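  /* Warm start: if the caller supplied initial alpha values, they are
     clipped to the feasible range [0, C_i], the kernel rows of the
     corresponding support vectors are pre-cached, and lin[] plus a first
     model are recomputed, so that optimization can resume from this point
     rather than from alpha=0. */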
      
  /* compute starting state for initial alpha values */
  if(alpha) {
    if(verbosity>=1) {
      printf("Computing starting state..."); fflush(stdout);
    }
    index = (long *)my_malloc(sizeof(long)*totdoc);
    index2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));
    weights=(double *)my_malloc(sizeof(double)*(totwords+1));
    aicache = (CFLOAT *)my_malloc(sizeof(CFLOAT)*totdoc);
    for(i=0;i<totdoc;i++) {    /* create full index and clip alphas */
      index[i]=1;
      alpha[i]=fabs(alpha[i]);
      if(alpha[i]<0) alpha[i]=0;
      if(alpha[i]>learn_parm->svm_cost[i]) alpha[i]=learn_parm->svm_cost[i];
    }
    if(kernel_parm->kernel_type != LINEAR) {
      for(i=0;i<totdoc;i++)     /* fill kernel cache with unbounded SV */
	if((alpha[i]>0) && (alpha[i]<learn_parm->svm_cost[i]) 
	   && (kernel_cache_space_available(kernel_cache))) 
	  cache_kernel_row(kernel_cache,docs,i,kernel_parm);
      for(i=0;i<totdoc;i++)     /* fill rest of kernel cache with bounded SV */
	if((alpha[i]==learn_parm->svm_cost[i]) 
	   && (kernel_cache_space_available(kernel_cache))) 
	  cache_kernel_row(kernel_cache,docs,i,kernel_parm);
    }
    (void)compute_index(index,totdoc,index2dnum);
    update_linear_component(docs,label,index2dnum,alpha,a,index2dnum,totdoc,
			    totwords,kernel_parm,kernel_cache,lin,aicache,
			    weights);
    (void)calculate_svm_model(docs,label,unlabeled,lin,alpha,a,c,
			      learn_parm,index2dnum,index2dnum,model);
    for(i=0;i<totdoc;i++) {    /* copy initial alphas */
      a[i]=alpha[i];
    }
    free(index);
    free(index2dnum);
    free(weights);
    free(aicache);
    if(verbosity>=1) {
      printf("done.\n");  fflush(stdout);
    }   
  } 

  /* removing inconsistent does not work for general optimization problem */
  if(learn_parm->remove_inconsistent) {	  
    learn_parm->remove_inconsistent = 0;
    printf("'remove inconsistent' not available in this mode. Switching option off!"); fflush(stdout);
  }

  /* caching makes no sense for linear kernel */
  if(kernel_parm->kernel_type == LINEAR) {
    kernel_cache = NULL;   
  } 

  if(verbosity==1) {
    printf("Optimizing"); fflush(stdout);
  }

  /* train the svm */
  if(learn_parm->sharedslack)
    iterations=optimize_to_convergence_sharedslack(docs,label,totdoc,
				     totwords,learn_parm,kernel_parm,
				     kernel_cache,&shrink_state,model,
				     a,lin,c,&timing_profile,
				     &maxdiff);
  else
    iterations=optimize_to_convergence(docs,label,totdoc,
				     totwords,learn_parm,kernel_parm,
				     kernel_cache,&shrink_state,model,
				     inconsistent,unlabeled,
				     a,lin,c,&timing_profile,
				     &maxdiff,(long)-1,(long)1);
  
  if(verbosity>=1) {
    if(verbosity==1) printf("done. (%ld iterations)\n",iterations);

    misclassified=0;
    for(i=0;(i<totdoc);i++) { /* get final statistic */
      if((lin[i]-model->b)*(double)label[i] <= 0.0) 
	misclassified++;
    }

    printf("Optimization finished (maxdiff=%.5f).\n",maxdiff); 

    runtime_end=get_runtime();
    if(verbosity>=2) {
      printf("Runtime in cpu-seconds: %.2f (%.2f%% for kernel/%.2f%% for optimizer/%.2f%% for final/%.2f%% for update/%.2f%% for model/%.2f%% for check/%.2f%% for select)\n",
        ((float)runtime_end-(float)runtime_start)/100.0,
        (100.0*timing_profile.time_kernel)/(float)(runtime_end-runtime_start),
	(100.0*timing_profile.time_opti)/(float)(runtime_end-runtime_start),
	(100.0*timing_profile.time_shrink)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_update)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_model)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_check)/(float)(runtime_end-runtime_start),
        (100.0*timing_profile.time_select)/(float)(runtime_end-runtime_start));
    }
    else {
      printf("Runtime in cpu-seconds: %.2f\n",
	     (runtime_end-runtime_start)/100.0);
    }
  }
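  /* Note: since lin[i] = sum_j a_j y_j K(x_i,x_j), the sum of
     a[i]*label[i]*lin[i] below equals w*w, so model_length is |w|; loss
     accumulates the slacks max(0, rhs_i - x_i*w) of the final solution
     (up to the epsilon_crit tolerance). */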
  if((verbosity>=1) && (!learn_parm->skip_final_opt_check)) {
    loss=0;
    model_length=0; 
    for(i=0;i<totdoc;i++) {
      if((lin[i]-model->b)*(double)label[i] < c[i]-learn_parm->epsilon_crit)
	loss+=c[i]-(lin[i]-model->b)*(double)label[i];
      model_length+=a[i]*label[i]*lin[i];
    }
    model_length=sqrt(model_length);
    fprintf(stdout,"Norm of weight vector: |w|=%.5f\n",model_length);
  }
  
  if(learn_parm->sharedslack) {
    index = (long *)my_malloc(sizeof(long)*totdoc);
    index2dnum = (long *)my_malloc(sizeof(long)*(totdoc+11));
    maxslackid=0;
    for(i=0;i<totdoc;i++) {    /* create full index */
      index[i]=1;
      if(maxslackid<docs[i]->slackid)
	maxslackid=docs[i]->slackid;
    }
