亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? svm_struct_learn.c

?? svm(支持向量機)分類算法本質上是二類分類器
?? C
字號:
/***********************************************************************/
/*                                                                     */
/*   svm_struct_learn.c                                                */
/*                                                                     */
/*   Basic algorithm for learning structured outputs (e.g. parses,     */
/*   sequences, multi-label classification) with a Support Vector      */ 
/*   Machine.                                                          */
/*                                                                     */
/*   Author: Thorsten Joachims                                         */
/*   Date: 03.07.04                                                    */
/*                                                                     */
/*   Copyright (c) 2004  Thorsten Joachims - All rights reserved       */
/*                                                                     */
/*   This software is available for non-commercial use only. It must   */
/*   not be modified and distributed without prior permission of the   */
/*   author. The author is not responsible for implications from the   */
/*   use of this software.                                             */
/*                                                                     */
/***********************************************************************/

#include "svm_struct_learn.h"
#include "svm_struct_common.h"
#include "../svm_struct_api.h"
#include <assert.h>

#define MAX(x,y)      ((x) < (y) ? (y) : (x))
#define MIN(x,y)      ((x) > (y) ? (y) : (x))

/* Train a structural SVM with the cutting-plane (working-set) algorithm:
   for each training example, find the currently most violated constraint,
   add it to the working constraint set if it is violated by more than the
   current working precision epsilon, and periodically re-solve the QP.
   The working precision starts coarse (1.0) and is tightened by roughly a
   factor of 10 per outer round until it reaches sparm->epsilon.

   Parameters:
     sample - the n training examples (sample.examples / sample.n)
     sparm  - structure-learning parameters (C, epsilon, slack_norm,
              loss_type, newconstretrain, ...)
     lparm  - parameters passed through to the underlying SVM-light QP
              solver (several fields are overwritten here)
     kparm  - kernel parameters (L2 slack norm requires a linear kernel)
     sm     - out: the learned structural model; init_struct_model() must
              set sm->sizePsi before training starts

   On return, sm->svm_model holds a copy of the final SVM model and
   sm->w points at its linear weight vector. */
void svm_learn_struct(SAMPLE sample, STRUCT_LEARN_PARM *sparm,
		      LEARN_PARM *lparm, KERNEL_PARM *kparm, 
		      STRUCTMODEL *sm)
{
  int         i,j;
  int         numIt=0;
  long        newconstraints=0, activenum=0; 
  int         opti_round, *opti;      /* shrinking state: opti[i]==opti_round
                                         means example i produced no new
                                         constraint this round */
  long        old_numConst=0;
  double      epsilon;                /* current working precision */
  long        tolerance;              /* max #unfulfilled constraints allowed
                                         before precision is tightened */
  double      lossval,factor;
  double      margin=0;
  double      slack, *slacks, slacksum;
  long        sizePsi;
  double      *alpha=NULL;            /* dual variables, one per constraint */
  CONSTSET    cset;                   /* working set of constraints */
  SVECTOR     *diff=NULL;
  SVECTOR     *fy, *fybar, *f;
  SVECTOR     *slackvec;
  WORD        slackv[2];
  MODEL       *svmModel=NULL;
  KERNEL_CACHE *kcache=NULL;
  LABEL       ybar;
  DOC         *doc;

  long        n=sample.n;
  EXAMPLE     *ex=sample.examples;
  double      rt_total=0.0, rt_opt=0.0;
  long        rt1,rt2;

  init_struct_model(sample,sm,sparm); 
  sizePsi=sm->sizePsi+1;          /* sm must contain size of psi on return */

  /* initialize example selection heuristic */ 
  opti=(int*)my_malloc(n*sizeof(int));
  for(i=0;i<n;i++) {
    opti[i]=0;
  }
  opti_round=0;

  if(sparm->slack_norm == 1) {
    lparm->svm_c=sparm->C;          /* set upper bound C */
    lparm->sharedslack=1;
  }
  else if(sparm->slack_norm == 2) {
    /* L2 slack is modeled by extra feature dimensions (see slackv below),
       so the QP itself is solved with an effectively unbounded C. */
    lparm->svm_c=999999999999999.0; /* upper bound C must never be reached */
    lparm->sharedslack=0;
    if(kparm->kernel_type != LINEAR) {
      printf("ERROR: Kernels are not implemented for L2 slack norm!"); 
      fflush(stdout);
      exit(0);
    }
  }
  else {
    printf("ERROR: Slack norm must be L1 or L2!"); fflush(stdout);
    exit(0);
  }


  epsilon=1.0;                    /* start with low precision and
				     increase later */
  tolerance=n/100;                /* increase precision, whenever less
                                     than that number of constraints
                                     is not fulfilled */
  lparm->biased_hyperplane=0;     /* set threshold to zero */

  cset=init_struct_constraints(sample, sm, sparm);
  if(cset.m > 0) {
    /* NOTE(review): realloc result is assigned straight back and not
       checked for NULL; on failure the old pointer would leak and the
       next line would dereference NULL. The same pattern recurs below —
       presumably acceptable here because the program aborts on OOM
       anyway, but worth confirming. */
    alpha=realloc(alpha,sizeof(double)*cset.m);
    for(i=0; i<cset.m; i++) 
      alpha[i]=0;
  }

  /* set initial model and slack variables*/
  svmModel=(MODEL *)my_malloc(sizeof(MODEL));
  /* sizePsi+n total dims: the trailing n dims hold the L2 slack
     variables (unused for L1 slack norm). */
  svm_learn_optimization(cset.lhs,cset.rhs,cset.m,sizePsi+n,
			 lparm,kparm,NULL,svmModel,alpha);
  add_weight_vector_to_linear_model(svmModel);
  sm->svm_model=svmModel;
  sm->w=svmModel->lin_weights; /* short cut to weight vector */

  printf("Starting Iterations\n");

    /*****************/
   /*** main loop ***/
  /*****************/
  do { /* iteratively increase precision */

    /* shrink epsilon by ~10x per outer round, clamped at the target */
    epsilon=MAX(epsilon*0.09999999999,sparm->epsilon);
    if(epsilon == sparm->epsilon)   /* for final precision, find all SV */
      tolerance=0;
    lparm->epsilon_crit=epsilon/2;  /* svm precision must be higher than eps */
    if(struct_verbosity>=1)
      printf("Setting current working precision to %g.\n",epsilon);

    do { /* iteration until (approx) all SV are found for current
            precision and tolerance */
      
      old_numConst=cset.m;
      opti_round++;
      activenum=n;

      do { /* go through examples that keep producing new constraints */

	if(struct_verbosity>=1) { 
	  printf("--Iteration %i (%ld active): ",++numIt,activenum); 
	  fflush(stdout);
	}
	
	for(i=0; i<n; i++) { /*** example loop ***/
	  
	  rt1=get_runtime();
	    
	  if(opti[i] != opti_round) {/* if the example is not shrunk
	                                away, then see if it is necessary to 
					add a new constraint */
	    if(sparm->loss_type == SLACK_RESCALING) 
	      ybar=find_most_violated_constraint_slackrescaling(ex[i].x,
								ex[i].y,sm,
								sparm);
	    else
	      ybar=find_most_violated_constraint_marginrescaling(ex[i].x,
								 ex[i].y,sm,
								 sparm);
	    
	    if(empty_label(ybar)) {
	      /* no violating label exists: shrink this example for the
	         rest of the current round */
	      if(opti[i] != opti_round) {
		activenum--;
		opti[i]=opti_round; 
	      }
	      if(struct_verbosity>=2)
		printf("no-incorrect-found(%i) ",i);
	      continue;
	    }
	  
	    /**** get psi(y)-psi(ybar) ****/
	    fy=psi(ex[i].x,ex[i].y,sm,sparm);
	    fybar=psi(ex[i].x,ybar,sm,sparm);
	    
	    /**** scale feature vector and margin by loss ****/
	    lossval=loss(ex[i].y,ybar,sparm);
	    if(sparm->slack_norm == 2)
	      lossval=sqrt(lossval);
	    if(sparm->loss_type == SLACK_RESCALING)
	      factor=lossval;
	    else               /* do not rescale vector for */
	      factor=1.0;      /* margin rescaling loss type */
	    for(f=fy;f;f=f->next)
	      f->factor*=factor;
	    for(f=fybar;f;f=f->next)
	      f->factor*=-factor;   /* negated: fy list will represent fy-fybar */
	    margin=lossval;

	    /**** create constraint for current ybar ****/
	    append_svector_list(fy,fybar);/* append the two vector lists */
	    /* slackid is i+1: slack ids are 1-based, 0 is reserved */
	    doc=create_example(cset.m,0,i+1,1,fy);

	    /**** compute slack for this example ****/
	    /* xi_i = max over all existing constraints with this slackid
	       of (rhs - model score) */
	    slack=0;
	    for(j=0;j<cset.m;j++) 
	      if(cset.lhs[j]->slackid == i+1) {
		if(sparm->slack_norm == 2) /* works only for linear kernel */
		  /* subtract the slack-feature contribution (stored in the
		     extra weight dims sizePsi..sizePsi+n-1) from the score */
		  slack=MAX(slack,cset.rhs[j]
			          -(classify_example(svmModel,cset.lhs[j])
				    -sm->w[sizePsi+i]/(sqrt(2*sparm->C))));
		else
		  slack=MAX(slack,
			   cset.rhs[j]-classify_example(svmModel,cset.lhs[j]));
	      }
	    
	    /**** if `error' add constraint and recompute ****/
	    /* add only if violated by more than the working epsilon */
	    if((classify_example(svmModel,doc)+slack)<(margin-epsilon)) { 
	      if(struct_verbosity>=2)
		{printf("(%i) ",i); fflush(stdout);}
	      if(struct_verbosity==1)
		{printf("."); fflush(stdout);}
	      
	      /**** resize constraint matrix and add new constraint ****/
	      cset.m++;
	      cset.lhs=realloc(cset.lhs,sizeof(DOC *)*cset.m);
	      if(kparm->kernel_type == LINEAR) {
		diff=add_list_ss(fy); /* store difference vector directly */
		if(sparm->slack_norm == 1) 
		  cset.lhs[cset.m-1]=create_example(cset.m-1,0,i+1,1,
						    copy_svector(diff));
		else if(sparm->slack_norm == 2) {
		  /**** add squared slack variable to feature vector ****/
		  slackv[0].wnum=sizePsi+i;
		  slackv[0].weight=1/(sqrt(2*sparm->C));
		  slackv[1].wnum=0; /*terminator*/
		  slackvec=create_svector(slackv,"",1.0);
		  cset.lhs[cset.m-1]=create_example(cset.m-1,0,i+1,1,
						    add_ss(diff,slackvec));
		  free_svector(slackvec);
		}
		free_svector(diff);
	      }
	      else { /* kernel is used */
		if(sparm->slack_norm == 1) 
		  cset.lhs[cset.m-1]=create_example(cset.m-1,0,i+1,1,
						    copy_svector(fy));
		else if(sparm->slack_norm == 2)
		  exit(1);  /* unreachable: L2+kernel already rejected above */
	      }
	      cset.rhs=realloc(cset.rhs,sizeof(double)*cset.m);
	      cset.rhs[cset.m-1]=margin;
	      alpha=realloc(alpha,sizeof(double)*cset.m);
	      alpha[cset.m-1]=0;
	      newconstraints++;
	    }
	    else {
	      /* NOTE(review): this progress marker is printed regardless
	         of struct_verbosity, unlike every other progress print in
	         this loop — possibly unintentional. */
	      printf("+"); fflush(stdout); 
	      if(opti[i] != opti_round) {
		activenum--;
		opti[i]=opti_round; 
	      }
	    }

	    free_example(doc,0);
	    free_svector(fy); /* this also free's fybar */
	    free_label(ybar);
	  }

	  /**** get new QP solution ****/
	  /* re-solve after newconstretrain new constraints, or at the end
	     of the example pass if anything new was added */
	  if((newconstraints >= sparm->newconstretrain) 
	     || ((newconstraints > 0) && (i == n-1))) {
	    if(struct_verbosity>=1) {
	      printf("*");fflush(stdout);
	    }
	    rt2=get_runtime();
	    free_model(svmModel,0);
	    svmModel=(MODEL *)my_malloc(sizeof(MODEL));
	    /* Always get a new kernel cache. It is not possible to use the
	       same cache for two different training runs */
	    if(kparm->kernel_type != LINEAR)
	      kcache=kernel_cache_init(cset.m,lparm->kernel_cache_size);
	    /* Run the QP solver on cset. */
	    svm_learn_optimization(cset.lhs,cset.rhs,cset.m,sizePsi+n,
				   lparm,kparm,kcache,svmModel,alpha);
	    if(kcache)
	      kernel_cache_cleanup(kcache);
	    /* Always add weight vector, in case part of the kernel is
	       linear. If not, ignore the weight vector since its
	       content is bogus. */
	    add_weight_vector_to_linear_model(svmModel);
	    sm->svm_model=svmModel;
	    sm->w=svmModel->lin_weights; /* short cut to weight vector */
	    rt_opt+=MAX(get_runtime()-rt2,0);
	    
	    newconstraints=0;
	  }	

	  rt_total+=MAX(get_runtime()-rt1,0);
	} /* end of example loop */

	if(struct_verbosity>=1)
	  printf("(NumConst=%d, SV=%ld, Eps=%.4f)\n",cset.m,svmModel->sv_num-1,
		 svmModel->maxdiff);

      } while(activenum > 0);   /* repeat until all examples produced no
				   constraint at least once */

    } while((cset.m - old_numConst) > tolerance) ;

  } while(epsilon > sparm->epsilon);  

  if(struct_verbosity>=1) {
    /**** compute sum of slacks ****/
    /* slacks[] is indexed by 1-based slackid; slacks[0] stays 0 */
    slacks=(double *)my_malloc(sizeof(double)*(n+1));
    for(i=0; i<=n; i++) { 
      slacks[i]=0;
    }
    if(sparm->slack_norm == 1) {
      for(j=0;j<cset.m;j++) 
	slacks[cset.lhs[j]->slackid]=MAX(slacks[cset.lhs[j]->slackid],
			   cset.rhs[j]-classify_example(svmModel,cset.lhs[j]));
      }
    else if(sparm->slack_norm == 2) {
      for(j=0;j<cset.m;j++) 
	slacks[cset.lhs[j]->slackid]=MAX(slacks[cset.lhs[j]->slackid],
		cset.rhs[j]
	         -(classify_example(svmModel,cset.lhs[j])
		   -sm->w[sizePsi+cset.lhs[j]->slackid-1]/(sqrt(2*sparm->C))));
    }
    slacksum=0;
    for(i=0; i<=n; i++)  
      slacksum+=slacks[i];
    free(slacks);

    printf("Final epsilon on KKT-Conditions: %.5f\n",
	   MAX(svmModel->maxdiff,epsilon));
    printf("Total number of constraints added: %i\n",(int)cset.m);
    if(sparm->slack_norm == 1) {
      printf("Number of SV: %ld \n",svmModel->sv_num-1);
      printf("Number of non-zero slack variables: %ld (out of %ld)\n",
	     svmModel->at_upper_bound,n);
      printf("Norm of weight vector: |w|=%.5f\n",
	     model_length_s(svmModel,kparm));
    }
    else if(sparm->slack_norm == 2){ 
      printf("Number of SV: %ld (including %ld at upper bound)\n",
	     svmModel->sv_num-1,svmModel->at_upper_bound);
      printf("Norm of weight vector (including L2-loss): |w|=%.5f\n",
	     model_length_s(svmModel,kparm));
    }
    printf("Sum of slack variables: sum(xi_i)=%.5f\n",slacksum);
    printf("Norm of longest difference vector: ||Psi(x,y)-Psi(x,ybar)||=%.5f\n",
	   length_of_longest_document_vector(cset.lhs,cset.m,kparm));
    /* NOTE(review): /100.0 suggests get_runtime() returns hundredths of a
       second — confirm against its definition. Also divides by rt_total,
       which is 0 if no example-loop iteration ran; verify that cannot
       happen (n==0). */
    printf("Runtime in cpu-seconds: %.2f (%.2f%% for SVM optimization)\n",
	   rt_total/100.0, 100.0*rt_opt/rt_total);
  }
  if(struct_verbosity>=4)
    printW(sm->w,sizePsi,n,lparm->svm_c);

  if(svmModel) {
    /* hand the caller a copy so the working model below can be freed */
    sm->svm_model=copy_model(svmModel);
    sm->w=sm->svm_model->lin_weights; /* short cut to weight vector */
  }

  print_struct_learning_stats(sample,sm,cset,alpha,sparm);

  if(svmModel)
    free_model(svmModel,0);
  free(alpha); 
  free(opti); 
  free(cset.rhs); 
  for(i=0;i<cset.m;i++) 
    free_example(cset.lhs[i],1);
  free(cset.lhs);
}



?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
日韩亚洲欧美在线| 国产精品二三区| 国产免费观看久久| 亚洲高清免费视频| 国产福利一区在线| 欧美剧情片在线观看| 国产精品国产成人国产三级| 日韩精品一级中文字幕精品视频免费观看 | 青青草国产成人99久久| 99精品欧美一区二区三区小说| 日韩一区二区中文字幕| 亚洲制服丝袜一区| 色综合天天性综合| 国产色一区二区| 狠狠色狠狠色综合| 欧美不卡123| 偷拍亚洲欧洲综合| 欧美艳星brazzers| 亚洲精品高清视频在线观看| 成人v精品蜜桃久久一区| 久久久久久亚洲综合| 天天综合天天做天天综合| 色综合天天综合网国产成人综合天| 国产婷婷一区二区| 国产伦精一区二区三区| 欧美电影免费观看高清完整版在线 | 欧美日韩国产综合一区二区| 亚洲欧美乱综合| 91色乱码一区二区三区| 亚洲色大成网站www久久九九| 大尺度一区二区| 中文字幕中文字幕一区| 成人精品免费视频| 国产精品欧美一区喷水| www.日韩大片| 亚洲日本电影在线| 欧美午夜一区二区三区| 亚洲成人7777| 69堂亚洲精品首页| 六月丁香婷婷久久| 久久精品免视看| 成人性生交大片免费看中文| 国产精品国产三级国产三级人妇 | 一本色道综合亚洲| 亚洲精品一二三四区| 在线一区二区视频| 婷婷综合另类小说色区| 91精品国产综合久久香蕉麻豆| 婷婷综合久久一区二区三区| 精品久久久久久无| 成人自拍视频在线观看| 亚洲免费在线看| 欧美精品一级二级三级| 紧缚奴在线一区二区三区| 中文字幕不卡在线观看| 在线亚洲人成电影网站色www| 亚洲国产精品久久一线不卡| 欧美一区二区三区视频| 国产精品一区二区久久不卡| 亚洲视频一区在线| 欧美日韩大陆在线| 国产成人在线观看| 一区二区三区欧美在线观看| 日韩一二三区不卡| heyzo一本久久综合| 天堂成人国产精品一区| 久久影院电视剧免费观看| 99久久精品一区| 日韩激情中文字幕| 国产精品色眯眯| 91精品国产综合久久久久久漫画 | 色婷婷精品久久二区二区蜜臂av| 日韩国产高清影视| 国产精品欧美经典| 欧美一区二区三区男人的天堂| 国产99久久久国产精品免费看 | 欧美日韩国产免费一区二区| 狠狠色2019综合网| 午夜伦理一区二区| 国产精品久久久久久亚洲伦 | 国产精品传媒在线| 欧美一个色资源| 精品视频在线免费观看| 国产精品一二二区| 亚洲国产日韩一级| 国产精品夫妻自拍| 久久久久久久久久久黄色| 欧美性xxxxxx少妇| 99视频热这里只有精品免费| 蜜臀av一级做a爰片久久| 亚洲男帅同性gay1069| 久久久久久日产精品| 91精品久久久久久蜜臀| 91浏览器打开| 成人免费观看av| 精品一区二区在线播放| 亚洲高清免费视频| 一区二区三区四区精品在线视频| 精品成人a区在线观看| 3atv一区二区三区| 精品视频一区二区不卡| 一本一本久久a久久精品综合麻豆| 国产成人综合视频| 国产真实精品久久二三区| 免费看日韩a级影片| 亚洲成人一二三| 亚洲成av人片在www色猫咪| 日韩美女视频一区| 亚洲丝袜自拍清纯另类| 中文字幕一区二区日韩精品绯色| 久久精品一区二区三区av| 2欧美一区二区三区在线观看视频| 日韩亚洲欧美中文三级| 日韩欧美色综合网站| 欧美电视剧免费全集观看| 欧美成人激情免费网| 精品久久久久香蕉网| 精品国产凹凸成av人网站| 亚洲精品一区二区三区影院| 亚洲成人手机在线| 午夜精品福利一区二区蜜股av | 国产精品久久久久久久久图文区| 国产欧美精品一区| 国产精品久久久久久久第一福利| 国产精品视频一区二区三区不卡| 1区2区3区精品视频| 亚洲精品视频一区二区| 亚洲一区在线视频| 石原莉奈在线亚洲三区| 精彩视频一区二区| 成人黄动漫网站免费app| 99久久婷婷国产综合精品| 91在线精品秘密一区二区| 日本高清免费不卡视频| 欧美另类高清zo欧美| 欧美一区二区三区精品| 久久综合九色综合97婷婷女人| 久久久一区二区| 亚洲视频小说图片| 天天影视涩香欲综合网 | 婷婷成人综合网| 九九九精品视频| www.欧美色图| 欧美三区在线观看| 久久先锋资源网| 一区二区国产视频| 国产主播一区二区三区| av日韩在线网站| 欧美疯狂性受xxxxx喷水图片| 久久综合网色—综合色88| 日韩码欧中文字| 日本一区中文字幕| 成年人网站91| 日韩欧美的一区二区| ...av二区三区久久精品| 蜜桃精品视频在线| 91麻豆.com| 久久久久久久久久久黄色| 亚洲一区影音先锋| 国产不卡视频在线观看| 欧美剧在线免费观看网站| 国产精品久久久久久久久搜平片| 日韩国产在线一| 9l国产精品久久久久麻豆| 精品理论电影在线观看| 
亚洲国产日韩综合久久精品| 粉嫩av亚洲一区二区图片| 91精品国产色综合久久不卡电影| 国产精品理伦片| 国内成+人亚洲+欧美+综合在线 | 成人一二三区视频| 日韩欧美综合一区| 亚洲va欧美va天堂v国产综合| 成人aaaa免费全部观看| 久久日韩精品一区二区五区| 亚洲mv大片欧洲mv大片精品| 91麻豆精品视频| 国产欧美日韩在线观看| 蜜臀a∨国产成人精品| 欧美在线看片a免费观看| 国产精品动漫网站| 国产mv日韩mv欧美| 久久久精品中文字幕麻豆发布| 日韩高清在线观看| 欧美日韩精品免费观看视频| 综合自拍亚洲综合图不卡区| 国产福利视频一区二区三区| 日韩免费视频线观看| 天天综合天天做天天综合| 欧美图区在线视频| 亚洲精选在线视频| 91在线精品一区二区| 亚洲欧洲综合另类在线 | 国产一区二区精品在线观看| 欧美一区二区三区视频| 亚洲成a人片综合在线| 欧美中文字幕一二三区视频| 一区二区免费在线| 欧美色图在线观看| 日产国产高清一区二区三区|