亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關(guān)于我們
? 蟲蟲下載站

?? svm_struct_main.c

?? SVM Light的多分類源代碼
?? C
字號:
/***********************************************************************/
/*                                                                     */
/*   svm_struct_main.c                                                 */
/*                                                                     */
/*   Command line interface to the alignment learning module of the    */
/*   Support Vector Machine.                                           */
/*                                                                     */
/*   Author: Thorsten Joachims                                         */
/*   Date: 03.07.04                                                    */
/*                                                                     */
/*   Copyright (c) 2004  Thorsten Joachims - All rights reserved       */
/*                                                                     */
/*   This software is available for non-commercial use only. It must   */
/*   not be modified and distributed without prior permission of the   */
/*   author. The author is not responsible for implications from the   */
/*   use of this software.                                             */
/*                                                                     */
/***********************************************************************/


/* uncomment, if you want to use svm-learn out of C++ */
/* extern "C" { */
# include "../svm_light/svm_common.h"
# include "../svm_light/svm_learn.h"
# include "svm_struct_learn.h"
# include "svm_struct_common.h"
# include "../svm_struct_api.h"

#include <stdio.h>
#include <string.h>
#include <assert.h>
/* } */

char trainfile[200];           /* file with training examples */
char modelfile[200];           /* file for resulting classifier */

/* Parses the command line into the training/model file names, the two
   verbosity levels, and the three parameter structs. Defined below. */
void   read_input_parameters(int, char **, char *, char *,long *, long *,
			     STRUCT_LEARN_PARM *, LEARN_PARM *, KERNEL_PARM *);
/* Prints "(more)" and blocks until one character is read from stdin. */
void   wait_any_key();
/* Prints the command line usage/help text to stdout. */
void   print_help();


/* Entry point: parse options, read the training sample, run structural
   SVM learning, write the resulting model to disk, and release memory. */
int main (int argc, char* argv[])
{
  SAMPLE training_sample;          /* training examples read from trainfile */
  LEARN_PARM light_parm;           /* parameters of the SVM-light core */
  KERNEL_PARM kernel_parm;         /* kernel type and its parameters */
  STRUCT_LEARN_PARM struct_parm;   /* parameters of the structural learner */
  STRUCTMODEL model;               /* learned model (filled by learning) */

  read_input_parameters(argc,argv,trainfile,modelfile,&verbosity,
			&struct_verbosity,&struct_parm,&light_parm,
			&kernel_parm);

  /* read the training examples */
  if(struct_verbosity>=1) {
    printf("Reading training examples...");
    fflush(stdout);
  }
  training_sample=read_struct_examples(trainfile,&struct_parm);
  if(struct_verbosity>=1) {
    printf("done\n");
    fflush(stdout);
  }

  /* Do the learning and return the model. */
  svm_learn_struct(training_sample,&struct_parm,&light_parm,&kernel_parm,
		   &model);

  /* Warning: The model contains references to the original data 'docs'.
     If you want to free the original data, and only keep the model, you
     have to make a deep copy of 'model'. */
  if(struct_verbosity>=1) {
    printf("Writing learned model...");
    fflush(stdout);
  }
  write_struct_model(modelfile,&model,&struct_parm);
  if(struct_verbosity>=1) {
    printf("done\n");
    fflush(stdout);
  }

  free_struct_sample(training_sample);
  free_struct_model(model);

  return 0;
}

/*---------------------------------------------------------------------------*/

/* Fetch the value of a command line option. Advances *i past the option
   flag; aborts with an error message and the help text if the option is
   the last token on the command line (previously this read argv[argc],
   i.e. NULL, and passed it to strcpy/atof/atol -- undefined behavior). */
static char *opt_arg(int argc, char *argv[], long *i)
{
  (*i)++;
  if((*i)>=argc) {
    printf("\nOption %s requires an argument!\n\n",argv[(*i)-1]);
    print_help();
    exit(0);
  }
  return(argv[*i]);
}

/* Parses the command line: fills trainfile/modelfile, the verbosity
   levels, and the learning/kernel/struct parameter structs, applying
   defaults first and validating ranges afterwards. Exits with the help
   text on any invalid or missing argument. */
void read_input_parameters(int argc,char *argv[],char *trainfile,
			   char *modelfile,
			   long *verbosity,long *struct_verbosity, 
			   STRUCT_LEARN_PARM *struct_parm,
			   LEARN_PARM *learn_parm, KERNEL_PARM *kernel_parm)
{
  long i;
  char type[100];
  
  /* set default */
  /* these defaults correspond to the experiments in the paper*/
  struct_parm->C=0.01;
  struct_parm->slack_norm=1;
  struct_parm->epsilon=0.01;
  struct_parm->custom_argc=0;
  struct_parm->loss_function=0;
  struct_parm->loss_type=SLACK_RESCALING;
  struct_parm->newconstretrain=100;

  strcpy (modelfile, "svm_struct_model");
  strcpy (learn_parm->predfile, "trans_predictions");
  strcpy (learn_parm->alphafile, "");
  (*verbosity)=0;/*verbosity for svm_light*/
  (*struct_verbosity)=1; /*verbosity for struct learning portion*/
  learn_parm->biased_hyperplane=1;
  learn_parm->remove_inconsistent=0;
  learn_parm->skip_final_opt_check=0;
  learn_parm->svm_maxqpsize=10;
  learn_parm->svm_newvarsinqp=0;
  learn_parm->svm_iter_to_shrink=-9999;
  learn_parm->maxiter=100000;
  learn_parm->kernel_cache_size=40;
  learn_parm->svm_c=99999999; /* overridden by struct_parm->C */
  learn_parm->eps=0.01;
  learn_parm->transduction_posratio=-1.0;
  learn_parm->svm_costratio=1.0;
  learn_parm->svm_costratio_unlab=1.0;
  learn_parm->svm_unlabbound=1E-5;
  learn_parm->epsilon_crit=0.001;
  learn_parm->epsilon_a=1E-10;  /* changed from 1e-15 */
  learn_parm->compute_loo=0;
  learn_parm->rho=1.0;
  learn_parm->xa_depth=0;
  kernel_parm->kernel_type=0;
  kernel_parm->poly_degree=3;
  kernel_parm->rbf_gamma=1.0;
  kernel_parm->coef_lin=1;
  kernel_parm->coef_const=1;
  strcpy(kernel_parm->custom,"empty");
  strcpy(type,"c");

  /* Each option (except -?) consumes a value; opt_arg() verifies the
     value is actually present before it is dereferenced. */
  for(i=1;(i<argc) && ((argv[i])[0] == '-');i++) {
    switch ((argv[i])[1]) 
      { 
      case '?': print_help(); exit(0);
      case 'a': strcpy(learn_parm->alphafile,opt_arg(argc,argv,&i)); break;
      case 'c': struct_parm->C=atof(opt_arg(argc,argv,&i)); break;
      case 'p': struct_parm->slack_norm=atof(opt_arg(argc,argv,&i)); break;
      case 'e': struct_parm->epsilon=atof(opt_arg(argc,argv,&i)); break;
      case 'k': struct_parm->newconstretrain=atol(opt_arg(argc,argv,&i)); break;
      case 'h': learn_parm->svm_iter_to_shrink=atol(opt_arg(argc,argv,&i)); break;
      case '#': learn_parm->maxiter=atol(opt_arg(argc,argv,&i)); break;
      case 'm': learn_parm->kernel_cache_size=atol(opt_arg(argc,argv,&i)); break;
      case 'o': struct_parm->loss_type=atol(opt_arg(argc,argv,&i)); break;
      case 'n': learn_parm->svm_newvarsinqp=atol(opt_arg(argc,argv,&i)); break;
      case 'q': learn_parm->svm_maxqpsize=atol(opt_arg(argc,argv,&i)); break;
      case 'l': struct_parm->loss_function=atol(opt_arg(argc,argv,&i)); break;
      case 't': kernel_parm->kernel_type=atol(opt_arg(argc,argv,&i)); break;
      case 'd': kernel_parm->poly_degree=atol(opt_arg(argc,argv,&i)); break;
      case 'g': kernel_parm->rbf_gamma=atof(opt_arg(argc,argv,&i)); break;
      case 's': kernel_parm->coef_lin=atof(opt_arg(argc,argv,&i)); break;
      case 'r': kernel_parm->coef_const=atof(opt_arg(argc,argv,&i)); break;
      case 'u': strcpy(kernel_parm->custom,opt_arg(argc,argv,&i)); break;
      case '-': /* "--x value": store the flag itself and its value for
		   the application-specific parser called below. */
	        strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],argv[i]);
	        strcpy(struct_parm->custom_argv[struct_parm->custom_argc++],
		       opt_arg(argc,argv,&i));
	        break; 
      case 'v': (*struct_verbosity)=atol(opt_arg(argc,argv,&i)); break;
      case 'y': (*verbosity)=atol(opt_arg(argc,argv,&i)); break;
      default: printf("\nUnrecognized option %s!\n\n",argv[i]);
	       print_help();
	       exit(0);
      }
  }
  /* The first non-option argument is the training file (mandatory),
     the second (optional) overrides the default model file name. */
  if(i>=argc) {
    printf("\nNot enough input parameters!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  strcpy (trainfile, argv[i]);
  if((i+1)<argc) {
    strcpy (modelfile, argv[i+1]);
  }
  /* -9999 marks "not set on the command line": pick a kernel-dependent
     default for the shrinking heuristic. */
  if(learn_parm->svm_iter_to_shrink == -9999) {
    if(kernel_parm->kernel_type == LINEAR) 
      learn_parm->svm_iter_to_shrink=2;
    else
      learn_parm->svm_iter_to_shrink=100;
  }

  if((learn_parm->skip_final_opt_check) 
     && (kernel_parm->kernel_type == LINEAR)) {
    printf("\nIt does not make sense to skip the final optimality check for linear kernels.\n\n");
    learn_parm->skip_final_opt_check=0;
  }    
  if((learn_parm->skip_final_opt_check) 
     && (learn_parm->remove_inconsistent)) {
    printf("\nIt is necessary to do the final optimality check when removing inconsistent \nexamples.\n");
    wait_any_key();
    print_help();
    exit(0);
  }    
  if((learn_parm->svm_maxqpsize<2)) {
    printf("\nMaximum size of QP-subproblems not in valid range: %ld [2..]\n",learn_parm->svm_maxqpsize); 
    wait_any_key();
    print_help();
    exit(0);
  }
  if((learn_parm->svm_maxqpsize<learn_parm->svm_newvarsinqp)) {
    printf("\nMaximum size of QP-subproblems [%ld] must be larger than the number of\n",learn_parm->svm_maxqpsize); 
    printf("new variables [%ld] entering the working set in each iteration.\n",learn_parm->svm_newvarsinqp); 
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->svm_iter_to_shrink<1) {
    printf("\nMaximum number of iterations for shrinking not in valid range: %ld [1,..]\n",learn_parm->svm_iter_to_shrink);
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->svm_c<0) {
    printf("\nThe C parameter must be greater than zero!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->transduction_posratio>1) {
    printf("\nThe fraction of unlabeled examples to classify as positives must\n");
    printf("be less than 1.0 !!!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->svm_costratio<=0) {
    printf("\nThe COSTRATIO parameter must be greater than zero!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(struct_parm->epsilon<=0) {
    printf("\nThe epsilon parameter must be greater than zero!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((struct_parm->slack_norm<1) || (struct_parm->slack_norm>2)) {
    printf("\nThe norm of the slacks must be either 1 (L1-norm) or 2 (L2-norm)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((struct_parm->loss_type != SLACK_RESCALING) 
     && (struct_parm->loss_type != MARGIN_RESCALING)) {
    printf("\nThe loss type must be either 1 (slack rescaling) or 2 (margin rescaling)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if(learn_parm->rho<0) {
    printf("\nThe parameter rho for xi/alpha-estimates and leave-one-out pruning must\n");
    printf("be greater than zero (typically 1.0 or 2.0, see T. Joachims, Estimating the\n");
    printf("Generalization Performance of an SVM Efficiently, ICML, 2000.)!\n\n");
    wait_any_key();
    print_help();
    exit(0);
  }
  if((learn_parm->xa_depth<0) || (learn_parm->xa_depth>100)) {
    printf("\nThe parameter depth for ext. xi/alpha-estimates must be in [0..100] (zero\n");
    printf("for switching to the conventional xa/estimates described in T. Joachims,\n");
    printf("Estimating the Generalization Performance of an SVM Efficiently, ICML, 2000.)\n");
    wait_any_key();
    print_help();
    exit(0);
  }

  /* Hand the collected "--" options to the application-specific parser. */
  parse_struct_parameters(struct_parm);
}

/* Print "(more)" and block until one character is read from stdin. */
void wait_any_key()
{
  fputs("\n(more)\n",stdout);
  (void)getc(stdin);
}

/* Print the command line usage/help text to stdout, including version
   info, all option descriptions, and literature references.
   Fixes two defects in the reference strings: the typo "Aproach" and
   the mojibake "Sch鰈kopf" (garbled encoding of "Schoelkopf"). */
void print_help()
{
  printf("\nSVM-struct learning module: %s, %s, %s\n",INST_NAME,INST_VERSION,INST_VERSION_DATE);
  printf("   includes SVM-struct %s for learning complex outputs, %s\n",STRUCT_VERSION,STRUCT_VERSION_DATE);
  printf("   includes SVM-light %s quadratic optimizer, %s\n",VERSION,VERSION_DATE);
  copyright_notice();
  printf("   usage: svm_struct_learn [options] example_file model_file\n\n");
  printf("Arguments:\n");
  printf("         example_file-> file with training data\n");
  printf("         model_file  -> file to store learned decision rule in\n");

  printf("General options:\n");
  printf("         -?          -> this help\n");
  printf("         -v [0..3]   -> verbosity level (default 1)\n");
  printf("         -y [0..3]   -> verbosity level for svm_light (default 0)\n");
  printf("Learning options:\n");
  printf("         -c float    -> C: trade-off between training error\n");
  printf("                        and margin (default 0.01)\n");
  printf("         -p [1,2]    -> L-norm to use for slack variables. Use 1 for L1-norm,\n");
  printf("                        use 2 for squared slacks. (default 1)\n");
  printf("         -o [1,2]    -> Slack rescaling method to use for loss.\n");
  printf("                        1: slack rescaling\n");
  printf("                        2: margin rescaling\n");
  printf("                        (default 1)\n");
  printf("         -l [0..]    -> Loss function to use.\n");
  printf("                        0: zero/one loss\n");
  printf("                        (default 0)\n");
  printf("Kernel options:\n");
  printf("         -t int      -> type of kernel function:\n");
  printf("                        0: linear (default)\n");
  printf("                        1: polynomial (s a*b+c)^d\n");
  printf("                        2: radial basis function exp(-gamma ||a-b||^2)\n");
  printf("                        3: sigmoid tanh(s a*b + c)\n");
  printf("                        4: user defined kernel from kernel.h\n");
  printf("         -d int      -> parameter d in polynomial kernel\n");
  printf("         -g float    -> parameter gamma in rbf kernel\n");
  printf("         -s float    -> parameter s in sigmoid/poly kernel\n");
  printf("         -r float    -> parameter c in sigmoid/poly kernel\n");
  printf("         -u string   -> parameter of user defined kernel\n");
  printf("Optimization options (see [2][3]):\n");
  printf("         -q [2..]    -> maximum size of QP-subproblems (default 10)\n");
  printf("         -n [2..q]   -> number of new variables entering the working set\n");
  printf("                        in each iteration (default n = q). Set n<q to prevent\n");
  printf("                        zig-zagging.\n");
  printf("         -m [5..]    -> size of cache for kernel evaluations in MB (default 40)\n");
  printf("                        The larger the faster...\n");
  printf("         -e float    -> eps: Allow that error for termination criterion\n");
  printf("                        (default 0.01)\n");
  printf("         -h [5..]    -> number of iterations a variable needs to be\n"); 
  printf("                        optimal before considered for shrinking (default 100)\n");
  printf("         -k [1..]    -> number of new constraints to accumulate before\n"); 
  printf("                        recomputing the QP solution (default 100)\n");
  printf("         -# int      -> terminate optimization, if no progress after this\n");
  printf("                        number of iterations. (default 100000)\n");
  printf("Output options:\n");
  printf("         -a string   -> write all alphas to this file after learning\n");
  printf("                        (in the same order as in the training set)\n");
  printf("Structure learning options:\n");
  print_struct_help();
  wait_any_key();

  printf("\nMore details in:\n");
  printf("[1] T. Joachims, Learning to Align Sequences: A Maximum Margin Approach.\n");
  printf("    Technical Report, September, 2003.\n");
  printf("[2] I. Tsochantaridis, T. Hofmann, T. Joachims, and Y. Altun, Support Vector \n");
  printf("    Learning for Interdependent and Structured Output Spaces, ICML, 2004.\n");
  printf("[3] T. Joachims, Making Large-Scale SVM Learning Practical. Advances in\n");
  printf("    Kernel Methods - Support Vector Learning, B. Schoelkopf and C. Burges and\n");
  printf("    A. Smola (ed.), MIT Press, 1999.\n");
  printf("[4] T. Joachims, Learning to Classify Text Using Support Vector\n");
  printf("    Machines: Methods, Theory, and Algorithms. Dissertation, Kluwer,\n");
  printf("    2002.\n\n");
}



?? 快捷鍵說明

復(fù)制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
国产最新精品免费| 国产精品国产精品国产专区不片| 日韩丝袜情趣美女图片| 亚洲精品在线观| 国产精品福利在线播放| 亚洲福利视频一区二区| 久久99精品久久久久久久久久久久 | 亚洲一区二区三区精品在线| 石原莉奈一区二区三区在线观看| 蜜臀av国产精品久久久久 | 中文字幕不卡在线观看| 亚洲网友自拍偷拍| 国产精品一区二区久久不卡| 日本韩国欧美三级| 久久久久久97三级| 亚洲成av人片| 国产白丝网站精品污在线入口| 欧美日韩中文国产| 国产拍欧美日韩视频二区| 亚洲另类色综合网站| 极品美女销魂一区二区三区| 91网站最新地址| 欧美v国产在线一区二区三区| 亚洲人成亚洲人成在线观看图片| 久久99国产精品久久99| 91激情五月电影| 中文字幕高清一区| 久久成人羞羞网站| 欧美日韩亚洲综合一区二区三区| 国产日韩欧美电影| 蜜臀91精品一区二区三区| 色婷婷久久久久swag精品| 久久久久久久网| 日韩精品一二三| 91福利区一区二区三区| 久久新电视剧免费观看| 日韩精品一二区| 在线免费视频一区二区| 国产精品欧美极品| 激情综合网最新| 日韩一区二区三区在线| 亚洲成人久久影院| 91一区二区在线| 国产女人水真多18毛片18精品视频| 婷婷久久综合九色综合伊人色| 99r国产精品| 日本一二三不卡| 久久97超碰国产精品超碰| 欧美久久久久久久久| 中文字幕佐山爱一区二区免费| 国产乱一区二区| 精品少妇一区二区三区视频免付费| 亚洲成a人v欧美综合天堂| 色婷婷激情综合| 亚洲视频一区二区在线| 丁香婷婷深情五月亚洲| 久久午夜羞羞影院免费观看| 日本大胆欧美人术艺术动态| 欧美日韩另类一区| 亚洲综合一区二区精品导航| 91在线国产观看| 国产精品久久久久久久久免费樱桃| 国产精品夜夜爽| 久久久久综合网| 国产精品综合在线视频| www激情久久| 国产综合成人久久大片91| 欧美xxxx老人做受| 久久99精品网久久| 久久欧美中文字幕| 国产成人亚洲综合a∨婷婷| 久久免费视频色| 国产精品资源在线看| 国产亚洲一二三区| 不卡的av电影在线观看| 国产精品理伦片| 成人的网站免费观看| 中文字幕中文在线不卡住| 成人av午夜影院| 亚洲欧洲日韩女同| 91捆绑美女网站| 亚洲高清免费观看| 日韩欧美中文字幕一区| 激情文学综合丁香| 久久精品一区八戒影视| 成人午夜av影视| 亚洲啪啪综合av一区二区三区| 色网站国产精品| 视频一区二区不卡| 精品国产乱码久久久久久久久 | 国产精品久久久久久久浪潮网站| 99国产欧美另类久久久精品| 亚洲免费在线电影| 欧美欧美欧美欧美首页| 蜜桃精品视频在线观看| 2欧美一区二区三区在线观看视频| 国模无码大尺度一区二区三区| 欧美激情一区二区三区蜜桃视频| 99久久er热在这里只有精品15| 亚洲精品免费播放| 538prom精品视频线放| 国产一区免费电影| 日韩美女久久久| 欧美日韩情趣电影| 国产一区二区三区观看| 一色屋精品亚洲香蕉网站| 欧美中文字幕亚洲一区二区va在线 | 国产一区二区视频在线| 国产精品国产a级| 欧美日韩另类一区| 韩国一区二区视频| 亚洲欧洲另类国产综合| 欧美三级韩国三级日本一级| 精品在线播放午夜| 亚洲欧美日韩在线| 日韩免费在线观看| 色综合久久久久网| 久久超碰97中文字幕| 中文字幕一区免费在线观看| 91高清视频在线| 国产麻豆午夜三级精品| 一区二区三区四区激情| 欧美精品一区二区三区一线天视频| 成人综合在线网站| 欧美aⅴ一区二区三区视频| 欧美激情综合网| 91麻豆精品国产91久久久久久久久 | 欧美视频一区在线观看| 国产乱子伦视频一区二区三区| 亚洲国产一区二区在线播放| 国产婷婷色一区二区三区| 欧美日韩精品一区二区天天拍小说| 国产精品亚洲一区二区三区在线 | 欧美日韩一级黄| 国产老肥熟一区二区三区| 亚洲一二三区在线观看| 久久久一区二区| 欧美日韩国产天堂| 91小视频免费看| 国产真实精品久久二三区| 午夜精品123| 亚洲欧洲综合另类| 久久久777精品电影网影网 | 精品影院一区二区久久久| 亚洲最快最全在线视频| 国产清纯白嫩初高生在线观看91| 欧美高清性hdvideosex| 99精品一区二区三区| 国产一区二区不卡| 美女视频一区在线观看| 亚洲一区在线免费观看| 国产精品久久777777| 欧美成人性福生活免费看| 欧美日韩一区视频| 91麻豆视频网站| 成人激情综合网站| 国产在线播放一区| 久久99国产精品尤物| 日韩和欧美一区二区| 亚洲国产精品人人做人人爽| 亚洲欧洲国产日本综合| 国产日韩三级在线| 日韩免费视频线观看| 91精品国产福利在线观看 | 
午夜视频在线观看一区二区| 亚洲色大成网站www久久九九| 国产精品视频免费| 国产欧美一区二区精品婷婷| 精品sm捆绑视频| 精品久久久久久久久久久久久久久| 欧美精品 国产精品| 欧美日韩激情在线| 精品视频免费看| 欧美亚洲高清一区| 欧美视频在线一区二区三区| 欧美在线不卡视频| 欧美专区亚洲专区| 欧美视频一区在线观看| 欧美日韩国产免费| 欧美卡1卡2卡| 日韩一区二区在线播放| 日韩欧美亚洲另类制服综合在线| 欧美一区二区三区视频| 6080国产精品一区二区| 777久久久精品| 日韩免费视频线观看| 久久免费视频色| 国产精品天干天干在线综合| 国产精品久久毛片a| 亚洲精品欧美专区| 夜夜嗨av一区二区三区| 亚洲国产一区二区a毛片| 亚洲成a天堂v人片| 麻豆精品久久久| 国产精品18久久久久| 成人蜜臀av电影| 色婷婷久久久久swag精品| 欧美午夜片在线观看| 宅男在线国产精品| 久久久久国产精品麻豆|