svm_common.h

This is the source package of SVM-light (a support vector machine implementation) that I recently obtained. The package is mainly used for text classification.
/************************************************************************/
/*                                                                      */
/*   svm_common.h                                                       */
/*                                                                      */
/*   Definitions and functions used in both svm_learn and svm_classify. */
/*                                                                      */
/*   Author: Thorsten Joachims                                          */
/*   Date: 02.07.02                                                     */
/*                                                                      */
/*   Copyright (c) 2002  Thorsten Joachims - All rights reserved        */
/*                                                                      */
/*   This software is available for non-commercial use only. It must    */
/*   not be modified and distributed without prior permission of the    */
/*   author. The author is not responsible for implications from the    */
/*   use of this software.                                              */
/*                                                                      */
/************************************************************************/

#ifndef SVM_COMMON
#define SVM_COMMON

# define MAXSHRINK     50000    /* maximum number of shrinking rounds */
# define MAXFEATNUM 99999999    /* maximum feature number (must be in
			  	   valid range of long int type!) */

# include <stdio.h>
# include <ctype.h>
# include <math.h>
# include <string.h>
# include <stdlib.h>
# include <time.h> 
# include <float.h>

# define VERSION       "V6.01"
# define VERSION_DATE  "01.09.04"

# define CFLOAT  float       /* the type of float to use for caching */
                             /* kernel evaluations. Using float saves */
                             /* us some memory, but you can use double, too */
# define FNUM    long        /* the type used for storing feature ids */
# define FVAL    float       /* the type used for storing feature values */

# define LINEAR  0           /* linear kernel type */
# define POLY    1           /* polynomial kernel type */
# define RBF     2           /* rbf kernel type */
# define SIGMOID 3           /* sigmoid kernel type */

# define CLASSIFICATION 1    /* train classification model */
# define REGRESSION     2    /* train regression model */
# define RANKING        3    /* train ranking model */
# define OPTIMIZATION   4    /* train on general set of constraints */

typedef struct word {
  FNUM    wnum;	               /* word number */
  FVAL    weight;              /* word weight */
} WORD;

typedef struct svector {
  WORD    *words;              /* The features/values in the vector by
				  increasing feature-number. Feature
				  numbers that are skipped are
				  interpreted as having value zero. */
  double  twonorm_sq;          /* The squared euclidean length of the
                                  vector. Used to speed up the RBF kernel. */
  char    *userdefined;        /* You can put additional information
				  here. This can be useful if you are
				  implementing your own kernel that
				  does not work with feature/value
				  representations (for example a
				  string kernel). By default,
				  svm-light will put here the string
				  after the # sign from each line of
				  the input file. */
  long    kernel_id;           /* Feature vectors with different
				  kernel_id's are orthogonal (i.e. the
				  feature numbers do not match). This
				  is used for computing component
				  kernels for linear constraints which
				  are a sum of several different
				  weight vectors. (currently not
				  implemented). */
  struct svector *next;        /* Lets you set up a list of SVECTOR's
				  for linear constraints which are a
				  sum of multiple feature
				  vectors. List is terminated by
				  NULL. */
  double  factor;              /* Factor by which this feature vector
				  is multiplied in the sum. */
} SVECTOR;
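
/* Illustrative usage sketch, not part of the original header: building an
   SVECTOR from a WORD array and taking a sparse dot product with sprod_ss(),
   both declared further below.  The convention that the WORD array ends with
   an entry whose wnum is 0 is taken from the rest of the SVM-light package
   and is assumed here.

     WORD    words[3];
     SVECTOR *a;

     words[0].wnum = 1;  words[0].weight = 0.5;   // feature 1
     words[1].wnum = 7;  words[1].weight = 2.0;   // feature 7; 2..6 are zero
     words[2].wnum = 0;  words[2].weight = 0.0;   // terminator

     a = create_svector(words, "", 1.0);          // userdefined="", factor=1
     printf("||a||^2 = %f\n", sprod_ss(a, a));    // should equal a->twonorm_sq
     free_svector(a);
*/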

typedef struct doc {
  long    docnum;              /* Document ID. This has to be the position of 
                                  the document in the training set array. */
  long    queryid;             /* for learning rankings, constraints are 
				  generated for documents with the same 
				  queryID. */
  double  costfactor;          /* Scales the cost of misclassifying this
				  document by this factor. The effect is
				  that the upper bound on the alpha for
				  this example is scaled by this factor.
				  The factors are set by the feature 
				  'cost:<val>' in the training data. */
  long    slackid;             /* Index of the slack variable
				  corresponding to this
				  constraint. All constraints with the
				  same slackid share the same slack
				  variable. This can only be used for
				  svm_learn_optimization. */
  SVECTOR *fvec;               /* Feature vector of the example. The
				  feature vector can actually be a
				  list of feature vectors. For
				  example, the list has two elements
				  if this DOC is a preference
				  constraint: the vector that should
				  be ranked higher has a factor of +1,
				  the lower-ranked one a factor of -1. */
} DOC;
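
/* Illustrative sketch, not part of the original header: wrapping feature
   vectors in a DOC with create_example(), here as a ranking preference
   constraint expressed as a two-element vector list with factors +1 and -1,
   as described in the fvec comment above.  words_hi and words_lo stand for
   hypothetical zero-terminated WORD arrays as in the previous sketch.

     SVECTOR *hi = create_svector(words_hi, "", +1.0);  // to be ranked higher
     SVECTOR *lo = create_svector(words_lo, "", -1.0);  // to be ranked lower
     DOC     *doc;

     append_svector_list(hi, lo);   // link the two vectors: hi->next == lo
     doc = create_example(0,        // docnum: position in the training array
                          1,        // queryid: same query as its partner docs
                          0,        // slackid: only used by svm_learn_optimization
                          1.0,      // costfactor: default misclassification cost
                          hi);
     ...
     free_example(doc, 1);          // deep free, releases the vectors as well
*/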

typedef struct learn_parm {
  long   type;                 /* selects between regression and
				  classification */
  double svm_c;                /* upper bound C on alphas */
  double eps;                  /* regression epsilon (eps=1.0 for
				  classification) */
  double svm_costratio;        /* factor to multiply C for positive examples */
  double transduction_posratio;/* fraction of unlabeled examples to be */
                               /* classified as positives */
  long   biased_hyperplane;    /* if nonzero, use hyperplane w*x+b=0;
				  otherwise w*x=0 */
  long   sharedslack;          /* if nonzero, it will use the shared
                                  slack variable mode in
                                  svm_learn_optimization. It requires
                                  that the slackid is set for every
                                  training example */
  long   svm_maxqpsize;        /* size q of working set */
  long   svm_newvarsinqp;      /* new variables to enter the working set 
				  in each iteration */
  long   kernel_cache_size;    /* size of kernel cache in megabytes */
  double epsilon_crit;         /* tolerable error for distances used 
				  in stopping criterion */
  double epsilon_shrink;       /* how much a multiplier should be above 
				  zero for shrinking */
  long   svm_iter_to_shrink;   /* iterations h after which an example can
				  be removed by shrinking */
  long   maxiter;              /* number of iterations after which the
				  optimizer terminates, if there was
				  no progress in maxdiff */
  long   remove_inconsistent;  /* exclude examples with alpha at C and 
				  retrain */
  long   skip_final_opt_check; /* do not check KT-Conditions at the end of
				  optimization for examples removed by 
				  shrinking. WARNING: This might lead to 
				  sub-optimal solutions! */
  long   compute_loo;          /* if nonzero, computes leave-one-out
				  estimates */
  double rho;                  /* parameter in xi/alpha-estimates and for
				  pruning leave-one-out; range [1..2] */
  long   xa_depth;             /* parameter in xi/alpha-estimates upper
				  bounding the number of SV the current
				  alpha_t is distributed over */
  char predfile[200];          /* file for predictions on unlabeled examples
				  in transduction */
  char alphafile[200];         /* file to store optimal alphas in. use  
				  empty string if alphas should not be 
				  output */

  /* you probably do not want to touch the following */
  double epsilon_const;        /* tolerable error on eq-constraint */
  double epsilon_a;            /* tolerable error on alphas at bounds */
  double opt_precision;        /* precision of solver, set to e.g. 1e-21 
				  if you get convergence problems */

  /* the following are only for internal use */
  long   svm_c_steps;          /* do so many steps for finding optimal C */
  double svm_c_factor;         /* increase C by this factor every step */
  double svm_costratio_unlab;
  double svm_unlabbound;
  double *svm_cost;            /* individual upper bounds for each var */
  long   totwords;             /* number of features */
} LEARN_PARM;
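
/* Illustrative sketch, not part of the original header: the LEARN_PARM fields
   a caller typically fills before training.  The values are example choices,
   not the package defaults; zeroing the struct first keeps the internal-use
   fields in a defined state.

     LEARN_PARM lparm;

     memset(&lparm, 0, sizeof(lparm));
     lparm.type              = CLASSIFICATION;
     lparm.svm_c             = 1.0;      // upper bound C on the alphas
     lparm.eps               = 1.0;      // see the eps comment above
     lparm.svm_costratio     = 1.0;      // no extra weight on positive examples
     lparm.biased_hyperplane = 1;        // learn w*x+b=0 rather than w*x=0
     lparm.kernel_cache_size = 40;       // megabytes of kernel cache
     lparm.epsilon_crit      = 0.001;    // stopping-criterion tolerance
     lparm.maxiter           = 100000;
     strcpy(lparm.alphafile, "");        // do not write the alphas to a file
*/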

typedef struct kernel_parm {
  long    kernel_type;   /* 0=linear, 1=poly, 2=rbf, 3=sigmoid, 4=custom */
  long    poly_degree;
  double  rbf_gamma;
  double  coef_lin;
  double  coef_const;
  char    custom[50];    /* for user supplied kernel */
} KERNEL_PARM;
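
/* Illustrative sketch, not part of the original header: filling a KERNEL_PARM
   for an RBF kernel.  The gamma value is an example choice; kernel type 2 is
   evaluated as exp(-rbf_gamma * ||a-b||^2).

     KERNEL_PARM kparm;

     kparm.kernel_type = RBF;
     kparm.poly_degree = 3;          // ignored for RBF
     kparm.rbf_gamma   = 0.1;
     kparm.coef_lin    = 1.0;        // ignored for RBF
     kparm.coef_const  = 1.0;        // ignored for RBF
     strcpy(kparm.custom, "empty");  // placeholder unless a custom kernel is used
*/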

typedef struct model {
  long    sv_num;	
  long    at_upper_bound;
  double  b;
  DOC     **supvec;
  double  *alpha;
  long    *index;       /* index from docnum to position in model */
  long    totwords;     /* number of features */
  long    totdoc;       /* number of training documents */
  KERNEL_PARM kernel_parm; /* kernel */

  /* the following values are not written to file */
  double  loo_error,loo_recall,loo_precision; /* leave-one-out estimates */
  double  xa_error,xa_recall,xa_precision;    /* xi/alpha estimates */
  double  *lin_weights;                       /* weights for linear case using
						 folding */
  double  maxdiff;                            /* precision up to which this 
						 model is accurate */
} MODEL;

typedef struct quadratic_program {
  long   opt_n;            /* number of variables */
  long   opt_m;            /* number of linear equality constraints */
  double *opt_ce,*opt_ce0; /* linear equality constraints */
  double *opt_g;           /* hessian of objective */
  double *opt_g0;          /* linear part of objective */
  double *opt_xinit;       /* initial value for variables */
  double *opt_low,*opt_up; /* box constraints */
} QP;
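
/* Reading the fields together, the quadratic program they describe is,
   up to the sign conventions of the particular solver:

     minimize    0.5 * x' * opt_g * x  +  opt_g0' * x
     subject to  opt_ce' * x + opt_ce0 = 0     (opt_m equality constraints)
                 opt_low <= x <= opt_up        (box constraints)

   with x of dimension opt_n and opt_xinit as the starting point. */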

typedef struct kernel_cache {
  long   *index;  /* cache some kernel evaluations */
  CFLOAT *buffer; /* to improve speed */
  long   *invindex;
  long   *active2totdoc;
  long   *totdoc2active;
  long   *lru;
  long   *occu;
  long   elems;
  long   max_elems;
  long   time;
  long   activenum;
  long   buffsize;
} KERNEL_CACHE;


typedef struct timing_profile {
  long   time_kernel;
  long   time_opti;
  long   time_shrink;
  long   time_update;
  long   time_model;
  long   time_check;
  long   time_select;
} TIMING;

typedef struct shrink_state {
  long   *active;
  long   *inactive_since;
  long   deactnum;
  double **a_history;  /* for shrinking with non-linear kernel */
  long   maxhistory;
  double *last_a;      /* for shrinking with linear kernel */
  double *last_lin;    /* for shrinking with linear kernel */
} SHRINK_STATE;

double classify_example(MODEL *, DOC *);
double classify_example_linear(MODEL *, DOC *);
CFLOAT kernel(KERNEL_PARM *, DOC *, DOC *); 
CFLOAT single_kernel(KERNEL_PARM *, SVECTOR *, SVECTOR *); 
double custom_kernel(KERNEL_PARM *, SVECTOR *, SVECTOR *); 
SVECTOR *create_svector(WORD *, char *, double);
SVECTOR *copy_svector(SVECTOR *);
void   free_svector(SVECTOR *);
double    sprod_ss(SVECTOR *, SVECTOR *);
SVECTOR*  sub_ss(SVECTOR *, SVECTOR *); 
SVECTOR*  add_ss(SVECTOR *, SVECTOR *); 
SVECTOR*  add_list_ss(SVECTOR *); 
void      append_svector_list(SVECTOR *a, SVECTOR *b);
SVECTOR*  smult_s(SVECTOR *, double);
int       featvec_eq(SVECTOR *, SVECTOR *); 
double model_length_s(MODEL *, KERNEL_PARM *);
void   clear_vector_n(double *, long);
void   add_vector_ns(double *, SVECTOR *, double);
double sprod_ns(double *, SVECTOR *);
void   add_weight_vector_to_linear_model(MODEL *);
DOC    *create_example(long, long, long, double, SVECTOR *);
void   free_example(DOC *, long);
MODEL  *read_model(char *);
MODEL  *copy_model(MODEL *);
void   free_model(MODEL *, int);
void   read_documents(char *, DOC ***, double **, long *, long *);
int    parse_document(char *, WORD *, double *, long *, long *, double *, long *, long, char **);
double *read_alphas(char *,long);
void   nol_ll(char *, long *, long *, long *);
long   minl(long, long);
long   maxl(long, long);
long   get_runtime(void);
int    space_or_null(int);
void   *my_malloc(size_t); 
void   copyright_notice(void);
# ifdef _MSC_VER
   int isnan(double);
# endif
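
/* Illustrative sketch, not part of the original header: classifying one
   example with the functions declared above.  "svm_model" is a hypothetical
   model-file name and words a zero-terminated WORD array as in the earlier
   sketches; error handling is omitted.

     MODEL *model = read_model("svm_model");
     DOC   *doc   = create_example(0, 0, 0, 1.0, create_svector(words, "", 1.0));
     double dist  = classify_example(model, doc);   // works for any kernel type

     if (model->kernel_parm.kernel_type == LINEAR) {
       // Fold the support vectors into a single weight vector once, then use
       // the faster linear classification path for all further examples.
       add_weight_vector_to_linear_model(model);    // fills model->lin_weights
       dist = classify_example_linear(model, doc);
     }

     free_example(doc, 1);
     free_model(model, 1);
*/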

extern long   verbosity;              /* verbosity level (0-4) */
extern long   kernel_cache_statistic;

#endif
