亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频

? 歡迎來到蟲蟲下載站! | ?? 資源下載 ?? 資源專輯 ?? 關于我們
? 蟲蟲下載站

?? svm_common.h

?? SVM Light的多分類源代碼
?? H
字號:
/************************************************************************/
/*                                                                      */
/*   svm_common.h                                                       */
/*                                                                      */
/*   Definitions and functions used in both svm_learn and svm_classify. */
/*                                                                      */
/*   Author: Thorsten Joachims                                          */
/*   Date: 02.07.02                                                     */
/*                                                                      */
/*   Copyright (c) 2002  Thorsten Joachims - All rights reserved        */
/*                                                                      */
/*   This software is available for non-commercial use only. It must    */
/*   not be modified and distributed without prior permission of the    */
/*   author. The author is not responsible for implications from the    */
/*   use of this software.                                              */
/*                                                                      */
/************************************************************************/

#ifndef SVM_COMMON
#define SVM_COMMON

# define MAXSHRINK     50000    /* maximum number of shrinking rounds */
# define MAXFEATNUM 99999999    /* maximum feature number (must be in
			  	   valid range of long int type!) */

# include <stdio.h>
# include <ctype.h>
# include <math.h>
# include <string.h>
# include <stdlib.h>
# include <time.h> 
# include <float.h>

# define VERSION       "V6.01"
# define VERSION_DATE  "01.09.04"

# define CFLOAT  float       /* the type of float to use for caching */
                             /* kernel evaluations. Using float saves */
                             /* us some memory, but you can use double, too */
# define FNUM    long        /* the type used for storing feature ids */
# define FVAL    float       /* the type used for storing feature values */

/* kernel_type values for KERNEL_PARM.kernel_type */
# define LINEAR  0           /* linear kernel type */
# define POLY    1           /* polynomial kernel type */
# define RBF     2           /* rbf kernel type */
# define SIGMOID 3           /* sigmoid kernel type */

/* learning-task values for LEARN_PARM.type */
# define CLASSIFICATION 1    /* train classification model */
# define REGRESSION     2    /* train regression model */
# define RANKING        3    /* train ranking model */
# define OPTIMIZATION   4    /* train on general set of constraints */

/* A single (feature id, feature value) pair: the element type of a
   sparse feature vector (see SVECTOR below). */
typedef struct word {
  FNUM    wnum;	               /* word number (feature id) */
  FVAL    weight;              /* word weight (feature value) */
} WORD;

/* A sparse feature vector, optionally chained into a list whose
   weighted sum represents a linear constraint. */
typedef struct svector {
  WORD    *words;              /* The features/values in the vector by
				  increasing feature-number. Feature
				  numbers that are skipped are
				  interpreted as having value zero. */
  double  twonorm_sq;          /* The squared euclidean length of the
                                  vector. Used to speed up the RBF kernel. */
  char    *userdefined;        /* You can put additional information
				  here. This can be useful, if you are
				  implementing your own kernel that
				  does not work with feature/values
				  representations (for example a
				  string kernel). By default,
				  svm-light will put here the string
				  after the # sign from each line of
				  the input file. */
  long    kernel_id;           /* Feature vectors with different
				  kernel_id's are orthogonal (ie. the
				  feature number do not match). This
				  is used for computing component
				  kernels for linear constraints which
				  are a sum of several different
				  weight vectors. (currently not
				  implemented). */
  struct svector *next;        /* Lets you set up a list of SVECTOR's
				  for linear constraints which are a
				  sum of multiple feature
				  vectors. List is terminated by
				  NULL. */
  double  factor;              /* Factor by which this feature vector
				  is multiplied in the sum. */
} SVECTOR;

/* One training/test example: a (list of) feature vector(s) plus the
   per-example metadata used by the learner. */
typedef struct doc {
  long    docnum;              /* Document ID. This has to be the position of 
                                  the document in the training set array. */
  long    queryid;             /* for learning rankings, constraints are 
				  generated for documents with the same 
				  queryID. */
  double  costfactor;          /* Scales the cost of misclassifying this
				  document by this factor. The effect of this
				  value is, that the upper bound on the alpha
				  for this example is scaled by this factor.
				  The factors are set by the feature 
				  'cost:<val>' in the training data. */
  long    slackid;             /* Index of the slack variable
				  corresponding to this
				  constraint. All constraints with the
				  same slackid share the same slack
				  variable. This can only be used for
				  svm_learn_optimization. */
  SVECTOR *fvec;               /* Feature vector of the example. The
				  feature vector can actually be a
				  list of feature vectors. For
				  example, the list will have two
				  elements, if this DOC is a
				  preference constraint. The one
				  vector that is supposed to be ranked
				  higher, will have a factor of +1,
				  the lower ranked one should have a
				  factor of -1. */
} DOC;

/* All user-settable and internal parameters controlling training
   (filled in by the command-line parser of svm_learn). */
typedef struct learn_parm {
  long   type;                 /* selects between regression and
				  classification (see CLASSIFICATION,
				  REGRESSION, RANKING, OPTIMIZATION above) */
  double svm_c;                /* upper bound C on alphas */
  double eps;                  /* regression epsilon (eps=1.0 for
				  classification) */
  double svm_costratio;        /* factor to multiply C for positive examples */
  double transduction_posratio;/* fraction of unlabeled examples to be */
                               /* classified as positives */
  long   biased_hyperplane;    /* if nonzero, use hyperplane w*x+b=0 
				  otherwise w*x=0 */
  long   sharedslack;          /* if nonzero, it will use the shared
                                  slack variable mode in
                                  svm_learn_optimization. It requires
                                  that the slackid is set for every
                                  training example */
  long   svm_maxqpsize;        /* size q of working set */
  long   svm_newvarsinqp;      /* new variables to enter the working set 
				  in each iteration */
  long   kernel_cache_size;    /* size of kernel cache in megabytes */
  double epsilon_crit;         /* tolerable error for distances used 
				  in stopping criterion */
  double epsilon_shrink;       /* how much a multiplier should be above 
				  zero for shrinking */
  long   svm_iter_to_shrink;   /* iterations h after which an example can
				  be removed by shrinking */
  long   maxiter;              /* number of iterations after which the
				  optimizer terminates, if there was
				  no progress in maxdiff */
  long   remove_inconsistent;  /* exclude examples with alpha at C and 
				  retrain */
  long   skip_final_opt_check; /* do not check KT-Conditions at the end of
				  optimization for examples removed by 
				  shrinking. WARNING: This might lead to 
				  sub-optimal solutions! */
  long   compute_loo;          /* if nonzero, computes leave-one-out
				  estimates */
  double rho;                  /* parameter in xi/alpha-estimates and for
				  pruning leave-one-out range [1..2] */
  long   xa_depth;             /* parameter in xi/alpha-estimates upper
				  bounding the number of SV the current
				  alpha_t is distributed over */
  char predfile[200];          /* file for predictions on unlabeled examples
				  in transduction */
  char alphafile[200];         /* file to store optimal alphas in. use  
				  empty string if alphas should not be 
				  output */

  /* you probably do not want to touch the following */
  double epsilon_const;        /* tolerable error on eq-constraint */
  double epsilon_a;            /* tolerable error on alphas at bounds */
  double opt_precision;        /* precision of solver, set to e.g. 1e-21 
				  if you get convergence problems */

  /* the following are only for internal use */
  long   svm_c_steps;          /* do so many steps for finding optimal C */
  double svm_c_factor;         /* increase C by this factor every step */
  double svm_costratio_unlab;  /* cost ratio for unlabeled examples
				  (transduction) */
  double svm_unlabbound;       /* bound for unlabeled examples
				  (transduction) */
  double *svm_cost;            /* individual upper bounds for each var */
  long   totwords;             /* number of features */
} LEARN_PARM;

/* Parameters selecting and configuring the kernel function used by
   kernel() / single_kernel() below. */
typedef struct kernel_parm {
  long    kernel_type;   /* 0=linear, 1=poly, 2=rbf, 3=sigmoid, 4=custom */
  long    poly_degree;   /* degree parameter of the polynomial kernel */
  double  rbf_gamma;     /* gamma parameter of the RBF kernel */
  double  coef_lin;      /* linear coefficient (poly/sigmoid kernels;
			    verify exact role in kernel()) */
  double  coef_const;    /* additive constant (poly/sigmoid kernels;
			    verify exact role in kernel()) */
  char    custom[50];    /* for user supplied kernel */
} KERNEL_PARM;

/* A trained SVM model: the support vectors, their alpha coefficients,
   the threshold b, and the kernel the model was trained with. */
typedef struct model {
  long    sv_num;	   /* number of entries in supvec/alpha arrays
			      (NOTE(review): assumed from name; confirm
			      whether index 0 is a dummy entry) */
  long    at_upper_bound;  /* number of alphas at the upper bound C
			      (assumed from name; verify in svm_learn) */
  double  b;               /* threshold of the decision function */
  DOC     **supvec;        /* the support vector examples */
  double  *alpha;          /* alpha coefficient for each support vector */
  long    *index;       /* index from docnum to position in model */
  long    totwords;     /* number of features */
  long    totdoc;       /* number of training documents */
  KERNEL_PARM kernel_parm; /* kernel */

  /* the following values are not written to file */
  double  loo_error,loo_recall,loo_precision; /* leave-one-out estimates */
  double  xa_error,xa_recall,xa_precision;    /* xi/alpha estimates */
  double  *lin_weights;                       /* weights for linear case using
						 folding */
  double  maxdiff;                            /* precision, up to which this 
						 model is accurate */
} MODEL;

/* One quadratic subproblem handed to the QP solver:
   minimize 0.5*x'*G*x + g0'*x subject to the equality and box
   constraints below. */
typedef struct quadratic_program {
  long   opt_n;            /* number of variables */
  long   opt_m;            /* number of linear equality constraints */
  double *opt_ce,*opt_ce0; /* linear equality constraints */
  double *opt_g;           /* hessian of objective */
  double *opt_g0;          /* linear part of objective */
  double *opt_xinit;       /* initial value for variables */
  double *opt_low,*opt_up; /* box constraints */
} QP;

/* Cache of kernel evaluations to avoid recomputing kernel rows.
   NOTE(review): the per-field comments below are inferred from the
   field names; verify against the cache implementation in svm_common.c. */
typedef struct kernel_cache {
  long   *index;  /* cache some kernel evaluations */
  CFLOAT *buffer; /* to improve speed */
  long   *invindex;      /* maps cache slot back to document number */
  long   *active2totdoc; /* maps active (non-shrunk) index -> docnum */
  long   *totdoc2active; /* maps docnum -> active index */
  long   *lru;           /* last-use timestamps for LRU eviction */
  long   *occu;          /* occupancy flags for cache rows */
  long   elems;          /* number of rows currently cached */
  long   max_elems;      /* maximum number of rows fitting in buffer */
  long   time;           /* logical clock driving the LRU timestamps */
  long   activenum;      /* number of currently active examples */
  long   buffsize;       /* size of buffer */
} KERNEL_CACHE;


/* Accumulated time counters for profiling the phases of the optimizer
   (units are whatever get_runtime() returns). Each field times the
   phase its name suggests. */
typedef struct timing_profile {
  long   time_kernel;   /* kernel evaluations */
  long   time_opti;     /* QP optimization */
  long   time_shrink;   /* shrinking */
  long   time_update;   /* updating gradients/linear parts */
  long   time_model;    /* model maintenance */
  long   time_check;    /* checking optimality conditions */
  long   time_select;   /* working-set selection */
} TIMING;

/* Bookkeeping for the shrinking heuristic, which temporarily removes
   variables that are unlikely to change from the working problem.
   NOTE(review): comments on the bare fields are inferred from names;
   confirm against the shrinking code in svm_learn.c. */
typedef struct shrink_state {
  long   *active;         /* per-example flag: still in working problem? */
  long   *inactive_since; /* iteration at which each example was shrunk */
  long   deactnum;        /* number of deactivation rounds recorded */
  double **a_history;  /* for shrinking with non-linear kernel */
  long   maxhistory;      /* capacity of a_history */
  double *last_a;      /* for shrinking with linear kernel */
  double *last_lin;    /* for shrinking with linear kernel */
} SHRINK_STATE;

/* --- classification ----------------------------------------------- */
double classify_example(MODEL *, DOC *);
double classify_example_linear(MODEL *, DOC *);

/* --- kernel evaluation -------------------------------------------- */
CFLOAT kernel(KERNEL_PARM *, DOC *, DOC *); 
CFLOAT single_kernel(KERNEL_PARM *, SVECTOR *, SVECTOR *); 
double custom_kernel(KERNEL_PARM *, SVECTOR *, SVECTOR *); 

/* --- sparse-vector (SVECTOR) construction and arithmetic ---------- */
SVECTOR *create_svector(WORD *, char *, double);
SVECTOR *copy_svector(SVECTOR *);
void   free_svector(SVECTOR *);
double    sprod_ss(SVECTOR *, SVECTOR *);
SVECTOR*  sub_ss(SVECTOR *, SVECTOR *); 
SVECTOR*  add_ss(SVECTOR *, SVECTOR *); 
SVECTOR*  add_list_ss(SVECTOR *); 
void      append_svector_list(SVECTOR *a, SVECTOR *b);
SVECTOR*  smult_s(SVECTOR *, double);
int       featvec_eq(SVECTOR *, SVECTOR *); 
double model_length_s(MODEL *, KERNEL_PARM *);

/* --- dense-vector helpers (double arrays indexed by feature id) --- */
void   clear_vector_n(double *, long);
void   add_vector_ns(double *, SVECTOR *, double);
double sprod_ns(double *, SVECTOR *);
void   add_weight_vector_to_linear_model(MODEL *);

/* --- example (DOC) and model lifecycle ---------------------------- */
DOC    *create_example(long, long, long, double, SVECTOR *);
void   free_example(DOC *, long);
MODEL  *read_model(char *);
MODEL  *copy_model(MODEL *);
void   free_model(MODEL *, int);

/* --- input-file parsing ------------------------------------------- */
void   read_documents(char *, DOC ***, double **, long *, long *);
int    parse_document(char *, WORD *, double *, long *, long *, double *, long *, long, char **);
double *read_alphas(char *,long);
void   nol_ll(char *, long *, long *, long *);

/* --- misc utilities ----------------------------------------------- */
long   minl(long, long);
long   maxl(long, long);
long   get_runtime(void);
int    space_or_null(int);
void   *my_malloc(size_t); 
void   copyright_notice(void);
# ifdef _MSC_VER
   /* MSVC (pre-C99) does not declare isnan(); provide a prototype */
   int isnan(double);
# endif

extern long   verbosity;              /* verbosity level (0-4) */
extern long   kernel_cache_statistic; /* counts kernel cache activity */

#endif

?? 快捷鍵說明

復制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號 Ctrl + =
減小字號 Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
粉嫩一区二区三区性色av| 青青青伊人色综合久久| 日韩欧美色电影| 欧美又粗又大又爽| 91视频.com| 91亚洲精品久久久蜜桃| 91蝌蚪porny| 在线观看网站黄不卡| 色老头久久综合| 在线观看av一区| 欧美精品久久天天躁| 欧美日产在线观看| 91精品国产91热久久久做人人| 欧美日韩免费观看一区三区| 欧美日本一区二区| 日韩三级视频在线看| 久久综合国产精品| 中文字幕一区二区5566日韩| 亚洲天堂免费看| 亚洲6080在线| 麻豆一区二区在线| 成人一级视频在线观看| 成人app下载| 欧美视频在线播放| 精品免费日韩av| 国产精品私人影院| 一区二区三区高清在线| 日韩电影在线免费| 国内精品国产成人国产三级粉色 | 亚洲激情av在线| 亚洲一区二区五区| 精品一区二区日韩| 91美女在线看| 2欧美一区二区三区在线观看视频| 国产女同性恋一区二区| 亚洲欧美成aⅴ人在线观看| 亚瑟在线精品视频| 风间由美一区二区三区在线观看 | 日本电影欧美片| 日韩三级.com| 国产精品国产自产拍高清av| 午夜一区二区三区在线观看| 国产一区二区三区四区五区美女| 色呦呦国产精品| 亚洲精品国产无天堂网2021| 亚洲bt欧美bt精品| 国产精品一区二区久久不卡| 欧美日韩专区在线| 国产精品伦理在线| 看国产成人h片视频| 欧美在线短视频| 国产精品美女视频| 蜜臀av性久久久久av蜜臀妖精| 99久久精品99国产精品| 日韩欧美电影一二三| 亚洲永久精品国产| 成人免费看黄yyy456| 日韩欧美在线影院| 亚洲国产精品一区二区久久恐怖片| 国产精品69久久久久水密桃| 日韩欧美中文一区二区| 午夜婷婷国产麻豆精品| 色哟哟一区二区三区| 亚洲国产激情av| 国产一区二区三区国产| 日韩一区二区三区在线视频| 亚洲国产精品欧美一二99| 91麻豆国产自产在线观看| 国产日产欧美精品一区二区三区| 男女男精品视频网| 欧美一区午夜视频在线观看| 一区二区三区美女| 色综合视频一区二区三区高清| 国产日产欧美一区二区三区| 国产99久久久精品| 久久久国际精品| 国产精品影音先锋| 久久久久久久精| 国产精品一区二区黑丝| 国产亚洲欧美一区在线观看| 国产一区二区调教| 欧美激情一区不卡| 丰满白嫩尤物一区二区| 国产精品传媒入口麻豆| 成人三级伦理片| 综合久久综合久久| 日本高清不卡一区| 亚洲一区二区三区视频在线播放| 欧美日韩国产高清一区二区三区| 亚洲成人精品一区| 欧美一区二区女人| 国产精品自拍av| 国产精品久久国产精麻豆99网站 | 亚洲综合丝袜美腿| 欧美精品在线观看播放| 捆绑调教一区二区三区| 国产欧美一区二区精品性色超碰| 大尺度一区二区| 亚洲欧美韩国综合色| 欧美羞羞免费网站| 久久精品国产亚洲aⅴ| 久久久九九九九| 国产午夜精品久久久久久久 | 激情国产一区二区| 国产精品毛片高清在线完整版| 色999日韩国产欧美一区二区| 婷婷综合另类小说色区| 国产亚洲视频系列| 91久久精品国产91性色tv| 免费人成网站在线观看欧美高清| 日本一区二区三区四区| 色婷婷综合五月| 韩国女主播成人在线| 亚洲欧美日韩系列| 欧美大片免费久久精品三p| 99视频热这里只有精品免费| 午夜精品爽啪视频| 国产精品久久久久精k8 | 99re亚洲国产精品| 日韩高清不卡一区二区三区| 久久久久久久精| 欧美日本韩国一区二区三区视频| 国产精品一区久久久久| 亚洲动漫第一页| 国产精品视频免费看| 91精品在线麻豆| 色婷婷激情综合| 国产aⅴ综合色| 日韩国产在线一| 悠悠色在线精品| 亚洲国产高清在线观看视频| 日韩一级完整毛片| 欧美综合一区二区| 成人激情开心网| 国产精品夜夜嗨| 日韩 欧美一区二区三区| 亚洲视频每日更新| 久久精品在线观看| 日韩免费在线观看| 制服丝袜中文字幕一区| 91免费看片在线观看| 国产精品亚洲一区二区三区在线| 免费一级片91| 日本成人在线看| 婷婷中文字幕一区三区| 伊人色综合久久天天人手人婷| 欧美高清在线视频| 国产亚洲精品福利| 久久久久久97三级| 欧美va亚洲va| 欧美电影免费观看高清完整版在线观看 | 日韩一区二区在线观看| 欧美日韩视频在线第一区 | 亚洲成人中文在线| 亚洲一区二区三区视频在线播放 | 久久久无码精品亚洲日韩按摩| 91精品视频网| 欧美精品久久99久久在免费线 | 欧美日韩一二三区| 欧美系列一区二区| 欧美日韩高清在线播放| 欧美性做爰猛烈叫床潮| 欧美无砖专区一中文字| 在线成人高清不卡| 日韩免费观看高清完整版在线观看| 777欧美精品| 精品剧情在线观看| 久久久久久亚洲综合| 
亚洲国产成人午夜在线一区| 亚洲国产电影在线观看| 成人免费小视频| 午夜精品成人在线视频| 亚洲国产欧美在线| 老司机免费视频一区二区三区| 国产一区二区日韩精品| 成人av高清在线| 91高清视频在线| 欧美大片在线观看| 国产精品视频麻豆| 亚洲欧洲在线观看av| 亚洲一区二区四区蜜桃| 久久91精品国产91久久小草| 国产电影精品久久禁18| 一本色道**综合亚洲精品蜜桃冫| 欧美性感一类影片在线播放| 精品国产电影一区二区| 一色屋精品亚洲香蕉网站| 亚洲一区二区三区国产| 精品无人码麻豆乱码1区2区 | 一区二区三区四区乱视频| 午夜国产不卡在线观看视频| 国产在线日韩欧美| 色婷婷综合五月| 久久日韩精品一区二区五区| 亚洲男同1069视频| 韩国女主播成人在线| 欧美日韩欧美一区二区| 国产精品免费久久久久| 六月丁香婷婷色狠狠久久| 成人av在线资源|