naivebayes.java

A Naive Bayes classifier written in Java (Weka's weka.classifiers.bayes.NaiveBayes)
Category: JAVA
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    NaiveBayes.java
 *    Copyright (C) 1999 Eibe Frank, Len Trigg
 *
 */

package weka.classifiers.bayes;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.UpdateableClassifier;
import java.io.*;
import java.util.*;
import weka.core.*;
import weka.estimators.*;

/**
 * Class for a Naive Bayes classifier using estimator classes. Numeric
 * estimator precision values are chosen based on analysis of the
 * training data. For this reason, the classifier is not an
 * UpdateableClassifier (which in typical usage are initialized with zero
 * training instances) -- if you need the UpdateableClassifier functionality,
 * use the NaiveBayesUpdateable classifier. The NaiveBayesUpdateable
 * classifier will use a default precision of 0.1 for numeric attributes
 * when buildClassifier is called with zero training instances.
 * <p>
 * For more information on Naive Bayes classifiers, see<p>
 *
 * George H. John and Pat Langley (1995). <i>Estimating
 * Continuous Distributions in Bayesian Classifiers</i>. Proceedings
 * of the Eleventh Conference on Uncertainty in Artificial
 * Intelligence. pp. 338-345. Morgan Kaufmann, San Mateo.<p>
 *
 * Valid options are:<p>
 *
 * -K <br>
 * Use kernel estimation for modelling numeric attributes rather than
 * a single normal distribution.<p>
 *
 * -D <br>
 * Use supervised discretization to process numeric attributes.<p>
 *
 * @author Len Trigg (trigg@cs.waikato.ac.nz)
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @version $Revision: 1.16 $
 */
public class NaiveBayes extends Classifier
  implements OptionHandler, WeightedInstancesHandler {

  /** The attribute estimators. */
  protected Estimator[][] m_Distributions;

  /** The class estimator. */
  protected Estimator m_ClassDistribution;

  /**
   * Whether to use kernel density estimator rather than normal distribution
   * for numeric attributes
   */
  protected boolean m_UseKernelEstimator = false;

  /**
   * Whether to use discretization rather than normal distribution
   * for numeric attributes
   */
  protected boolean m_UseDiscretization = false;

  /** The number of classes (or 1 for numeric class) */
  protected int m_NumClasses;

  /**
   * The dataset header for the purposes of printing out a semi-intelligible
   * model
   */
  protected Instances m_Instances;

  /** The precision parameter used for numeric attributes */
  protected static final double DEFAULT_NUM_PRECISION = 0.01;

  /** The discretization filter. */
  protected weka.filters.supervised.attribute.Discretize m_Disc = null;

  /**
   * Returns a string describing this classifier
   * @return a description of the classifier suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return "Class for a Naive Bayes classifier using estimator classes. Numeric"
      + " estimator precision values are chosen based on analysis of the"
      + " training data. For this reason, the classifier is not an"
      + " UpdateableClassifier (which in typical usage are initialized with zero"
      + " training instances) -- if you need the UpdateableClassifier functionality,"
      + " use the NaiveBayesUpdateable classifier. The NaiveBayesUpdateable"
      + " classifier will use a default precision of 0.1 for numeric attributes"
      + " when buildClassifier is called with zero training instances.\n\n"
      + "For more information on Naive Bayes classifiers, see\n\n"
      + "George H. John and Pat Langley (1995). Estimating"
      + " Continuous Distributions in Bayesian Classifiers. Proceedings"
      + " of the Eleventh Conference on Uncertainty in Artificial"
      + " Intelligence. pp. 338-345. Morgan Kaufmann, San Mateo.";
  }

  /**
   * Generates the classifier.
   *
   * @param instances set of instances serving as training data
   * @exception Exception if the classifier has not been generated
   * successfully
   */
  public void buildClassifier(Instances instances) throws Exception {

    if (instances.checkForStringAttributes()) {
      throw new UnsupportedAttributeTypeException("Cannot handle string attributes!");
    }
    if (instances.classAttribute().isNumeric()) {
      throw new UnsupportedClassTypeException("Naive Bayes: Class is numeric!");
    }
    m_NumClasses = instances.numClasses();
    if (m_NumClasses < 0) {
      throw new Exception("Dataset has no class attribute");
    }

    // Copy the instances
    m_Instances = new Instances(instances);

    // Discretize instances if required
    if (m_UseDiscretization) {
      m_Disc = new weka.filters.supervised.attribute.Discretize();
      m_Disc.setInputFormat(m_Instances);
      m_Instances = weka.filters.Filter.useFilter(m_Instances, m_Disc);
    } else {
      m_Disc = null;
    }

    // Reserve space for the distributions
    m_Distributions =
      new Estimator[m_Instances.numAttributes() - 1][m_Instances.numClasses()];
    m_ClassDistribution = new DiscreteEstimator(m_Instances.numClasses(), true);

    int attIndex = 0;
    Enumeration enu = m_Instances.enumerateAttributes();
    while (enu.hasMoreElements()) {
      Attribute attribute = (Attribute) enu.nextElement();

      // If the attribute is numeric, determine the estimator
      // numeric precision from differences between adjacent values
      double numPrecision = DEFAULT_NUM_PRECISION;
      if (attribute.type() == Attribute.NUMERIC) {
        m_Instances.sort(attribute);
        if ((m_Instances.numInstances() > 0)
            && !m_Instances.instance(0).isMissing(attribute)) {
          double lastVal = m_Instances.instance(0).value(attribute);
          double currentVal, deltaSum = 0;
          int distinct = 0;
          for (int i = 1; i < m_Instances.numInstances(); i++) {
            Instance currentInst = m_Instances.instance(i);
            if (currentInst.isMissing(attribute)) {
              break;
            }
            currentVal = currentInst.value(attribute);
            if (currentVal != lastVal) {
              deltaSum += currentVal - lastVal;
              lastVal = currentVal;
              distinct++;
            }
          }
          if (distinct > 0) {
            numPrecision = deltaSum / distinct;
          }
        }
      }
      for (int j = 0; j < m_Instances.numClasses(); j++) {
        switch (attribute.type()) {
        case Attribute.NUMERIC:
          if (m_UseKernelEstimator) {
            m_Distributions[attIndex][j] = new KernelEstimator(numPrecision);
          } else {
            m_Distributions[attIndex][j] = new NormalEstimator(numPrecision);
          }
          break;
        case Attribute.NOMINAL:
          m_Distributions[attIndex][j] =
            new DiscreteEstimator(attribute.numValues(), true);
          break;
        default:
          throw new Exception("Attribute type unknown to NaiveBayes");
        }
      }
      attIndex++;
    }

    // Compute counts
    Enumeration enumInsts = m_Instances.enumerateInstances();
    while (enumInsts.hasMoreElements()) {
      Instance instance = (Instance) enumInsts.nextElement();
      updateClassifier(instance);
    }

    // Save space
    m_Instances = new Instances(m_Instances, 0);
  }

  /**
   * Updates the classifier with the given instance.
   *
   * @param instance the new training instance to include in the model
   * @exception Exception if the instance could not be incorporated in
   * the model.
   */
  public void updateClassifier(Instance instance) throws Exception {

    if (!instance.classIsMissing()) {
      Enumeration enumAtts = m_Instances.enumerateAttributes();
      int attIndex = 0;
      while (enumAtts.hasMoreElements()) {
        Attribute attribute = (Attribute) enumAtts.nextElement();
        if (!instance.isMissing(attribute)) {
          m_Distributions[attIndex][(int) instance.classValue()]
            .addValue(instance.value(attribute), instance.weight());
        }
        attIndex++;
      }
      m_ClassDistribution.addValue(instance.classValue(), instance.weight());
    }
  }

  /**
   * Calculates the class membership probabilities for the given test
   * instance.
   *
   * @param instance the instance to be classified
   * @return predicted class probability distribution
   * @exception Exception if there is a problem generating the prediction
   */
  public double[] distributionForInstance(Instance instance) throws Exception {

    if (m_UseDiscretization) {
      m_Disc.input(instance);
      instance = m_Disc.output();
    }
    double[] probs = new double[m_NumClasses];
    for (int j = 0; j < m_NumClasses; j++) {
      probs[j] = m_ClassDistribution.getProbability(j);
    }
    Enumeration enumAtts = instance.enumerateAttributes();
    int attIndex = 0;
    while (enumAtts.hasMoreElements()) {
      Attribute attribute = (Attribute) enumAtts.nextElement();
      if (!instance.isMissing(attribute)) {
        double temp, max = 0;
        for (int j = 0; j < m_NumClasses; j++) {
          temp = Math.max(1e-75, m_Distributions[attIndex][j]
                          .getProbability(instance.value(attribute)));
          probs[j] *= temp;
          if (probs[j] > max) {
            max = probs[j];
          }
          if (Double.isNaN(probs[j])) {
            throw new Exception("NaN returned from estimator for attribute "
                                + attribute.name() + ":\n"
                                + m_Distributions[attIndex][j].toString());
          }
        }
        if ((max > 0) && (max < 1e-75)) { // Danger of probability underflow
          for (int j = 0; j < m_NumClasses; j++) {
            probs[j] *= 1e75;
          }
        }
      }
      attIndex++;
    }

    // Display probabilities
    Utils.normalize(probs);
    return probs;
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {

    Vector newVector = new Vector(2);

    newVector.addElement(
      new Option("\tUse kernel density estimator rather than normal\n"
                 + "\tdistribution for numeric attributes",
                 "K", 0, "-K"));
    newVector.addElement(
      new Option("\tUse supervised discretization to process numeric attributes\n",
                 "D", 0, "-D"));
    return newVector.elements();
  }

  /**
   * Parses a given list of options. Valid options are:<p>
   *
   * -K <br>
   * Use kernel estimation for modelling numeric attributes rather than
   * a single normal distribution.<p>
   *
   * -D <br>
   * Use supervised discretization to process numeric attributes.
   *
   * @param options the list of options as an array of strings
   * @exception Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    boolean k = Utils.getFlag('K', options);
    boolean d = Utils.getFlag('D', options);
    if (k && d) {
      throw new IllegalArgumentException("Can't use both kernel density "
                                         + "estimation and discretization!");
    }
    setUseSupervisedDiscretization(d);
    setUseKernelEstimator(k);
    Utils.checkForRemainingOptions(options);
  }

  /**
   * Gets the current settings of the classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String[] getOptions() {

    String[] options = new String[2];
    int current = 0;

    if (m_UseKernelEstimator) {
      options[current++] = "-K";
    }
    if (m_UseDiscretization) {
      options[current++] = "-D";
    }
    while (current < options.length) {
      options[current++] = "";
    }
    return options;
  }

  /**
   * Returns a description of the classifier.
   *
   * @return a description of the classifier as a string.
   */
  public String toString() {

    StringBuffer text = new StringBuffer();

    text.append("Naive Bayes Classifier");
    if (m_Instances == null) {
      text.append(": No model built yet.");
    } else {
      try {
        for (int i = 0; i < m_Distributions[0].length; i++) {
          text.append("\n\nClass " + m_Instances.classAttribute().value(i)
                      + ": Prior probability = "
                      + Utils.doubleToString(m_ClassDistribution.getProbability(i), 4, 2)
                      + "\n\n");
          Enumeration enumAtts = m_Instances.enumerateAttributes();
          int attIndex = 0;
          while (enumAtts.hasMoreElements()) {
            Attribute attribute = (Attribute) enumAtts.nextElement();
            text.append(attribute.name() + ":  " + m_Distributions[attIndex][i]);
            attIndex++;
          }
        }
      } catch (Exception ex) {
        text.append(ex.getMessage());
      }
    }

    return text.toString();
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String useKernelEstimatorTipText() {
    return "Use a kernel estimator for numeric attributes rather than a "
      + "normal distribution.";
  }

  /**
   * Gets if kernel estimator is being used.
   *
   * @return Value of m_UseKernelEstimator.
   */
  public boolean getUseKernelEstimator() {

    return m_UseKernelEstimator;
  }

  /**
   * Sets if kernel estimator is to be used.
   *
   * @param v Value to assign to m_UseKernelEstimator.
   */
  public void setUseKernelEstimator(boolean v) {

    m_UseKernelEstimator = v;
    if (v) {
      setUseSupervisedDiscretization(false);
    }
  }

  /**
   * Returns the tip text for this property
   * @return tip text for this property suitable for
   * displaying in the explorer/experimenter gui
   */
  public String useSupervisedDiscretizationTipText() {
    return "Use supervised discretization to convert numeric attributes to nominal "
      + "ones.";
  }

  /**
   * Get whether supervised discretization is to be used.
   *
   * @return true if supervised discretization is to be used.
   */
  public boolean getUseSupervisedDiscretization() {

    return m_UseDiscretization;
  }

  /**
   * Set whether supervised discretization is to be used.
   *
   * @param newblah true if supervised discretization is to be used.
   */
  public void setUseSupervisedDiscretization(boolean newblah) {

    m_UseDiscretization = newblah;
    if (newblah) {
      setUseKernelEstimator(false);
    }
  }

  /**
   * Main method for testing this class.
   *
   * @param argv the options
   */
  public static void main(String[] argv) {

    try {
      System.out.println(Evaluation.evaluateModel(new NaiveBayes(), argv));
    } catch (Exception e) {
      e.printStackTrace();
      System.err.println(e.getMessage());
    }
  }
}
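
A minimal sketch of how this classifier is typically driven from user code, assuming a Weka 3.x-era API. The class name NaiveBayesDemo, the "weather.arff" path, and the reader-based Instances constructor are illustrative assumptions, not part of the file above; the calls to buildClassifier, setUseKernelEstimator, and distributionForInstance come directly from the class shown.

// NaiveBayesDemo.java -- illustrative only; file path and demo class name are assumptions.
import java.io.BufferedReader;
import java.io.FileReader;
import weka.core.Instances;
import weka.classifiers.bayes.NaiveBayes;

public class NaiveBayesDemo {

  public static void main(String[] args) throws Exception {
    // Load an ARFF dataset ("weather.arff" is a placeholder path).
    Instances data =
      new Instances(new BufferedReader(new FileReader("weather.arff")));
    // The class attribute must be set before training; here it is the last attribute.
    data.setClassIndex(data.numAttributes() - 1);

    NaiveBayes nb = new NaiveBayes();
    // Same effect as the -K option: kernel density estimation for numeric attributes.
    nb.setUseKernelEstimator(true);
    nb.buildClassifier(data);

    // Per-class membership probabilities for the first instance.
    double[] probs = nb.distributionForInstance(data.instance(0));
    for (int j = 0; j < probs.length; j++) {
      System.out.println(data.classAttribute().value(j) + ": " + probs[j]);
    }

    // Human-readable dump of the class priors and per-attribute estimators.
    System.out.println(nb);
  }
}

From the command line, main delegates to Evaluation.evaluateModel, so an invocation along the lines of "java weka.classifiers.bayes.NaiveBayes -t weather.arff -K" should train and evaluate the model; the exact evaluation flags depend on the Weka release.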
