

j48.java

Data mining classification algorithm: J48 source code
Language: Java
Page 1 of 2
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    J48.java
 *    Copyright (C) 1999 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.trees;

import weka.classifiers.Classifier;
import weka.classifiers.Sourcable;
import weka.classifiers.trees.j48.BinC45ModelSelection;
import weka.classifiers.trees.j48.C45ModelSelection;
import weka.classifiers.trees.j48.C45PruneableClassifierTree;
import weka.classifiers.trees.j48.ClassifierTree;
import weka.classifiers.trees.j48.ModelSelection;
import weka.classifiers.trees.j48.PruneableClassifierTree;
import weka.core.AdditionalMeasureProducer;
import weka.core.Capabilities;
import weka.core.Drawable;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Matchable;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.RevisionUtils;
import weka.core.Summarizable;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;

import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Class for generating a pruned or unpruned C4.5 decision tree. For more information, see<br/>
 * <br/>
 * Ross Quinlan (1993). C4.5: Programs for Machine Learning. Morgan Kaufmann Publishers, San Mateo, CA.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;book{Quinlan1993,
 *    address = {San Mateo, CA},
 *    author = {Ross Quinlan},
 *    publisher = {Morgan Kaufmann Publishers},
 *    title = {C4.5: Programs for Machine Learning},
 *    year = {1993}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -U
 *  Use unpruned tree.</pre>
 *
 * <pre> -C &lt;pruning confidence&gt;
 *  Set confidence threshold for pruning.
 *  (default 0.25)</pre>
 *
 * <pre> -M &lt;minimum number of instances&gt;
 *  Set minimum number of instances per leaf.
 *  (default 2)</pre>
 *
 * <pre> -R
 *  Use reduced error pruning.</pre>
 *
 * <pre> -N &lt;number of folds&gt;
 *  Set number of folds for reduced error
 *  pruning. One fold is used as pruning set.
 *  (default 3)</pre>
 *
 * <pre> -B
 *  Use binary splits only.</pre>
 *
 * <pre> -S
 *  Don't perform subtree raising.</pre>
 *
 * <pre> -L
 *  Do not clean up after the tree has been built.</pre>
 *
 * <pre> -A
 *  Laplace smoothing for predicted probabilities.</pre>
 *
 * <pre> -Q &lt;seed&gt;
 *  Seed for random data shuffling (default 1).</pre>
 *
 <!-- options-end -->
 *
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @version $Revision: 1.9 $
 */
public class J48
  extends Classifier
  implements OptionHandler, Drawable, Matchable, Sourcable,
             WeightedInstancesHandler, Summarizable, AdditionalMeasureProducer,
             TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = -217733168393644444L;

  /** The decision tree */
  private ClassifierTree m_root;

  /** Unpruned tree? */
  private boolean m_unpruned = false;

  /** Confidence level */
  private float m_CF = 0.25f;

  /** Minimum number of instances */
  private int m_minNumObj = 2;

  /** Determines whether probabilities are smoothed using
      Laplace correction when predictions are generated */
  private boolean m_useLaplace = false;

  /** Use reduced error pruning? */
  private boolean m_reducedErrorPruning = false;

  /** Number of folds for reduced error pruning. */
  private int m_numFolds = 3;

  /** Binary splits on nominal attributes? */
  private boolean m_binarySplits = false;

  /** Subtree raising to be performed? */
  private boolean m_subtreeRaising = true;

  /** Cleanup after the tree has been built. */
  private boolean m_noCleanup = false;

  /** Random number seed for reduced-error pruning. */
  private int m_Seed = 1;

  /**
   * Returns a string describing classifier
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return "Class for generating a pruned or unpruned C4.5 decision tree. For more "
      + "information, see\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation result;

    result = new TechnicalInformation(Type.BOOK);
    result.setValue(Field.AUTHOR, "Ross Quinlan");
    result.setValue(Field.YEAR, "1993");
    result.setValue(Field.TITLE, "C4.5: Programs for Machine Learning");
    result.setValue(Field.PUBLISHER, "Morgan Kaufmann Publishers");
    result.setValue(Field.ADDRESS, "San Mateo, CA");

    return result;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return      the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result;

    try {
      if (!m_reducedErrorPruning)
        result = new C45PruneableClassifierTree(null, !m_unpruned, m_CF,
                                                m_subtreeRaising, !m_noCleanup).getCapabilities();
      else
        result = new PruneableClassifierTree(null, !m_unpruned, m_numFolds,
                                             !m_noCleanup, m_Seed).getCapabilities();
    }
    catch (Exception e) {
      result = new Capabilities(this);
    }

    result.setOwner(this);

    return result;
  }

  /**
   * Generates the classifier.
   *
   * @param instances the data to train the classifier with
   * @throws Exception if classifier can't be built successfully
   */
  public void buildClassifier(Instances instances)
       throws Exception {

    ModelSelection modSelection;

    if (m_binarySplits)
      modSelection = new BinC45ModelSelection(m_minNumObj, instances);
    else
      modSelection = new C45ModelSelection(m_minNumObj, instances);
    if (!m_reducedErrorPruning)
      m_root = new C45PruneableClassifierTree(modSelection, !m_unpruned, m_CF,
                                              m_subtreeRaising, !m_noCleanup);
    else
      m_root = new PruneableClassifierTree(modSelection, !m_unpruned, m_numFolds,
                                           !m_noCleanup, m_Seed);
    m_root.buildClassifier(instances);
    if (m_binarySplits) {
      ((BinC45ModelSelection) modSelection).cleanup();
    } else {
      ((C45ModelSelection) modSelection).cleanup();
    }
  }

  /**
   * Classifies an instance.
   *
   * @param instance the instance to classify
   * @return the classification for the instance
   * @throws Exception if instance can't be classified successfully
   */
  public double classifyInstance(Instance instance) throws Exception {
    return m_root.classifyInstance(instance);
  }

  /**
   * Returns class probabilities for an instance.
   *
   * @param instance the instance to calculate the class probabilities for
   * @return the class probabilities
   * @throws Exception if distribution can't be computed successfully
   */
  public final double[] distributionForInstance(Instance instance)
       throws Exception {
    return m_root.distributionForInstance(instance, m_useLaplace);
  }

  /**
   *  Returns the type of graph this classifier
   *  represents.
   *  @return Drawable.TREE
   */
  public int graphType() {
    return Drawable.TREE;
  }

  /**
   * Returns graph describing the tree.
   *
   * @return the graph describing the tree
   * @throws Exception if graph can't be computed
   */
  public String graph() throws Exception {
    return m_root.graph();
  }

  /**
   * Returns tree in prefix order.
   *
   * @return the tree in prefix order
   * @throws Exception if something goes wrong
   */
  public String prefix() throws Exception {
    return m_root.prefix();
  }

  /**
   * Returns tree as an if-then statement.
   *
   * @param className the name of the Java class
   * @return the tree as a Java if-then type statement
   * @throws Exception if something goes wrong
   */
  public String toSource(String className) throws Exception {
    StringBuffer[] source = m_root.toSource(className);
    return
      "class " + className + " {\n\n"
      + "  public static double classify(Object[] i)\n"
      + "    throws Exception {\n\n"
      + "    double p = Double.NaN;\n"
      + source[0]  // Assignment code
      + "    return p;\n"
      + "  }\n"
      + source[1]  // Support code
      + "}\n";
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * Valid options are: <p>
   *
   * -U <br>
   * Use unpruned tree.<p>
   *
   * -C confidence <br>
   * Set confidence threshold for pruning. (Default: 0.25) <p>
   *
   * -M number <br>
   * Set minimum number of instances per leaf. (Default: 2) <p>
   *
   * -R <br>
   * Use reduced error pruning. No subtree raising is performed. <p>
   *
   * -N number <br>
   * Set number of folds for reduced error pruning. One fold is
   * used as the pruning set. (Default: 3) <p>
   *
   * -B <br>
   * Use binary splits for nominal attributes. <p>
   *
   * -S <br>
   * Don't perform subtree raising. <p>
   *
   * -L <br>
   * Do not clean up after the tree has been built.
   *
   * -A <br>
   * If set, Laplace smoothing is used for predicted probabilities. <p>
   *
   * -Q <br>
   * The seed for reduced-error pruning. <p>
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {
    Vector newVector = new Vector(9);

    newVector.addElement(new Option("\tUse unpruned tree.",
                                    "U", 0, "-U"));
    newVector.addElement(new Option("\tSet confidence threshold for pruning.\n" +
                                    "\t(default 0.25)",
                                    "C", 1, "-C <pruning confidence>"));
    newVector.addElement(new Option("\tSet minimum number of instances per leaf.\n" +
                                    "\t(default 2)",
                                    "M", 1, "-M <minimum number of instances>"));
    newVector.addElement(new Option("\tUse reduced error pruning.",
                                    "R", 0, "-R"));
    newVector.addElement(new Option("\tSet number of folds for reduced error\n" +
                                    "\tpruning. One fold is used as pruning set.\n" +
                                    "\t(default 3)",
                                    "N", 1, "-N <number of folds>"));
    newVector.addElement(new Option("\tUse binary splits only.",
                                    "B", 0, "-B"));
    newVector.addElement(new Option("\tDon't perform subtree raising.",
                                    "S", 0, "-S"));
    newVector.addElement(new Option("\tDo not clean up after the tree has been built.",
                                    "L", 0, "-L"));
    newVector.addElement(new Option("\tLaplace smoothing for predicted probabilities.",
                                    "A", 0, "-A"));
    newVector.addElement(new Option("\tSeed for random data shuffling (default 1).",
                                    "Q", 1, "-Q <seed>"));

    return newVector.elements();
  }

  /**
   * Parses a given list of options.
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -U
   *  Use unpruned tree.</pre>
   *
   * <pre> -C &lt;pruning confidence&gt;
   *  Set confidence threshold for pruning.
   *  (default 0.25)</pre>
   *
   * <pre> -M &lt;minimum number of instances&gt;
   *  Set minimum number of instances per leaf.
   *  (default 2)</pre>
   *
   * <pre> -R
   *  Use reduced error pruning.</pre>
   *
   * <pre> -N &lt;number of folds&gt;
   *  Set number of folds for reduced error
   *  pruning. One fold is used as pruning set.
   *  (default 3)</pre>
   *
   * <pre> -B
   *  Use binary splits only.</pre>
   *
   * <pre> -S
   *  Don't perform subtree raising.</pre>
   *
   * <pre> -L
   *  Do not clean up after the tree has been built.</pre>
   *
   * <pre> -A
   *  Laplace smoothing for predicted probabilities.</pre>
   *
   * <pre> -Q &lt;seed&gt;
   *  Seed for random data shuffling (default 1).</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    // Other options
    String minNumString = Utils.getOption('M', options);
    if (minNumString.length() != 0) {
      m_minNumObj = Integer.parseInt(minNumString);
    } else {
      m_minNumObj = 2;
    }
    m_binarySplits = Utils.getFlag('B', options);
    m_useLaplace = Utils.getFlag('A', options);

    // Pruning options
    m_unpruned = Utils.getFlag('U', options);
    m_subtreeRaising = !Utils.getFlag('S', options);
    m_noCleanup = Utils.getFlag('L', options);
    if ((m_unpruned) && (!m_subtreeRaising)) {
      throw new Exception("Subtree raising doesn't need to be unset for unpruned tree!");
    }
    m_reducedErrorPruning = Utils.getFlag('R', options);
    if ((m_unpruned) && (m_reducedErrorPruning)) {
      throw new Exception("Unpruned tree and reduced error pruning can't be selected " +
                          "simultaneously!");
    }
    String confidenceString = Utils.getOption('C', options);
    if (confidenceString.length() != 0) {
      if (m_reducedErrorPruning) {
        throw new Exception("Setting the confidence doesn't make sense " +
                            "for reduced error pruning.");
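For readers who want to try the class listed above, the sketch below shows one common way a J48 tree is driven through the Weka API: load an ARFF file into an Instances object, pass the same command-line style options that setOptions() parses, then call buildClassifier() and query classifyInstance() / distributionForInstance(). This is a minimal sketch, not part of the posted source; the demo class name, the placeholder file path data/weather.arff, and a Weka 3.x jar on the classpath are assumptions for illustration.

// Minimal usage sketch for the J48 class shown in the listing above.
// Assumptions: a Weka 3.x jar on the classpath and a nominal-class ARFF
// file at "data/weather.arff" (placeholder path, not part of the listing).
import weka.classifiers.trees.J48;
import weka.core.Instance;
import weka.core.Instances;

import java.io.BufferedReader;
import java.io.FileReader;

public class J48Demo {
  public static void main(String[] args) throws Exception {
    // Load the training data; the last attribute is used as the class.
    Instances data = new Instances(
        new BufferedReader(new FileReader("data/weather.arff")));
    data.setClassIndex(data.numAttributes() - 1);

    // Configure the tree through the options parsed by setOptions():
    // -C 0.25 = pruning confidence, -M 2 = minimum instances per leaf.
    J48 tree = new J48();
    tree.setOptions(new String[] {"-C", "0.25", "-M", "2"});

    // buildClassifier() picks C45ModelSelection (or BinC45ModelSelection
    // when -B is set) and grows/prunes the underlying ClassifierTree.
    tree.buildClassifier(data);

    // classifyInstance() returns the index of the predicted class value;
    // distributionForInstance() returns the class probabilities.
    Instance first = data.instance(0);
    double pred = tree.classifyInstance(first);
    double[] dist = tree.distributionForInstance(first);

    System.out.println("Predicted class: " + data.classAttribute().value((int) pred));
    System.out.println("Class distribution: " + java.util.Arrays.toString(dist));
  }
}

The other flags documented in the javadoc (-U, -R with -N and -Q, -B, -S, -L, -A) can be passed through setOptions() in the same way.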
