j48.java

Some data mining source code, posted casually just to be able to download things.
Language: JAVA
Page 1 of 2
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    J48.java
 *    Copyright (C) 1999 Eibe Frank
 *
 */

package weka.classifiers.trees;

import java.util.Enumeration;
import java.util.Vector;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.Sourcable;
import weka.classifiers.trees.j48.BinC45ModelSelection;
import weka.classifiers.trees.j48.C45ModelSelection;
import weka.classifiers.trees.j48.C45PruneableClassifierTree;
import weka.classifiers.trees.j48.ClassifierTree;
import weka.classifiers.trees.j48.ModelSelection;
import weka.classifiers.trees.j48.PruneableClassifierTree;
import weka.core.AdditionalMeasureProducer;
import weka.core.Drawable;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Matchable;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.Summarizable;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;

/**
 * Class for generating an unpruned or a pruned C4.5 decision tree.
 * For more information, see<p>
 *
 * Ross Quinlan (1993). <i>C4.5: Programs for Machine Learning</i>, 
 * Morgan Kaufmann Publishers, San Mateo, CA. </p>
 *
 * Valid options are: <p>
 *
 * -U <br>
 * Use unpruned tree.<p>
 *
 * -C confidence <br>
 * Set confidence threshold for pruning. (Default: 0.25) <p>
 *
 * -M number <br>
 * Set minimum number of instances per leaf. (Default: 2) <p>
 *
 * -R <br>
 * Use reduced error pruning. No subtree raising is performed. <p>
 *
 * -N number <br>
 * Set number of folds for reduced error pruning. One fold is
 * used as the pruning set. (Default: 3) <p>
 *
 * -B <br>
 * Use binary splits for nominal attributes. <p>
 *
 * -S <br>
 * Don't perform subtree raising. <p>
 *
 * -L <br>
 * Do not clean up after the tree has been built. <p>
 *
 * -A <br>
 * If set, Laplace smoothing is used for predicted probabilities. <p>
 *
 * -Q <br>
 * The seed for reduced-error pruning. <p>
 *
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @version $Revision$
 */
public class J48 extends Classifier implements OptionHandler, 
  Drawable, Matchable, Sourcable, WeightedInstancesHandler, Summarizable,
  AdditionalMeasureProducer {

  // To maintain the same version number after adding m_ClassAttribute
  static final long serialVersionUID = -217733168393644444L;

  /** The decision tree */
  private ClassifierTree m_root;
  
  /** Unpruned tree? */
  private boolean m_unpruned = false;

  /** Confidence level */
  private float m_CF = 0.25f;

  /** Minimum number of instances */
  private int m_minNumObj = 2;

  /** Determines whether probabilities are smoothed using
      Laplace correction when predictions are generated */
  private boolean m_useLaplace = false;

  /** Use reduced error pruning? */
  private boolean m_reducedErrorPruning = false;

  /** Number of folds for reduced error pruning. */
  private int m_numFolds = 3;

  /** Binary splits on nominal attributes? */
  private boolean m_binarySplits = false;

  /** Subtree raising to be performed? */
  private boolean m_subtreeRaising = true;

  /** Cleanup after the tree has been built. */
  private boolean m_noCleanup = false;

  /** Random number seed for reduced-error pruning. */
  private int m_Seed = 1;

  /**
   * Returns a string describing the classifier
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {

    return  "Class for generating a pruned or unpruned C4.5 decision tree. For more "
      + "information, see\n\n"
      + "Ross Quinlan (1993). \"C4.5: Programs for Machine Learning\", "
      + "Morgan Kaufmann Publishers, San Mateo, CA.\n\n";
  }
  
  /**
   * Returns the root of the J48 classifier.
   * Added by TWang, Jan 26, 2005.
   *
   * @return the root node of the decision tree
   */
  public ClassifierTree getRoot() {
    return m_root;
  }

  /**
   * ONLY call this method after calling classifyInstance(instance_A).
   * It returns the probability of classifying instance_A into the class
   * that classifyInstance() returned. Alternatively, instance_A could be
   * passed in explicitly and classifyInstance() called inside this method.
   *
   * Added by TWang.
   *
   * @return the probability associated with the most recent classification
   */
  public double getProbability() {
    return m_root.getProbablilty();
  }
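
  // A hedged usage sketch of the call-order contract described above (the
  // variable names 'j48' and 'inst' are assumptions for illustration only):
  //   double label = j48.classifyInstance(inst);  // must be called first
  //   double prob  = j48.getProbability();        // confidence of that classification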
	
  /**
   * Generates the classifier.
   *
   * @exception Exception if classifier can't be built successfully
   */
  public void buildClassifier(Instances instances) 
       throws Exception {

    ModelSelection modSelection;	 

    if (m_binarySplits)
      modSelection = new BinC45ModelSelection(m_minNumObj, instances);
    else
      modSelection = new C45ModelSelection(m_minNumObj, instances);
    if (!m_reducedErrorPruning)
      m_root = new C45PruneableClassifierTree(modSelection, !m_unpruned, m_CF,
					    m_subtreeRaising, !m_noCleanup);
    else
      m_root = new PruneableClassifierTree(modSelection, !m_unpruned, m_numFolds,
					   !m_noCleanup, m_Seed);
    m_root.buildClassifier(instances);
    if (m_binarySplits) {
      ((BinC45ModelSelection)modSelection).cleanup();
    } else {
      ((C45ModelSelection)modSelection).cleanup();
    }
  }
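
  /* A minimal end-to-end sketch of building this classifier (not part of the
   * original file; the ARFF file name below is an assumption for illustration):
   *
   *   Instances data = new Instances(new java.io.BufferedReader(
   *       new java.io.FileReader("weather.arff")));
   *   data.setClassIndex(data.numAttributes() - 1);  // class = last attribute
   *   J48 tree = new J48();
   *   tree.setOptions(new String[]{"-C", "0.25", "-M", "2"});
   *   tree.buildClassifier(data);
   */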

  /**
   * Classifies an instance.
   *
   * @exception Exception if instance can't be classified successfully
   */
  public double classifyInstance(Instance instance) throws Exception {

    return m_root.classifyInstance(instance);
  }

  /** 
   * Returns class probabilities for an instance.
   *
   * @exception Exception if distribution can't be computed successfully
   */
  public final double [] distributionForInstance(Instance instance) 
       throws Exception {

    return m_root.distributionForInstance(instance, m_useLaplace);
  }
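
  // Hedged example (assuming 'tree' is a built J48 and 'inst' an Instance
  // with the same header): dist[i] is the estimated probability of class
  // value i, and the entries sum to one.
  //   double[] dist = tree.distributionForInstance(inst);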

  /**
   *  Returns the type of graph this classifier
   *  represents.
   *  @return Drawable.TREE
   */   
  public int graphType() {
      return Drawable.TREE;
  }

  /**
   * Returns graph describing the tree.
   *
   * @exception Exception if graph can't be computed
   */
  public String graph() throws Exception {

    return m_root.graph();
  }
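
  // The returned string describes the tree in GraphViz-style "digraph" syntax;
  // a hedged sketch of dumping it to a file (the file name is illustrative):
  //   java.io.Writer w = new java.io.FileWriter("j48.dot");
  //   w.write(tree.graph());
  //   w.close();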

  /**
   * Returns tree in prefix order.
   *
   * @exception Exception if something goes wrong
   */
  public String prefix() throws Exception {
    
    return m_root.prefix();
  }


  /**
   * Returns tree as an if-then statement.
   *
   * @return the tree as a Java if-then type statement
   * @exception Exception if something goes wrong
   */
  public String toSource(String className) throws Exception {

    StringBuffer [] source = m_root.toSource(className);
    return 
    "class " + className + " {\n\n"
    +"  public static double classify(Object [] i)\n"
    +"    throws Exception {\n\n"
    +"    double p = Double.NaN;\n"
    + source[0]  // Assignment code
    +"    return p;\n"
    +"  }\n"
    + source[1]  // Support code
    +"}\n";
  }
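
  // Hedged example: tree.toSource("J48Model") returns Java source for a class
  // named J48Model whose static classify(Object[]) method replays the tree's
  // decisions on an array of attribute values; e.g. (illustration only):
  //   System.out.println(tree.toSource("J48Model"));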

  /**
   * Returns an enumeration describing the available options.
   *
   * Valid options are: <p>
   *
   * -U <br>
   * Use unpruned tree.<p>
   *
   * -C confidence <br>
   * Set confidence threshold for pruning. (Default: 0.25) <p>
   *
   * -M number <br>
   * Set minimum number of instances per leaf. (Default: 2) <p>
   *
   * -R <br>
   * Use reduced error pruning. No subtree raising is performed. <p>
   *
   * -N number <br>
   * Set number of folds for reduced error pruning. One fold is
   * used as the pruning set. (Default: 3) <p>
   *
   * -B <br>
   * Use binary splits for nominal attributes. <p>
   *
   * -S <br>
   * Don't perform subtree raising. <p>
   *
   * -L <br>
   * Do not clean up after the tree has been built. <p>
   *
   * -A <br>
   * If set, Laplace smoothing is used for predicted probabilities. <p>
   *
   * -Q <br>
   * The seed for reduced-error pruning. <p>
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration<Option> listOptions() {

    Vector<Option> newVector = new Vector<Option>(9);

    newVector.
	addElement(new Option("\tUse unpruned tree.",
			      "U", 0, "-U"));
    newVector.
	addElement(new Option("\tSet confidence threshold for pruning.\n" +
			      "\t(default 0.25)",
			      "C", 1, "-C <pruning confidence>"));
    newVector.
	addElement(new Option("\tSet minimum number of instances per leaf.\n" +
			      "\t(default 2)",
			      "M", 1, "-M <minimum number of instances>"));
    newVector.
	addElement(new Option("\tUse reduced error pruning.",
			      "R", 0, "-R"));
    newVector.
	addElement(new Option("\tSet number of folds for reduced error\n" +
			      "\tpruning. One fold is used as pruning set.\n" +
			      "\t(default 3)",
			      "N", 1, "-N <number of folds>"));
    newVector.
	addElement(new Option("\tUse binary splits only.",
			      "B", 0, "-B"));
    newVector.
        addElement(new Option("\tDon't perform subtree raising.",
			      "S", 0, "-S"));
    newVector.
        addElement(new Option("\tDo not clean up after the tree has been built.",
			      "L", 0, "-L"));
    newVector.
        addElement(new Option("\tLaplace smoothing for predicted probabilities.",
			      "A", 0, "-A"));
    newVector.
      addElement(new Option("\tSeed for random data shuffling (default 1).",
			    "Q", 1, "-Q <seed>"));

    return newVector.elements();
  }

  /**
   * Parses a given list of options.
   *
   * @param options the list of options as an array of strings
   * @exception Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    
    // Other options
    String minNumString = Utils.getOption('M', options);
    if (minNumString.length() != 0) {
      m_minNumObj = Integer.parseInt(minNumString);
    } else {
      m_minNumObj = 2;
    }
    m_binarySplits = Utils.getFlag('B', options);
    m_useLaplace = Utils.getFlag('A', options);

    // Pruning options
    m_unpruned = Utils.getFlag('U', options);
    m_subtreeRaising = !Utils.getFlag('S', options);
    m_noCleanup = Utils.getFlag('L', options);
    if ((m_unpruned) && (!m_subtreeRaising)) {
      throw new Exception("Subtree raising doesn't need to be unset for unpruned tree!");
    }
    m_reducedErrorPruning = Utils.getFlag('R', options);
    if ((m_unpruned) && (m_reducedErrorPruning)) {
      throw new Exception("Unpruned tree and reduced error pruning can't be selected " +
			  "simultaneously!");
    }
    String confidenceString = Utils.getOption('C', options);
    if (confidenceString.length() != 0) {
      if (m_reducedErrorPruning) {
	throw new Exception("Setting the confidence doesn't make sense " +
			    "for reduced error pruning.");
      } else if (m_unpruned) {
	throw new Exception("Doesn't make sense to change confidence for unpruned "
			    +"tree!");
      } else {
	m_CF = (new Float(confidenceString)).floatValue();
	if ((m_CF <= 0) || (m_CF >= 1)) {
	  throw new Exception("Confidence has to be greater than zero and smaller " +
			      "than one!");
	}
      }
    } else {
      m_CF = 0.25f;
    }
    String numFoldsString = Utils.getOption('N', options);
    if (numFoldsString.length() != 0) {
      if (!m_reducedErrorPruning) {
	throw new Exception("Setting the number of folds" +
			    " doesn't make sense if" +
			    " reduced error pruning is not selected.");
      } else {
	m_numFolds = Integer.parseInt(numFoldsString);
      }
    } else {
      m_numFolds = 3;
    }
    String seedString = Utils.getOption('Q', options);
    if (seedString.length() != 0) {
      m_Seed = Integer.parseInt(seedString);
    } else {
      m_Seed = 1;
    }
  }
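
  // Hedged example of the option format parsed above (equivalent to the
  // command line "-R -N 5 -Q 42"; the values are illustrative only):
  //   tree.setOptions(new String[]{"-R", "-N", "5", "-Q", "42"});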

  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String [] getOptions() {

    String [] options = new String [14];
    int current = 0;

    if (m_noCleanup) {
      options[current++] = "-L";
    }
    if (m_unpruned) {
      options[current++] = "-U";
