j48.java

Uploader's note: "Posted this datamining source code just so I could download things."
Language: JAVA
Page 1 of 2
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    J48.java
 *    Copyright (C) 1999 Eibe Frank
 *
 */

package weka.classifiers.trees;

import java.util.Enumeration;
import java.util.Vector;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.Sourcable;
import weka.classifiers.trees.j48.BinC45ModelSelection;
import weka.classifiers.trees.j48.C45ModelSelection;
import weka.classifiers.trees.j48.C45PruneableClassifierTree;
import weka.classifiers.trees.j48.ClassifierTree;
import weka.classifiers.trees.j48.ModelSelection;
import weka.classifiers.trees.j48.PruneableClassifierTree;
import weka.core.AdditionalMeasureProducer;
import weka.core.Drawable;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Matchable;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.Summarizable;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;

/**
 * Class for generating an unpruned or a pruned C4.5 decision tree.
 * For more information, see<p>
 *
 * Ross Quinlan (1993). <i>C4.5: Programs for Machine Learning</i>, 
 * Morgan Kaufmann Publishers, San Mateo, CA. </p>
 *
 * Valid options are: <p>
 *
 * -U <br>
 * Use unpruned tree.<p>
 *
 * -C confidence <br>
 * Set confidence threshold for pruning. (Default: 0.25) <p>
 *
 * -M number <br>
 * Set minimum number of instances per leaf. (Default: 2) <p>
 *
 * -R <br>
 * Use reduced error pruning. No subtree raising is performed. <p>
 *
 * -N number <br>
 * Set number of folds for reduced error pruning. One fold is
 * used as the pruning set. (Default: 3) <p>
 *
 * -B <br>
 * Use binary splits for nominal attributes. <p>
 *
 * -S <br>
 * Don't perform subtree raising. <p>
 *
 * -L <br>
 * Do not clean up after the tree has been built. <p>
 *
 * -A <br>
 * If set, Laplace smoothing is used for predicted probabilities. <p>
 *
 * -Q <br>
 * The seed for reduced-error pruning. <p>
 *
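 * As a usage sketch (not from the original source; <code>data</code> is an
 * assumed, already-loaded <code>weka.core.Instances</code> object with its
 * class index set), the same options can also be applied programmatically: <p>
 *
 * <pre>
 * J48 tree = new J48();
 * tree.setOptions(new String[] {"-C", "0.25", "-M", "2"});
 * tree.buildClassifier(data);
 * double label = tree.classifyInstance(data.instance(0));
 * </pre>
 *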
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @version $Revision$
 */
public class J48 extends Classifier implements OptionHandler, 
  Drawable, Matchable, Sourcable, WeightedInstancesHandler, Summarizable,
  AdditionalMeasureProducer {

  // To maintain the same version number after adding m_ClassAttribute
  static final long serialVersionUID = -217733168393644444L;

  /** The decision tree */
  private ClassifierTree m_root;
  
  /** Unpruned tree? */
  private boolean m_unpruned = false;

  /** Confidence level */
  private float m_CF = 0.25f;

  /** Minimum number of instances */
  private int m_minNumObj = 2;

  /** Determines whether probabilities are smoothed using
      Laplace correction when predictions are generated */
  private boolean m_useLaplace = false;

  /** Use reduced error pruning? */
  private boolean m_reducedErrorPruning = false;

  /** Number of folds for reduced error pruning. */
  private int m_numFolds = 3;

  /** Binary splits on nominal attributes? */
  private boolean m_binarySplits = false;

  /** Subtree raising to be performed? */
  private boolean m_subtreeRaising = true;

  /** Cleanup after the tree has been built. */
  private boolean m_noCleanup = false;

  /** Random number seed for reduced-error pruning. */
  private int m_Seed = 1;

  /**
   * Returns a string describing the classifier.
   * @return a description suitable for
   * displaying in the explorer/experimenter gui
   */
  public String globalInfo() {

    return  "Class for generating a pruned or unpruned C4.5 decision tree. For more "
      + "information, see\n\n"
      + "Ross Quinlan (1993). \"C4.5: Programs for Machine Learning\", "
      + "Morgan Kaufmann Publishers, San Mateo, CA.\n\n";
  }
  
  /**
   * Returns the root of the J48 classifier tree.
   * By TWang. Jan 26, 2005.
   * @return the root ClassifierTree node
   */
  public ClassifierTree getRoot() {
    return m_root;
  }

  /**
   * ONLY call this method after calling classifyInstance(instance_A).
   * It returns the probability of classifying instance_A into the
   * returned class. Alternatively, instance_A could be passed in
   * explicitly and classifyInstance() called inside this method.
   *
   * TWang.
   * @return the probability associated with the most recent classification
   */
  public double getProbability() {
    // Note: the spelling of getProbablilty() follows the modified ClassifierTree
    // this code is built against; the method is not part of stock Weka.
    return m_root.getProbablilty();
  }
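
  // Usage sketch for the TWang additions above (illustrative, not part of stock
  // Weka). "j48" is an assumed trained J48 instance and "inst" an assumed
  // weka.core.Instance compatible with the training data:
  //
  //   double label = j48.classifyInstance(inst);   // classify first ...
  //   double conf  = j48.getProbability();         // ... then read its probability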
	
  /**
   * Generates the classifier.
   *
   * @exception Exception if classifier can't be built successfully
   */
  public void buildClassifier(Instances instances) 
       throws Exception {

    ModelSelection modSelection;	 

    if (m_binarySplits)
      modSelection = new BinC45ModelSelection(m_minNumObj, instances);
    else
      modSelection = new C45ModelSelection(m_minNumObj, instances);
    if (!m_reducedErrorPruning)
      m_root = new C45PruneableClassifierTree(modSelection, !m_unpruned, m_CF,
					    m_subtreeRaising, !m_noCleanup);
    else
      m_root = new PruneableClassifierTree(modSelection, !m_unpruned, m_numFolds,
					   !m_noCleanup, m_Seed);
    m_root.buildClassifier(instances);
    if (m_binarySplits) {
      ((BinC45ModelSelection)modSelection).cleanup();
    } else {
      ((C45ModelSelection)modSelection).cleanup();
    }
  }
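
  // For reference, how the option flags map to the tree built above (derived
  // from the code in buildClassifier):
  //   -B  -> BinC45ModelSelection (binary splits); otherwise C45ModelSelection
  //   -R  -> PruneableClassifierTree (reduced-error pruning, -N folds, -Q seed)
  //   otherwise -> C45PruneableClassifierTree (C4.5 pruning with confidence -C,
  //                subtree raising unless -S); -U requests an unpruned tree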

  /**
   * Classifies an instance.
   *
   * @exception Exception if instance can't be classified successfully
   */
  public double classifyInstance(Instance instance) throws Exception {

    return m_root.classifyInstance(instance);
  }

  /** 
   * Returns class probabilities for an instance.
   *
   * @exception Exception if distribution can't be computed successfully
   */
  public final double [] distributionForInstance(Instance instance) 
       throws Exception {

    return m_root.distributionForInstance(instance, m_useLaplace);
  }
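
  // Usage sketch (illustrative; "tree" is an assumed trained J48 and "inst" an
  // assumed weka.core.Instance compatible with the training data):
  //
  //   double[] dist = tree.distributionForInstance(inst);
  //   // dist[k] is the estimated probability of class k; with -A set, the
  //   // leaf estimates are Laplace-corrected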

  /**
   *  Returns the type of graph this classifier
   *  represents.
   *  @return Drawable.TREE
   */   
  public int graphType() {
      return Drawable.TREE;
  }

  /**
   * Returns graph describing the tree.
   *
   * @exception Exception if graph can't be computed
   */
  public String graph() throws Exception {

    return m_root.graph();
  }

  /**
   * Returns tree in prefix order.
   *
   * @exception Exception if something goes wrong
   */
  public String prefix() throws Exception {
    
    return m_root.prefix();
  }


  /**
   * Returns tree as an if-then statement.
   *
   * @return the tree as a Java if-then type statement
   * @exception Exception if something goes wrong
   */
  public String toSource(String className) throws Exception {

    StringBuffer [] source = m_root.toSource(className);
    return 
    "class " + className + " {\n\n"
    +"  public static double classify(Object [] i)\n"
    +"    throws Exception {\n\n"
    +"    double p = Double.NaN;\n"
    + source[0]  // Assignment code
    +"    return p;\n"
    +"  }\n"
    + source[1]  // Support code
    +"}\n";
  }
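
  // Usage sketch (illustrative; "tree" is an assumed, already-built J48 and the
  // class name is arbitrary): generating Java source for the learned tree.
  //
  //   String code = tree.toSource("J48Model");
  //   System.out.println(code);  // a class with a static classify(Object[]) method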

  /**
   * Returns an enumeration describing the available options.
   *
   * Valid options are: <p>
   *
   * -U <br>
   * Use unpruned tree.<p>
   *
   * -C confidence <br>
   * Set confidence threshold for pruning. (Default: 0.25) <p>
   *
   * -M number <br>
   * Set minimum number of instances per leaf. (Default: 2) <p>
   *
   * -R <br>
   * Use reduced error pruning. No subtree raising is performed. <p>
   *
   * -N number <br>
   * Set number of folds for reduced error pruning. One fold is
   * used as the pruning set. (Default: 3) <p>
   *
   * -B <br>
   * Use binary splits for nominal attributes. <p>
   *
   * -S <br>
   * Don't perform subtree raising. <p>
   *
   * -L <br>
   * Do not clean up after the tree has been built. <p>
   *
   * -A <br>
   * If set, Laplace smoothing is used for predicted probabilities. <p>
   *
   * -Q <br>
   * The seed for reduced-error pruning. <p>
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration<Option> listOptions() {

    Vector<Option> newVector = new Vector<Option>(9);

    newVector.
	addElement(new Option("\tUse unpruned tree.",
			      "U", 0, "-U"));
    newVector.
	addElement(new Option("\tSet confidence threshold for pruning.\n" +
			      "\t(default 0.25)",
			      "C", 1, "-C <pruning confidence>"));
    newVector.
	addElement(new Option("\tSet minimum number of instances per leaf.\n" +
			      "\t(default 2)",
			      "M", 1, "-M <minimum number of instances>"));
    newVector.
	addElement(new Option("\tUse reduced error pruning.",
			      "R", 0, "-R"));
    newVector.
	addElement(new Option("\tSet number of folds for reduced error\n" +
			      "\tpruning. One fold is used as pruning set.\n" +
			      "\t(default 3)",
			      "N", 1, "-N <number of folds>"));
    newVector.
	addElement(new Option("\tUse binary splits only.",
			      "B", 0, "-B"));
    newVector.
        addElement(new Option("\tDon't perform subtree raising.",
			      "S", 0, "-S"));
    newVector.
        addElement(new Option("\tDo not clean up after the tree has been built.",
			      "L", 0, "-L"));
   newVector.
        addElement(new Option("\tLaplace smoothing for predicted probabilities.",
			      "A", 0, "-A"));
    newVector.
      addElement(new Option("\tSeed for random data shuffling (default 1).",
			    "Q", 1, "-Q <seed>"));

    return newVector.elements();
  }

  /**
   * Parses a given list of options.
   *
   * @param options the list of options as an array of strings
   * @exception Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {
    
    // Other options
    String minNumString = Utils.getOption('M', options);
    if (minNumString.length() != 0) {
      m_minNumObj = Integer.parseInt(minNumString);
    } else {
      m_minNumObj = 2;
    }
    m_binarySplits = Utils.getFlag('B', options);
    m_useLaplace = Utils.getFlag('A', options);

    // Pruning options
    m_unpruned = Utils.getFlag('U', options);
    m_subtreeRaising = !Utils.getFlag('S', options);
    m_noCleanup = Utils.getFlag('L', options);
    if ((m_unpruned) && (!m_subtreeRaising)) {
      throw new Exception("Subtree raising doesn't need to be unset for unpruned tree!");
    }
    m_reducedErrorPruning = Utils.getFlag('R', options);
    if ((m_unpruned) && (m_reducedErrorPruning)) {
      throw new Exception("Unpruned tree and reduced error pruning can't be selected " +
			  "simultaneously!");
    }
    String confidenceString = Utils.getOption('C', options);
    if (confidenceString.length() != 0) {
      if (m_reducedErrorPruning) {
	throw new Exception("Setting the confidence doesn't make sense " +
			    "for reduced error pruning.");
      } else if (m_unpruned) {
	throw new Exception("Doesn't make sense to change confidence for unpruned "
			    +"tree!");
      } else {
	m_CF = Float.parseFloat(confidenceString);
	if ((m_CF <= 0) || (m_CF >= 1)) {
	  throw new Exception("Confidence has to be greater than zero and smaller " +
			      "than one!");
	}
      }
    } else {
      m_CF = 0.25f;
    }
    String numFoldsString = Utils.getOption('N', options);
    if (numFoldsString.length() != 0) {
      if (!m_reducedErrorPruning) {
	throw new Exception("Setting the number of folds" +
			    " doesn't make sense if" +
			    " reduced error pruning is not selected.");
      } else {
	m_numFolds = Integer.parseInt(numFoldsString);
      }
    } else {
      m_numFolds = 3;
    }
    String seedString = Utils.getOption('Q', options);
    if (seedString.length() != 0) {
      m_Seed = Integer.parseInt(seedString);
    } else {
      m_Seed = 1;
    }
  }
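
  // Example option arrays accepted by setOptions (values illustrative):
  //
  //   new String[] {"-C", "0.25", "-M", "2"}      // C4.5 pruning with the defaults
  //   new String[] {"-R", "-N", "3", "-Q", "1"}   // reduced-error pruning
  //   new String[] {"-U", "-M", "5"}              // unpruned tree, larger leaves
  //
  // Combining -U with -R or -S, or -C with -R or -U, or -N without -R, is
  // rejected with an Exception above.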

  /**
   * Gets the current settings of the Classifier.
   *
   * @return an array of strings suitable for passing to setOptions
   */
  public String [] getOptions() {

    String [] options = new String [14];
    int current = 0;

    if (m_noCleanup) {
      options[current++] = "-L";
    }
    if (m_unpruned) {
      options[current++] = "-U";
