id3.java (Weka / Java)
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    Id3.java
 *    Copyright (C) 1999 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.trees;

import weka.classifiers.Classifier;
import weka.classifiers.Sourcable;
import weka.core.Attribute;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.NoSupportForMissingValuesException;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;

import java.util.Enumeration;

/**
 <!-- globalinfo-start -->
 * Class for constructing an unpruned decision tree based on the ID3
 * algorithm. Can only deal with nominal attributes. No missing values
 * allowed. Empty leaves may result in unclassified instances. For more
 * information see: <br/>
 * <br/>
 * R. Quinlan (1986). Induction of decision trees. Machine Learning. 1(1):81-106.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;article{Quinlan1986,
 *    author = {R. Quinlan},
 *    journal = {Machine Learning},
 *    number = {1},
 *    pages = {81-106},
 *    title = {Induction of decision trees},
 *    volume = {1},
 *    year = {1986}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 <!-- options-end -->
 *
 * @author Eibe Frank (eibe@cs.waikato.ac.nz)
 * @version $Revision: 1.22 $
 */
public class Id3
  extends Classifier
  implements TechnicalInformationHandler, Sourcable {

  /** for serialization */
  static final long serialVersionUID = -2693678647096322561L;

  /** The node's successors. */
  private Id3[] m_Successors;

  /** Attribute used for splitting. */
  private Attribute m_Attribute;

  /** Class value if node is leaf. */
  private double m_ClassValue;

  /** Class distribution if node is leaf. */
  private double[] m_Distribution;

  /** Class attribute of dataset. */
  private Attribute m_ClassAttribute;

  /**
   * Returns a string describing the classifier.
   * @return a description suitable for the GUI.
   */
  public String globalInfo() {

    return  "Class for constructing an unpruned decision tree based on the ID3 "
      + "algorithm. Can only deal with nominal attributes. No missing values "
      + "allowed. Empty leaves may result in unclassified instances. For more "
      + "information see: \n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation result;

    result = new TechnicalInformation(Type.ARTICLE);
    result.setValue(Field.AUTHOR, "R. Quinlan");
    result.setValue(Field.YEAR, "1986");
    result.setValue(Field.TITLE, "Induction of decision trees");
    result.setValue(Field.JOURNAL, "Machine Learning");
    result.setValue(Field.VOLUME, "1");
    result.setValue(Field.NUMBER, "1");
    result.setValue(Field.PAGES, "81-106");

    return result;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return      the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();

    // attributes
    result.enable(Capability.NOMINAL_ATTRIBUTES);

    // class
    result.enable(Capability.NOMINAL_CLASS);
    result.enable(Capability.MISSING_CLASS_VALUES);

    // instances
    result.setMinimumNumberInstances(0);

    return result;
  }

  /**
   * Builds Id3 decision tree classifier.
   *
   * @param data the training data
   * @exception Exception if classifier can't be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    makeTree(data);
  }

  /**
   * Method for building an Id3 tree.
   *
   * @param data the training data
   * @exception Exception if decision tree can't be built successfully
   */
  private void makeTree(Instances data) throws Exception {

    // Check if no instances have reached this node.
    if (data.numInstances() == 0) {
      m_Attribute = null;
      m_ClassValue = Instance.missingValue();
      m_Distribution = new double[data.numClasses()];
      return;
    }

    // Compute attribute with maximum information gain.
    double[] infoGains = new double[data.numAttributes()];
    Enumeration attEnum = data.enumerateAttributes();
    while (attEnum.hasMoreElements()) {
      Attribute att = (Attribute) attEnum.nextElement();
      infoGains[att.index()] = computeInfoGain(data, att);
    }
    m_Attribute = data.attribute(Utils.maxIndex(infoGains));

    // Make leaf if information gain is zero.
    // Otherwise create successors.
    if (Utils.eq(infoGains[m_Attribute.index()], 0)) {
      m_Attribute = null;
      m_Distribution = new double[data.numClasses()];
      Enumeration instEnum = data.enumerateInstances();
      while (instEnum.hasMoreElements()) {
        Instance inst = (Instance) instEnum.nextElement();
        m_Distribution[(int) inst.classValue()]++;
      }
      Utils.normalize(m_Distribution);
      m_ClassValue = Utils.maxIndex(m_Distribution);
      m_ClassAttribute = data.classAttribute();
    } else {
      Instances[] splitData = splitData(data, m_Attribute);
      m_Successors = new Id3[m_Attribute.numValues()];
      for (int j = 0; j < m_Attribute.numValues(); j++) {
        m_Successors[j] = new Id3();
        m_Successors[j].makeTree(splitData[j]);
      }
    }
  }

  /**
   * Classifies a given test instance using the decision tree.
   *
   * @param instance the instance to be classified
   * @return the classification
   * @throws NoSupportForMissingValuesException if instance has missing values
   */
  public double classifyInstance(Instance instance)
    throws NoSupportForMissingValuesException {

    if (instance.hasMissingValue()) {
      throw new NoSupportForMissingValuesException("Id3: no missing values, "
                                                   + "please.");
    }
    if (m_Attribute == null) {
      return m_ClassValue;
    } else {
      return m_Successors[(int) instance.value(m_Attribute)].
        classifyInstance(instance);
    }
  }

  /**
   * Computes class distribution for instance using decision tree.
   *
   * @param instance the instance for which distribution is to be computed
   * @return the class distribution for the given instance
   * @throws NoSupportForMissingValuesException if instance has missing values
   */
  public double[] distributionForInstance(Instance instance)
    throws NoSupportForMissingValuesException {

    if (instance.hasMissingValue()) {
      throw new NoSupportForMissingValuesException("Id3: no missing values, "
                                                   + "please.");
    }
    if (m_Attribute == null) {
      return m_Distribution;
    } else {
      return m_Successors[(int) instance.value(m_Attribute)].
        distributionForInstance(instance);
    }
  }

  /**
   * Prints the decision tree using the private toString method from below.
   *
   * @return a textual description of the classifier
   */
  public String toString() {

    if ((m_Distribution == null) && (m_Successors == null)) {
      return "Id3: No model built yet.";
    }
    return "Id3\n\n" + toString(0);
  }

  /**
   * Computes information gain for an attribute.
   *
   * @param data the data for which info gain is to be computed
   * @param att the attribute
   * @return the information gain for the given attribute and data
   * @throws Exception if computation fails
   */
  private double computeInfoGain(Instances data, Attribute att)
    throws Exception {

    double infoGain = computeEntropy(data);
    Instances[] splitData = splitData(data, att);
    for (int j = 0; j < att.numValues(); j++) {
      if (splitData[j].numInstances() > 0) {
        infoGain -= ((double) splitData[j].numInstances() /
                     (double) data.numInstances()) *
          computeEntropy(splitData[j]);
      }
    }
    return infoGain;
  }

  /**
   * Computes the entropy of a dataset.
   *
   * @param data the data for which entropy is to be computed
   * @return the entropy of the data's class distribution
   * @throws Exception if computation fails
   */
  private double computeEntropy(Instances data) throws Exception {

    double[] classCounts = new double[data.numClasses()];
    Enumeration instEnum = data.enumerateInstances();
    while (instEnum.hasMoreElements()) {
      Instance inst = (Instance) instEnum.nextElement();
      classCounts[(int) inst.classValue()]++;
    }
    double entropy = 0;
    for (int j = 0; j < data.numClasses(); j++) {
      if (classCounts[j] > 0) {
        entropy -= classCounts[j] * Utils.log2(classCounts[j]);
      }
    }
    entropy /= (double) data.numInstances();
    return entropy + Utils.log2(data.numInstances());
  }

  /**
   * Splits a dataset according to the values of a nominal attribute.
   *
   * @param data the data which is to be split
   * @param att the attribute to be used for splitting
   * @return the sets of instances produced by the split
   */
  private Instances[] splitData(Instances data, Attribute att) {

    Instances[] splitData = new Instances[att.numValues()];
    for (int j = 0; j < att.numValues(); j++) {
      splitData[j] = new Instances(data, data.numInstances());
    }
    Enumeration instEnum = data.enumerateInstances();
    while (instEnum.hasMoreElements()) {
      Instance inst = (Instance) instEnum.nextElement();
      splitData[(int) inst.value(att)].add(inst);
    }
    for (int i = 0; i < splitData.length; i++) {
      splitData[i].compactify();
    }
    return splitData;
  }

  /**
   * Outputs a tree at a certain level.
   *
   * @param level the level at which the tree is to be printed
   * @return the tree as string at the given level
   */
  private String toString(int level) {

    StringBuffer text = new StringBuffer();

    if (m_Attribute == null) {
      if (Instance.isMissingValue(m_ClassValue)) {
        text.append(": null");
      } else {
        text.append(": " + m_ClassAttribute.value((int) m_ClassValue));
      }
    } else {
      for (int j = 0; j < m_Attribute.numValues(); j++) {
        text.append("\n");
        for (int i = 0; i < level; i++) {
          text.append("|  ");
        }
        text.append(m_Attribute.name() + " = " + m_Attribute.value(j));
        text.append(m_Successors[j].toString(level + 1));
      }
    }
    return text.toString();
  }

  /**
   * Adds this tree recursively to the buffer.
   *
   * @param id          the unique id for the method
   * @param buffer      the buffer to add the source code to
   * @return            the last ID being used
   * @throws Exception  if something goes wrong
   */
  protected int toSource(int id, StringBuffer buffer) throws Exception {
    int                 result;
    int                 i;
    int                 newID;
    StringBuffer[]      subBuffers;

    buffer.append("\n");
    buffer.append("  protected static double node" + id + "(Object[] i) {\n");

    // leaf?
    if (m_Attribute == null) {
      result = id;
      if (Double.isNaN(m_ClassValue))
        buffer.append("    return Double.NaN;");
      else
        buffer.append("    return " + m_ClassValue + ";");
      if (m_ClassAttribute != null)
        buffer.append(" // " + m_ClassAttribute.value((int) m_ClassValue));
      buffer.append("\n");
      buffer.append("  }\n");
    }
    else {
      buffer.append("    // " + m_Attribute.name() + "\n");

      // subtree calls
      subBuffers = new StringBuffer[m_Attribute.numValues()];
      newID      = id;
      for (i = 0; i < m_Attribute.numValues(); i++) {
        newID++;
        buffer.append("    ");
        if (i > 0)
          buffer.append("else ");
        buffer.append("if (((String) i[" + m_Attribute.index()
            + "]).equals(\"" + m_Attribute.value(i) + "\"))\n");
        buffer.append("      return node" + newID + "(i);\n");
        subBuffers[i] = new StringBuffer();
        newID         = m_Successors[i].toSource(newID, subBuffers[i]);
      }
      buffer.append("    else\n");
      buffer.append("      throw new IllegalArgumentException(\"Value '\" + i["
          + m_Attribute.index() + "] + \"' is not allowed!\");\n");
      buffer.append("  }\n");

      // output subtree code
      for (i = 0; i < m_Attribute.numValues(); i++) {
        buffer.append(subBuffers[i].toString());
      }
      subBuffers = null;

      result = newID;
    }

    return result;
  }

  /**
   * Returns a string that describes the classifier as source. The
   * classifier will be contained in a class with the given name (there may
   * be auxiliary classes),
   * and will contain a method with the signature:
   * <pre><code>
   * public static double classify(Object[] i);
   * </code></pre>
   * where the array <code>i</code> contains elements that are either
   * Double, String, with missing values represented as null. The generated
   * code is public domain and comes with no warranty. <br/>
   * Note: works only if class attribute is the last attribute in the dataset.
   *
   * @param className the name that should be given to the source class.
   * @return the object source described by a string
   * @throws Exception if the source can't be computed
   */
  public String toSource(String className) throws Exception {
    StringBuffer        result;
    int                 id;

    result = new StringBuffer();
    result.append("class " + className + " {\n");
    result.append("  public static double classify(Object[] i) {\n");
    id = 0;
    result.append("    return node" + id + "(i);\n");
    result.append("  }\n");
    toSource(id, result);
    result.append("}\n");

    return result.toString();
  }

  /**
   * Main method.
   *
   * @param args the options for the classifier
   */
  public static void main(String[] args) {
    runClassifier(new Id3(), args);
  }
}
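For reference, here is a minimal usage sketch (not part of the original file). It assumes a Weka release that still ships weka.classifiers.trees.Id3 on the classpath (e.g. 3.4/3.6) and a purely nominal ARFF dataset with no missing values; the file name "weather.nominal.arff" is only a placeholder.

import weka.classifiers.trees.Id3;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class Id3Demo {
  public static void main(String[] args) throws Exception {
    // Load a dataset with nominal attributes only and no missing values.
    Instances data = DataSource.read("weather.nominal.arff");
    // Use the last attribute as the class, the usual Weka convention.
    data.setClassIndex(data.numAttributes() - 1);

    // Build the unpruned ID3 tree.
    Id3 tree = new Id3();
    tree.buildClassifier(data);

    // Print the learned tree and classify the first training instance.
    System.out.println(tree);
    Instance first = data.instance(0);
    double pred = tree.classifyInstance(first);
    System.out.println("Predicted class: "
        + data.classAttribute().value((int) pred));
  }
}

The classifier can also be run from the command line through its own main method, e.g. java weka.classifiers.trees.Id3 -t weather.nominal.arff, since runClassifier handles the standard Weka training and evaluation options.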
