
bftree.java

Weka
JAVA
Page 1 of 5
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 * BFTree.java
 * Copyright (C) 2007 Haijian Shi
 *
 */

package weka.classifiers.trees;

import weka.classifiers.Evaluation;
import weka.classifiers.RandomizableClassifier;
import weka.core.AdditionalMeasureProducer;
import weka.core.Attribute;
import weka.core.Capabilities;
import weka.core.FastVector;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.SelectedTag;
import weka.core.Tag;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.core.matrix.Matrix;

import java.util.Arrays;
import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Class for building a best-first decision tree classifier. This class uses
 * binary split for both nominal and numeric attributes. For missing values,
 * the method of 'fractional' instances is used.<br/>
 * <br/>
 * For more information, see:<br/>
 * <br/>
 * Haijian Shi (2007). Best-first decision tree learning. Hamilton, NZ.<br/>
 * <br/>
 * Jerome Friedman, Trevor Hastie, Robert Tibshirani (2000). Additive logistic
 * regression: A statistical view of boosting. Annals of Statistics. 28(2):337-407.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;mastersthesis{Shi2007,
 *    address = {Hamilton, NZ},
 *    author = {Haijian Shi},
 *    note = {COMP594},
 *    school = {University of Waikato},
 *    title = {Best-first decision tree learning},
 *    year = {2007}
 * }
 *
 * &#64;article{Friedman2000,
 *    author = {Jerome Friedman and Trevor Hastie and Robert Tibshirani},
 *    journal = {Annals of Statistics},
 *    number = {2},
 *    pages = {337-407},
 *    title = {Additive logistic regression: A statistical view of boosting},
 *    volume = {28},
 *    year = {2000},
 *    ISSN = {0090-5364}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -S &lt;num&gt;
 *  Random number seed.
 *  (default 1)</pre>
 *
 * <pre> -D
 *  If set, classifier is run in debug mode and
 *  may output additional info to the console</pre>
 *
 * <pre> -P &lt;UNPRUNED|POSTPRUNED|PREPRUNED&gt;
 *  The pruning strategy.
 *  (default: POSTPRUNED)</pre>
 *
 * <pre> -M &lt;min no&gt;
 *  The minimal number of instances at the terminal nodes.
 *  (default 2)</pre>
 *
 * <pre> -N &lt;num folds&gt;
 *  The number of folds used in the pruning.
 *  (default 5)</pre>
 *
 * <pre> -H
 *  Don't use heuristic search for nominal attributes in multi-class
 *  problem (default yes).
 * </pre>
 *
 * <pre> -G
 *  Don't use Gini index for splitting (default yes);
 *  if not, information is used.</pre>
 *
 * <pre> -R
 *  Don't use error rate in internal cross-validation (default yes),
 *  but root mean squared error.</pre>
 *
 * <pre> -A
 *  Use the 1 SE rule to make pruning decision.
 *  (default no).</pre>
 *
 * <pre> -C
 *  Percentage of training data size (0-1]
 *  (default 1).</pre>
 *
 <!-- options-end -->
 *
 * @author Haijian Shi (hs69@cs.waikato.ac.nz)
 * @version $Revision: 1.3 $
 */
public class BFTree
  extends RandomizableClassifier
  implements AdditionalMeasureProducer, TechnicalInformationHandler {

  /** For serialization. */
  private static final long serialVersionUID = -7035607375962528217L;

  /** Pruning strategy: un-pruned. */
  public static final int PRUNING_UNPRUNED = 0;

  /** Pruning strategy: post-pruning. */
  public static final int PRUNING_POSTPRUNING = 1;

  /** Pruning strategy: pre-pruning. */
  public static final int PRUNING_PREPRUNING = 2;

  /** Pruning strategy tags. */
  public static final Tag[] TAGS_PRUNING = {
    new Tag(PRUNING_UNPRUNED, "unpruned", "Un-pruned"),
    new Tag(PRUNING_POSTPRUNING, "postpruned", "Post-pruning"),
    new Tag(PRUNING_PREPRUNING, "prepruned", "Pre-pruning")
  };

  /** The pruning strategy. */
  protected int m_PruningStrategy = PRUNING_POSTPRUNING;

  /** Successor nodes. */
  protected BFTree[] m_Successors;

  /** Attribute used for splitting. */
  protected Attribute m_Attribute;

  /** Split point (for numeric attributes). */
  protected double m_SplitValue;

  /** Split subset (for nominal attributes). */
  protected String m_SplitString;

  /** Class value for a node. */
  protected double m_ClassValue;

  /** Class attribute of a dataset. */
  protected Attribute m_ClassAttribute;

  /** Minimum number of instances at leaf nodes. */
  protected int m_minNumObj = 2;

  /** Number of folds for the pruning. */
  protected int m_numFoldsPruning = 5;

  /** Whether the node is a leaf node. */
  protected boolean m_isLeaf;

  /** Number of expansions. */
  protected static int m_Expansion;

  /** Fixed number of expansions (-1 if no pruning method is used; otherwise
   *  its value is obtained from internal cross-validation). */
  protected int m_FixedExpansion = -1;

  /** Whether to use heuristic search for binary splits (default true). Note that
   *  even if true, it is only used when a nominal attribute has more than 4 values. */
  protected boolean m_Heuristic = true;

  /** Whether to use the Gini index as the splitting criterion (default true;
   *  if false, information is used). */
  protected boolean m_UseGini = true;

  /** Whether to use the error rate in internal cross-validation to fix the number
   *  of expansions (default true; if false, root mean squared error is used). */
  protected boolean m_UseErrorRate = true;

  /** Whether to use the 1SE rule to make the pruning decision. */
  protected boolean m_UseOneSE = false;

  /** Class distributions. */
  protected double[] m_Distribution;

  /** Branch proportions. */
  protected double[] m_Props;

  /** Sorted indices. */
  protected int[][] m_SortedIndices;

  /** Sorted weights. */
  protected double[][] m_Weights;

  /** Distributions of each attribute for two successor nodes. */
  protected double[][][] m_Dists;

  /** Class probabilities. */
  protected double[] m_ClassProbs;

  /** Total weights. */
  protected double m_TotalWeight;

  /** The training data size (0-1). Default 1. */
  protected double m_SizePer = 1;

  /**
   * Returns a string describing the classifier.
   *
   * @return a description suitable for displaying in the explorer/experimenter gui
   */
  public String globalInfo() {
    return
        "Class for building a best-first decision tree classifier. "
      + "This class uses binary split for both nominal and numeric attributes. "
      + "For missing values, the method of 'fractional' instances is used.\n\n"
      + "For more information, see:\n\n"
      + getTechnicalInformation().toString();
  }

  /**
   * Returns an instance of a TechnicalInformation object, containing
   * detailed information about the technical background of this class,
   * e.g., paper reference or book this class is based on.
   *
   * @return the technical information about this class
   */
  public TechnicalInformation getTechnicalInformation() {
    TechnicalInformation result;
    TechnicalInformation additional;

    result = new TechnicalInformation(Type.MASTERSTHESIS);
    result.setValue(Field.AUTHOR, "Haijian Shi");
    result.setValue(Field.YEAR, "2007");
    result.setValue(Field.TITLE, "Best-first decision tree learning");
    result.setValue(Field.SCHOOL, "University of Waikato");
    result.setValue(Field.ADDRESS, "Hamilton, NZ");
    result.setValue(Field.NOTE, "COMP594");

    additional = result.add(Type.ARTICLE);
    additional.setValue(Field.AUTHOR, "Jerome Friedman and Trevor Hastie and Robert Tibshirani");
    additional.setValue(Field.YEAR, "2000");
    additional.setValue(Field.TITLE, "Additive logistic regression: A statistical view of boosting");
    additional.setValue(Field.JOURNAL, "Annals of Statistics");
    additional.setValue(Field.VOLUME, "28");
    additional.setValue(Field.NUMBER, "2");
    additional.setValue(Field.PAGES, "337-407");
    additional.setValue(Field.ISSN, "0090-5364");

    return result;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();

    // attributes
    result.enable(Capability.NOMINAL_ATTRIBUTES);
    result.enable(Capability.NUMERIC_ATTRIBUTES);
    result.enable(Capability.MISSING_VALUES);

    // class
    result.enable(Capability.NOMINAL_CLASS);

    return result;
  }

  /**
   * Method for building a best-first decision tree classifier.
   *
   * @param data set of instances serving as training data
   * @throws Exception if the decision tree cannot be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {

    getCapabilities().testWithFail(data);

    data = new Instances(data);
    data.deleteWithMissingClass();

    // build an unpruned tree
    if (m_PruningStrategy == PRUNING_UNPRUNED) {

      // calculate sorted indices, weights and initial class probabilities
      int[][] sortedIndices = new int[data.numAttributes()][0];
      double[][] weights = new double[data.numAttributes()][0];
      double[] classProbs = new double[data.numClasses()];
      double totalWeight = computeSortedInfo(data, sortedIndices, weights, classProbs);

      // Compute information of the best split for this node (including split attribute,
      // split value and Gini gain (or information gain)). At the same time, compute
      // the variables dists, props and totalSubsetWeights.
      double[][][] dists = new double[data.numAttributes()][2][data.numClasses()];
      double[][] props = new double[data.numAttributes()][2];
      double[][] totalSubsetWeights = new double[data.numAttributes()][2];
      FastVector nodeInfo = computeSplitInfo(this, data, sortedIndices, weights, dists,
          props, totalSubsetWeights, m_Heuristic, m_UseGini);

      // add the node (with all split info) into BestFirstElements
      FastVector BestFirstElements = new FastVector();
      BestFirstElements.addElement(nodeInfo);

      // Make the best-first decision tree.
      int attIndex = ((Attribute) nodeInfo.elementAt(1)).index();
      m_Expansion = 0;
      makeTree(BestFirstElements, data, sortedIndices, weights, dists, classProbs,
          totalWeight, props[attIndex], m_minNumObj, m_Heuristic, m_UseGini, m_FixedExpansion);

      return;
    }

    // the following code is for the pre-pruning and post-pruning methods

    // Compute train data, test data, sorted indices, sorted weights, total weights,
    // class probabilities, class distributions, branch proportions and total subset
    // weights for the root nodes of each fold for pre-pruning and post-pruning.
    int expansion = 0;

    Random random = new Random(m_Seed);
    Instances cvData = new Instances(data);
    cvData.randomize(random);
    cvData = new Instances(cvData, 0, (int) (cvData.numInstances() * m_SizePer) - 1);
    cvData.stratify(m_numFoldsPruning);

    Instances[] train = new Instances[m_numFoldsPruning];
    Instances[] test = new Instances[m_numFoldsPruning];
    FastVector[] parallelBFElements = new FastVector[m_numFoldsPruning];
    BFTree[] m_roots = new BFTree[m_numFoldsPruning];

    int[][][] sortedIndices = new int[m_numFoldsPruning][data.numAttributes()][0];
    double[][][] weights = new double[m_numFoldsPruning][data.numAttributes()][0];
    double[][] classProbs = new double[m_numFoldsPruning][data.numClasses()];
    double[] totalWeight = new double[m_numFoldsPruning];
    double[][][][] dists =
      new double[m_numFoldsPruning][data.numAttributes()][2][data.numClasses()];
    double[][][] props =
      new double[m_numFoldsPruning][data.numAttributes()][2];
    double[][][] totalSubsetWeights =
      new double[m_numFoldsPruning][data.numAttributes()][2];
    FastVector[] nodeInfo = new FastVector[m_numFoldsPruning];

    // (End of page 1 of 5; buildClassifier and the rest of the class continue
    // on the remaining pages.)
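In buildClassifier above, BestFirstElements is a FastVector of per-node split information kept ordered by split gain, and makeTree (defined on a later page) repeatedly expands the front element until the expansion budget m_FixedExpansion (fixed by internal cross-validation when pruning is enabled) runs out. The sketch below only illustrates that best-first expansion idea with a PriorityQueue; it is not Weka's actual makeTree, and the NodeInfo class, its fields, and the gain values are invented for the example.

import java.util.PriorityQueue;

/** Illustrative sketch of best-first tree expansion (not Weka's makeTree). */
class BestFirstSketch {

  /** A node awaiting expansion, ranked by the gain of its best split (hypothetical). */
  static class NodeInfo {
    double gain;          // Gini gain (or info gain) of this node's best split
    NodeInfo[] children;  // assumed to be computed when the node is split
    NodeInfo(double gain) { this.gain = gain; }
  }

  static void expand(PriorityQueue<NodeInfo> frontier, int maxExpansions) {
    int expansions = 0;
    // Always expand the node whose best split yields the highest gain first;
    // stop when the frontier is empty or the expansion budget is exhausted.
    while (!frontier.isEmpty() && expansions < maxExpansions) {
      NodeInfo best = frontier.poll();
      if (best.gain <= 0) break;            // no useful split left: rest become leaves
      // ... a real tree would split best's data and compute each child's best split ...
      for (NodeInfo child : best.children)  // children assumed precomputed for the sketch
        frontier.add(child);
      expansions++;
    }
  }

  public static void main(String[] args) {
    // Frontier ordered by descending gain, mirroring the sorted BestFirstElements list.
    PriorityQueue<NodeInfo> frontier =
        new PriorityQueue<>((a, b) -> Double.compare(b.gain, a.gain));
    NodeInfo root = new NodeInfo(0.25);
    root.children = new NodeInfo[0];        // stub: no child splits in this toy example
    frontier.add(root);
    expand(frontier, 10);
  }
}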
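For context, here is a minimal usage sketch driven by the -P, -M and -N options documented in the class javadoc. It assumes Weka's standard classifier API (setOptions via OptionHandler, classifyInstance from the Classifier base class) and a hypothetical local ARFF file iris.arff; the option values are arbitrary choices for the example.

import java.io.BufferedReader;
import java.io.FileReader;

import weka.classifiers.trees.BFTree;
import weka.core.Instances;
import weka.core.Utils;

public class BFTreeDemo {
  public static void main(String[] args) throws Exception {
    // Load a dataset; BFTree requires a nominal class attribute.
    Instances data = new Instances(new BufferedReader(new FileReader("iris.arff")));
    data.setClassIndex(data.numAttributes() - 1);

    BFTree tree = new BFTree();
    // Pre-pruning, at least 5 instances per leaf, 5 folds of internal CV;
    // these flags are the ones listed in the options javadoc above.
    tree.setOptions(Utils.splitOptions("-P PREPRUNED -M 5 -N 5"));
    tree.buildClassifier(data);

    // Predict the class of the first training instance.
    double pred = tree.classifyInstance(data.instance(0));
    System.out.println(data.classAttribute().value((int) pred));
  }
}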
