lmt.java

A Java program
Category: JAVA
Page 1 of 2
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    LMT.java
 *    Copyright (C) 2003 Niels Landwehr
 *
 */

package weka.classifiers.trees;

import weka.classifiers.Classifier;
import weka.classifiers.Evaluation;
import weka.classifiers.trees.j48.C45ModelSelection;
import weka.classifiers.trees.j48.ModelSelection;
import weka.classifiers.trees.lmt.LMTNode;
import weka.classifiers.trees.lmt.ResidualModelSelection;
import weka.core.AdditionalMeasureProducer;
import weka.core.Capabilities;
import weka.core.Drawable;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformation.Type;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.Capabilities.Capability;
import weka.filters.Filter;
import weka.filters.supervised.attribute.NominalToBinary;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;

import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Classifier for building 'logistic model trees', which are classification trees with
 * logistic regression functions at the leaves. The algorithm can deal with binary and
 * multi-class target variables, numeric and nominal attributes and missing values.<br/>
 * <br/>
 * For more information see: <br/>
 * <br/>
 * Niels Landwehr, Mark Hall, Eibe Frank (2005). Logistic Model Trees.<br/>
 * <br/>
 * Marc Sumner, Eibe Frank, Mark Hall: Speeding up Logistic Model Tree Induction. In: 9th
 * European Conference on Principles and Practice of Knowledge Discovery in Databases,
 * 675-683, 2005.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;article{Landwehr2005,
 *    author = {Niels Landwehr and Mark Hall and Eibe Frank},
 *    booktitle = {Machine Learning},
 *    number = {1-2},
 *    pages = {161-205},
 *    title = {Logistic Model Trees},
 *    volume = {95},
 *    year = {2005}
 * }
 *
 * &#64;inproceedings{Sumner2005,
 *    author = {Marc Sumner and Eibe Frank and Mark Hall},
 *    booktitle = {9th European Conference on Principles and Practice of Knowledge Discovery in Databases},
 *    pages = {675-683},
 *    publisher = {Springer},
 *    title = {Speeding up Logistic Model Tree Induction},
 *    year = {2005}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -B
 *  Binary splits (convert nominal attributes to binary ones)</pre>
 *
 * <pre> -R
 *  Split on residuals instead of class values</pre>
 *
 * <pre> -C
 *  Use cross-validation for boosting at all nodes (i.e., disable heuristic)</pre>
 *
 * <pre> -P
 *  Use error on probabilities instead of misclassification error for stopping criterion of LogitBoost.</pre>
 *
 * <pre> -I &lt;numIterations&gt;
 *  Set fixed number of iterations for LogitBoost (instead of using cross-validation)</pre>
 *
 * <pre> -M &lt;numInstances&gt;
 *  Set minimum number of instances at which a node can be split (default 15)</pre>
 *
 * <pre> -W &lt;beta&gt;
 *  Set beta for weight trimming for LogitBoost. Set to 0 (default) for no weight trimming.</pre>
 *
 * <pre> -A
 *  The AIC is used to choose the best iteration.</pre>
 *
 <!-- options-end -->
 *
 * @author Niels Landwehr
 * @author Marc Sumner
 * @version $Revision: 1.8 $
 */
public class LMT
  extends Classifier
  implements OptionHandler, AdditionalMeasureProducer, Drawable,
             TechnicalInformationHandler {

  /** for serialization */
  static final long serialVersionUID = -1113212459618104943L;

  /** Filter to replace missing values */
  protected ReplaceMissingValues m_replaceMissing;

  /** Filter to replace nominal attributes */
  protected NominalToBinary m_nominalToBinary;

  /** root of the logistic model tree */
  protected LMTNode m_tree;

  /** use heuristic that determines the number of LogitBoost iterations only once in the beginning? */
  protected boolean m_fastRegression;

  /** convert nominal attributes to binary? */
  protected boolean m_convertNominal;

  /** split on residuals? */
  protected boolean m_splitOnResiduals;

  /** use error on probabilities instead of misclassification for stopping criterion of LogitBoost? */
  protected boolean m_errorOnProbabilities;

  /** minimum number of instances at which a node is considered for splitting */
  protected int m_minNumInstances;

  /** if non-zero, use fixed number of iterations for LogitBoost */
  protected int m_numBoostingIterations;

  /** Threshold for trimming weights. Instances with a weight lower than this (as a percentage
   *  of total weights) are not included in the regression fit. */
  protected double m_weightTrimBeta;

  /** If true, the AIC is used to choose the best LogitBoost iteration */
  private boolean m_useAIC = false;

  /**
   * Creates an instance of LMT with standard options
   */
  public LMT() {
    m_fastRegression = true;
    m_numBoostingIterations = -1;
    m_minNumInstances = 15;
    m_weightTrimBeta = 0;
    m_useAIC = false;
  }

  /**
   * Returns default capabilities of the classifier.
   *
   * @return the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();

    // attributes
    result.enable(Capability.NOMINAL_ATTRIBUTES);
    result.enable(Capability.NUMERIC_ATTRIBUTES);
    result.enable(Capability.DATE_ATTRIBUTES);
    result.enable(Capability.MISSING_VALUES);

    // class
    result.enable(Capability.NOMINAL_CLASS);
    result.enable(Capability.MISSING_CLASS_VALUES);

    return result;
  }

  /**
   * Builds the classifier.
   *
   * @param data the data to train with
   * @throws Exception if classifier can't be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    Instances filteredData = new Instances(data);
    filteredData.deleteWithMissingClass();

    // replace missing values
    m_replaceMissing = new ReplaceMissingValues();
    m_replaceMissing.setInputFormat(filteredData);
    filteredData = Filter.useFilter(filteredData, m_replaceMissing);

    // possibly convert nominal attributes globally
    if (m_convertNominal) {
      m_nominalToBinary = new NominalToBinary();
      m_nominalToBinary.setInputFormat(filteredData);
      filteredData = Filter.useFilter(filteredData, m_nominalToBinary);
    }

    int minNumInstances = 2;

    // create ModelSelection object, either for splits on the residuals or for splits on the class value
    ModelSelection modSelection;
    if (m_splitOnResiduals) {
      modSelection = new ResidualModelSelection(minNumInstances);
    } else {
      modSelection = new C45ModelSelection(minNumInstances, filteredData);
    }

    // create tree root
    m_tree = new LMTNode(modSelection, m_numBoostingIterations, m_fastRegression,
                         m_errorOnProbabilities, m_minNumInstances, m_weightTrimBeta, m_useAIC);
    // build tree
    m_tree.buildClassifier(filteredData);

    if (modSelection instanceof C45ModelSelection) ((C45ModelSelection) modSelection).cleanup();
  }

  /**
   * Returns class probabilities for an instance.
   *
   * @param instance the instance to compute the distribution for
   * @return the class probabilities
   * @throws Exception if distribution can't be computed successfully
   */
  public double[] distributionForInstance(Instance instance) throws Exception {

    // replace missing values
    m_replaceMissing.input(instance);
    instance = m_replaceMissing.output();

    // possibly convert nominal attributes
    if (m_convertNominal) {
      m_nominalToBinary.input(instance);
      instance = m_nominalToBinary.output();
    }

    return m_tree.distributionForInstance(instance);
  }

  /**
   * Classifies an instance.
   *
   * @param instance the instance to classify
   * @return the classification
   * @throws Exception if instance can't be classified successfully
   */
  public double classifyInstance(Instance instance) throws Exception {

    double maxProb = -1;
    int maxIndex = 0;

    // classify by maximum probability
    double[] probs = distributionForInstance(instance);
    for (int j = 0; j < instance.numClasses(); j++) {
      if (Utils.gr(probs[j], maxProb)) {
        maxIndex = j;
        maxProb = probs[j];
      }
    }
    return (double) maxIndex;
  }

  /**
   * Returns a description of the classifier.
   *
   * @return a string representation of the classifier
   */
  public String toString() {
    if (m_tree != null) {
      return "Logistic model tree \n------------------\n" + m_tree.toString();
    } else {
      return "No tree built";
    }
  }

  /**
   * Returns an enumeration describing the available options.
   *
   * @return an enumeration of all the available options.
   */
  public Enumeration listOptions() {
    Vector newVector = new Vector(8);

    newVector.addElement(new Option("\tBinary splits (convert nominal attributes to binary ones)",
                                    "B", 0, "-B"));

    newVector.addElement(new Option("\tSplit on residuals instead of class values",
                                    "R", 0, "-R"));

    newVector.addElement(new Option("\tUse cross-validation for boosting at all nodes (i.e., disable heuristic)",
                                    "C", 0, "-C"));

    newVector.addElement(new Option("\tUse error on probabilities instead of misclassification error " +
                                    "for stopping criterion of LogitBoost.",
                                    "P", 0, "-P"));

    newVector.addElement(new Option("\tSet fixed number of iterations for LogitBoost (instead of using " +
                                    "cross-validation)",
                                    "I", 1, "-I <numIterations>"));

    newVector.addElement(new Option("\tSet minimum number of instances at which a node can be split (default 15)",
                                    "M", 1, "-M <numInstances>"));

    newVector.addElement(new Option("\tSet beta for weight trimming for LogitBoost. Set to 0 (default) for no weight trimming.",
                                    "W", 1, "-W <beta>"));

    newVector.addElement(new Option("\tThe AIC is used to choose the best iteration.",
                                    "A", 0, "-A"));

    return newVector.elements();
  }

  /**
   * Parses a given list of options. <p/>
   *
   <!-- options-start -->
   * Valid options are: <p/>
   *
   * <pre> -B
   *  Binary splits (convert nominal attributes to binary ones)</pre>
   *
   * <pre> -R
   *  Split on residuals instead of class values</pre>
   *
   * <pre> -C
   *  Use cross-validation for boosting at all nodes (i.e., disable heuristic)</pre>
   *
   * <pre> -P
   *  Use error on probabilities instead of misclassification error for stopping criterion of LogitBoost.</pre>
   *
   * <pre> -I &lt;numIterations&gt;
   *  Set fixed number of iterations for LogitBoost (instead of using cross-validation)</pre>
   *
   * <pre> -M &lt;numInstances&gt;
   *  Set minimum number of instances at which a node can be split (default 15)</pre>
   *
   * <pre> -W &lt;beta&gt;
   *  Set beta for weight trimming for LogitBoost. Set to 0 (default) for no weight trimming.</pre>
   *
   * <pre> -A
   *  The AIC is used to choose the best iteration.</pre>
   *
   <!-- options-end -->
   *
   * @param options the list of options as an array of strings
   * @throws Exception if an option is not supported
   */
  public void setOptions(String[] options) throws Exception {

    setConvertNominal(Utils.getFlag('B', options));
    setSplitOnResiduals(Utils.getFlag('R', options));
    setFastRegression(!Utils.getFlag('C', options));
    setErrorOnProbabilities(Utils.getFlag('P', options));

    String optionString = Utils.getOption('I', options);
    if (optionString.length() != 0) {
      setNumBoostingIterations((new Integer(optionString)).intValue());
    }

    optionString = Utils.getOption('M', options);
    if (optionString.length() != 0) {
      setMinNumInstances((new Integer(optionString)).intValue());
    }

    optionString = Utils.getOption('W', options);
    if (optionString.length() != 0) {
      setWeightTrimBeta((new Double(optionString)).doubleValue());
    }

    setUseAIC(Utils.getFlag('A', options));

    Utils.checkForRemainingOptions(options);
  }

  /**

(End of page 1 of 2.)
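For context, here is a minimal sketch of how the class above is typically driven through the standard Weka API. It is not part of the original file: the ARFF path, the option values passed to setOptions, and the cross-validation setup are illustrative assumptions, and it presumes a Weka version that provides ConverterUtils.DataSource alongside this revision of LMT.

import weka.classifiers.Evaluation;
import weka.classifiers.trees.LMT;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

import java.util.Random;

public class LMTUsageSketch {

  public static void main(String[] args) throws Exception {
    // Load a dataset with a nominal class attribute; "iris.arff" is a placeholder path.
    Instances data = DataSource.read("iris.arff");
    data.setClassIndex(data.numAttributes() - 1);

    // Configure the tree with options documented in the javadoc above:
    // -M 30 raises the minimum number of instances required to split a node,
    // -B converts nominal attributes to binary ones before building.
    LMT lmt = new LMT();
    lmt.setOptions(new String[] {"-M", "30", "-B"});

    // Build the logistic model tree on the training data and print its structure.
    lmt.buildClassifier(data);
    System.out.println(lmt);

    // Estimate performance with 10-fold cross-validation on a fresh classifier instance.
    Evaluation eval = new Evaluation(data);
    eval.crossValidateModel(new LMT(), data, 10, new Random(1));
    System.out.println(eval.toSummaryString());
  }
}

Passing -C instead would disable the "fast regression" heuristic (see setFastRegression in setOptions above), so the number of LogitBoost iterations would be cross-validated at every node rather than once at the root.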
