SimpleLogistic.java

Category: Weka
Language: Java
Page 1 of 2
/*
 *    This program is free software; you can redistribute it and/or modify
 *    it under the terms of the GNU General Public License as published by
 *    the Free Software Foundation; either version 2 of the License, or
 *    (at your option) any later version.
 *
 *    This program is distributed in the hope that it will be useful,
 *    but WITHOUT ANY WARRANTY; without even the implied warranty of
 *    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 *    GNU General Public License for more details.
 *
 *    You should have received a copy of the GNU General Public License
 *    along with this program; if not, write to the Free Software
 *    Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 */

/*
 *    SimpleLogistic.java
 *    Copyright (C) 2003 University of Waikato, Hamilton, New Zealand
 *
 */

package weka.classifiers.functions;

import weka.classifiers.Classifier;
import weka.classifiers.trees.lmt.LogisticBase;
import weka.core.AdditionalMeasureProducer;
import weka.core.Capabilities;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.Option;
import weka.core.OptionHandler;
import weka.core.TechnicalInformation;
import weka.core.TechnicalInformationHandler;
import weka.core.Utils;
import weka.core.WeightedInstancesHandler;
import weka.core.Capabilities.Capability;
import weka.core.TechnicalInformation.Field;
import weka.core.TechnicalInformation.Type;
import weka.filters.Filter;
import weka.filters.unsupervised.attribute.NominalToBinary;
import weka.filters.unsupervised.attribute.ReplaceMissingValues;

import java.util.Enumeration;
import java.util.Vector;

/**
 <!-- globalinfo-start -->
 * Classifier for building linear logistic regression models. LogitBoost with simple regression
 * functions as base learners is used for fitting the logistic models. The optimal number of
 * LogitBoost iterations to perform is cross-validated, which leads to automatic attribute
 * selection. For more information see:<br/>
 * Niels Landwehr, Mark Hall, Eibe Frank (2005). Logistic Model Trees.<br/>
 * <br/>
 * Marc Sumner, Eibe Frank, Mark Hall: Speeding up Logistic Model Tree Induction. In: 9th European
 * Conference on Principles and Practice of Knowledge Discovery in Databases, 675-683, 2005.
 * <p/>
 <!-- globalinfo-end -->
 *
 <!-- technical-bibtex-start -->
 * BibTeX:
 * <pre>
 * &#64;article{Landwehr2005,
 *    author = {Niels Landwehr and Mark Hall and Eibe Frank},
 *    booktitle = {Machine Learning},
 *    number = {1-2},
 *    pages = {161-205},
 *    title = {Logistic Model Trees},
 *    volume = {95},
 *    year = {2005}
 * }
 *
 * &#64;inproceedings{Sumner2005,
 *    author = {Marc Sumner and Eibe Frank and Mark Hall},
 *    booktitle = {9th European Conference on Principles and Practice of Knowledge Discovery in Databases},
 *    pages = {675-683},
 *    publisher = {Springer},
 *    title = {Speeding up Logistic Model Tree Induction},
 *    year = {2005}
 * }
 * </pre>
 * <p/>
 <!-- technical-bibtex-end -->
 *
 <!-- options-start -->
 * Valid options are: <p/>
 *
 * <pre> -I &lt;iterations&gt;
 *  Set fixed number of iterations for LogitBoost</pre>
 *
 * <pre> -S
 *  Use stopping criterion on training set (instead of
 *  cross-validation)</pre>
 *
 * <pre> -P
 *  Use error on probabilities (rmse) instead of
 *  misclassification error for stopping criterion</pre>
 *
 * <pre> -M &lt;iterations&gt;
 *  Set maximum number of boosting iterations</pre>
 *
 * <pre> -H &lt;iterations&gt;
 *  Set parameter for heuristic for early stopping of
 *  LogitBoost.
 *  If enabled, the minimum is selected greedily, stopping
 *  if the current minimum has not changed for iter iterations.
 *  By default, heuristic is enabled with value 50. Set to
 *  zero to disable heuristic.</pre>
 *
 * <pre> -W &lt;beta&gt;
 *  Set beta for weight trimming for LogitBoost. Set to 0 for no weight trimming.
 * </pre>
 *
 * <pre> -A
 *  The AIC is used to choose the best iteration (instead of CV or training error).
 * </pre>
 *
 <!-- options-end -->
 *
 * @author Niels Landwehr
 * @author Marc Sumner
 * @version $Revision: 1.12 $
 */
public class SimpleLogistic
    extends Classifier
    implements OptionHandler, AdditionalMeasureProducer, WeightedInstancesHandler,
               TechnicalInformationHandler {

    /** for serialization */
    static final long serialVersionUID = 7397710626304705059L;

    /** The actual logistic regression model */
    protected LogisticBase m_boostedModel;

    /** Filter for converting nominal attributes to binary ones */
    protected NominalToBinary m_NominalToBinary = null;

    /** Filter for replacing missing values */
    protected ReplaceMissingValues m_ReplaceMissingValues = null;

    /** If non-negative, use this as fixed number of LogitBoost iterations */
    protected int m_numBoostingIterations;

    /** Maximum number of iterations for LogitBoost */
    protected int m_maxBoostingIterations = 500;

    /** Parameter for the heuristic for early stopping of LogitBoost */
    protected int m_heuristicStop = 50;

    /** If true, cross-validate number of LogitBoost iterations */
    protected boolean m_useCrossValidation;

    /** If true, minimize error on probabilities instead of misclassification error */
    protected boolean m_errorOnProbabilities;

    /**
     * Threshold for trimming weights. Instances with a weight lower than this (as a percentage
     * of total weights) are not included in the regression fit.
     */
    protected double m_weightTrimBeta = 0;

    /** If true, the AIC is used to choose the best iteration */
    private boolean m_useAIC = false;

    /**
     * Constructor for creating SimpleLogistic object with standard options.
     */
    public SimpleLogistic() {
        m_numBoostingIterations = 0;
        m_useCrossValidation = true;
        m_errorOnProbabilities = false;
        m_weightTrimBeta = 0;
        m_useAIC = false;
    }

    /**
     * Constructor for creating SimpleLogistic object.
     * @param numBoostingIterations if non-negative, use this as fixed number of iterations for LogitBoost
     * @param useCrossValidation cross-validate number of LogitBoost iterations
     * @param errorOnProbabilities minimize error on probabilities instead of misclassification error
     */
    public SimpleLogistic(int numBoostingIterations, boolean useCrossValidation,
                          boolean errorOnProbabilities) {
        m_numBoostingIterations = numBoostingIterations;
        m_useCrossValidation = useCrossValidation;
        m_errorOnProbabilities = errorOnProbabilities;
        m_weightTrimBeta = 0;
        m_useAIC = false;
    }

    /**
     * Returns default capabilities of the classifier.
     *
     * @return the capabilities of this classifier
     */
    public Capabilities getCapabilities() {
        Capabilities result = super.getCapabilities();

        // attributes
        result.enable(Capability.NOMINAL_ATTRIBUTES);
        result.enable(Capability.NUMERIC_ATTRIBUTES);
        result.enable(Capability.DATE_ATTRIBUTES);
        result.enable(Capability.MISSING_VALUES);

        // class
        result.enable(Capability.NOMINAL_CLASS);
        result.enable(Capability.MISSING_CLASS_VALUES);

        return result;
    }

    /**
     * Builds the logistic regression using LogitBoost.
     * @param data the training data
     * @throws Exception if something goes wrong
     */
    public void buildClassifier(Instances data) throws Exception {

        // can classifier handle the data?
        getCapabilities().testWithFail(data);

        // remove instances with missing class
        data = new Instances(data);
        data.deleteWithMissingClass();

        // replace missing values
        m_ReplaceMissingValues = new ReplaceMissingValues();
        m_ReplaceMissingValues.setInputFormat(data);
        data = Filter.useFilter(data, m_ReplaceMissingValues);

        // convert nominal attributes
        m_NominalToBinary = new NominalToBinary();
        m_NominalToBinary.setInputFormat(data);
        data = Filter.useFilter(data, m_NominalToBinary);

        // create actual logistic model
        m_boostedModel = new LogisticBase(m_numBoostingIterations, m_useCrossValidation,
                                          m_errorOnProbabilities);
        m_boostedModel.setMaxIterations(m_maxBoostingIterations);
        m_boostedModel.setHeuristicStop(m_heuristicStop);
        m_boostedModel.setWeightTrimBeta(m_weightTrimBeta);
        m_boostedModel.setUseAIC(m_useAIC);

        // build logistic model
        m_boostedModel.buildClassifier(data);
    }

    /**
     * Returns class probabilities for an instance.
     *
     * @param inst the instance to compute the probabilities for
     * @return the probabilities
     * @throws Exception if distribution can't be computed successfully
     */
    public double[] distributionForInstance(Instance inst) throws Exception {

        // replace missing values / convert nominal atts
        m_ReplaceMissingValues.input(inst);
        inst = m_ReplaceMissingValues.output();
        m_NominalToBinary.input(inst);
        inst = m_NominalToBinary.output();

        // obtain probs from logistic model
        return m_boostedModel.distributionForInstance(inst);
    }

    /**
     * Returns an enumeration describing the available options.
     *
     * @return an enumeration of all the available options
     */
    public Enumeration listOptions() {
        Vector newVector = new Vector();

        newVector.addElement(new Option(
            "\tSet fixed number of iterations for LogitBoost",
            "I", 1, "-I <iterations>"));

        newVector.addElement(new Option(
            "\tUse stopping criterion on training set (instead of\n"
            + "\tcross-validation)",
            "S", 0, "-S"));

        newVector.addElement(new Option(
            "\tUse error on probabilities (rmse) instead of\n"
            + "\tmisclassification error for stopping criterion",
            "P", 0, "-P"));

        newVector.addElement(new Option(
            "\tSet maximum number of boosting iterations",
            "M", 1, "-M <iterations>"));

        newVector.addElement(new Option(
            "\tSet parameter for heuristic for early stopping of\n"
            + "\tLogitBoost.\n"
            + "\tIf enabled, the minimum is selected greedily, stopping\n"
            + "\tif the current minimum has not changed for iter iterations.\n"
            + "\tBy default, heuristic is enabled with value 50. Set to\n"
            + "\tzero to disable heuristic.",
            "H", 1, "-H <iterations>"));

        newVector.addElement(new Option(
            "\tSet beta for weight trimming for LogitBoost. Set to 0 for no weight trimming.\n",
            "W", 1, "-W <beta>"));

        newVector.addElement(new Option(
            "\tThe AIC is used to choose the best iteration (instead of CV or training error).\n",
            "A", 0, "-A"));

        return newVector.elements();
    }

    /**
     * Parses a given list of options. <p/>
     *
     <!-- options-start -->
     * Valid options are: <p/>
     *
     * <pre> -I &lt;iterations&gt;
     *  Set fixed number of iterations for LogitBoost</pre>
     *
     * <pre> -S
     *  Use stopping criterion on training set (instead of
     *  cross-validation)</pre>
     *
     * <pre> -P
     *  Use error on probabilities (rmse) instead of
     *  misclassification error for stopping criterion</pre>
     *
     * <pre> -M &lt;iterations&gt;
     *  Set maximum number of boosting iterations</pre>
     *
     * <pre> -H &lt;iterations&gt;
     *  Set parameter for heuristic for early stopping of
     *  LogitBoost.
     *  If enabled, the minimum is selected greedily, stopping
     *  if the current minimum has not changed for iter iterations.
     *  By default, heuristic is enabled with value 50. Set to
     *  zero to disable heuristic.</pre>
     *
     * <pre> -W &lt;beta&gt;
     *  Set beta for weight trimming for LogitBoost. Set to 0 for no weight trimming.
     * </pre>
     *
     * <pre> -A
     *  The AIC is used to choose the best iteration (instead of CV or training error).
     * </pre>
     *
     <!-- options-end -->
     *
     * @param options the list of options as an array of strings
     * @throws Exception if an option is not supported
     */
    public void setOptions(String[] options) throws Exception {

        String optionString = Utils.getOption('I', options);
        if (optionString.length() != 0) {
            setNumBoostingIterations((new Integer(optionString)).intValue());
        }

        setUseCrossValidation(!Utils.getFlag('S', options));
        setErrorOnProbabilities(Utils.getFlag('P', options));

        optionString = Utils.getOption('M', options);
        if (optionString.length() != 0) {
            setMaxBoostingIterations((new Integer(optionString)).intValue());
        }

        optionString = Utils.getOption('H', options);
        if (optionString.length() != 0) {
            setHeuristicStop((new Integer(optionString)).intValue());
        }

        optionString = Utils.getOption('W', options);
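The listing breaks off partway through setOptions(String[]) (the source continues on page 2 of this listing), but the public API shown above, buildClassifier(Instances) and distributionForInstance(Instance), is already enough to train and query the model. Below is a minimal, hypothetical usage sketch that is not part of the Weka distribution: it assumes a nominal-class ARFF file at the placeholder path data/iris.arff and the standard Weka helpers DataSource and Evaluation; adjust the path and option values for your own setup.

import weka.classifiers.Evaluation;
import weka.classifiers.functions.SimpleLogistic;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

import java.util.Arrays;
import java.util.Random;

public class SimpleLogisticDemo {
    public static void main(String[] args) throws Exception {
        // Load a dataset with a nominal class attribute (path is a placeholder).
        Instances data = DataSource.read("data/iris.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Configure SimpleLogistic via the options documented in the Javadoc above:
        // -M 500 caps LogitBoost at 500 iterations, -H 50 keeps the default
        // early-stopping heuristic, -W 0 disables weight trimming.
        SimpleLogistic classifier = new SimpleLogistic();
        classifier.setOptions(new String[] {"-M", "500", "-H", "50", "-W", "0"});

        // Fit the model; missing values and nominal attributes are handled
        // internally by ReplaceMissingValues and NominalToBinary, as shown
        // in buildClassifier(...) above.
        classifier.buildClassifier(data);

        // Class-membership probabilities for the first training instance.
        double[] dist = classifier.distributionForInstance(data.instance(0));
        System.out.println(Arrays.toString(dist));

        // 10-fold cross-validation estimate of performance.
        Evaluation eval = new Evaluation(data);
        eval.crossValidateModel(classifier, data, 10, new Random(1));
        System.out.println(eval.toSummaryString());
    }
}

The -M, -H, and -W values passed to setOptions simply restate the defaults documented in the class Javadoc (maximum of 500 boosting iterations, early-stopping heuristic of 50, no weight trimming); adding "-A" would select the number of iterations by AIC instead of cross-validation or training error.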
