/**
*
* AgentAcademy - an open source Data Mining framework for
* training intelligent agents
*
* Copyright (C) 2001-2003 AA Consortium.
*
* This library is open source software; you can redistribute it
* and/or modify it under the terms of the GNU Lesser General
* Public License as published by the Free Software Foundation;
* either version 2.0 of the License, or (at your option) any later
* version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free
* Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
* MA 02111-1307 USA
*
*/
package org.agentacademy.modules.dataminer.attributeSelection;
import java.util.BitSet;
import java.util.Enumeration;
import java.util.Random;
import java.util.Vector;
import org.agentacademy.modules.dataminer.classifiers.ZeroR;
import org.agentacademy.modules.dataminer.classifiers.evaluation.Classifier;
import org.agentacademy.modules.dataminer.classifiers.evaluation.Evaluation;
import org.agentacademy.modules.dataminer.core.Instances;
import org.agentacademy.modules.dataminer.core.Option;
import org.agentacademy.modules.dataminer.core.OptionHandler;
import org.agentacademy.modules.dataminer.core.Utils;
import org.agentacademy.modules.dataminer.filters.AttributeFilter;
import org.agentacademy.modules.dataminer.filters.Filter;
import org.apache.log4j.Logger;
/**
* Wrapper attribute subset evaluator. <p>
* For more information see: <br>
*
* Kohavi, R., and John, G. H. (1997). Wrappers for Feature Subset Selection.
* <i>Artificial Intelligence</i>, special issue on relevance,
* Vol. 97, Nos. 1-2, pp. 273-324. <p>
*
* Valid options are:<p>
*
* -B <base learner> <br>
* Class name of base learner to use for accuracy estimation.
* Place any classifier options last on the command line following a
* "--". Eg -B weka.classifiers.bayes.NaiveBayes ... -- -K <p>
*
* -F <num> <br>
* Number of cross validation folds to use for estimating accuracy.
* (default = 5) <p>
*
* -S <seed> <br>
* Seed for cross validation accuracy estimation.
* (default = 1) <p>
*
* -T <num> <br>
* Threshold by which to execute another cross validation (standard deviation
* ---expressed as a percentage of the mean). <p>
*
* @author Mark Hall (mhall@cs.waikato.ac.nz)
* @version $Revision: 1.2 $
*/
public class WrapperSubsetEval
extends SubsetEvaluator
implements OptionHandler
{
public static Logger log = Logger.getLogger(WrapperSubsetEval.class);
/** training instances */
private Instances m_trainInstances;
/** class index */
private int m_classIndex;
/** number of attributes in the training data */
private int m_numAttribs;
/** number of instances in the training data */
private int m_numInstances;
/** holds an evaluation object */
private Evaluation m_Evaluation;
/** holds the base classifier object */
private Classifier m_BaseClassifier;
/** number of folds to use for cross validation */
private int m_folds;
/** random number seed */
private int m_seed;
/**
* the threshold by which to do further cross validations when
* estimating the accuracy of a subset
*/
private double m_threshold;
/**
* Returns a string describing this attribute evaluator
* @return a description of the evaluator suitable for
* displaying in the explorer/experimenter gui
*/
public String globalInfo() {
return "WrapperSubsetEval:\n\n"
+"Evaluates attribute sets by using a learning scheme. Cross "
+"validation is used to estimate the accuracy of the learning "
+"scheme for a set of attributes.\n";
}
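// Illustrative sketch only: the accuracy-estimation loop itself (Weka-style
// buildEvaluator/evaluateSubset) is not part of this excerpt. Conceptually,
// for a candidate attribute subset the evaluator cross validates
// m_BaseClassifier on the reduced data and repeats the cross validation while
// the estimate is still unstable, i.e. while the standard deviation of the
// repeated estimates, relative to their mean, exceeds m_threshold.
// Hypothetical outline (crossValidateAccuracy, mean and stdDev are assumed
// helpers; the cap of five repeats is an assumption for illustration):
//
//   double[] acc = new double[5];
//   int n = 0;
//   do {
//     acc[n] = crossValidateAccuracy(reducedData, n);  // one m_folds-fold CV
//     n++;
//   } while (n < 5
//            && (n < 2 || stdDev(acc, n) / mean(acc, n) > m_threshold));
//   return mean(acc, n);                               // averaged estimate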
/**
* Constructor. Calls resetOptions to set default options
**/
public WrapperSubsetEval () {
resetOptions();
}
/**
* Returns an enumeration describing the available options.
* @return an enumeration of all the available options.
**/
public Enumeration listOptions () {
Vector newVector = new Vector(4);
newVector.addElement(new Option("\tclass name of base learner to use for"
+ "\n\taccuracy estimation. Place any"
+ "\n\tclassifier options LAST on the"
+ "\n\tcommand line following a \"--\"."
+ "\n\teg. -B weka.classifiers.bayes.NaiveBayes ... "
+ "-- -K", "B", 1, "-B <base learner>"));
newVector.addElement(new Option("\tnumber of cross validation folds to "
+ "use\n\tfor estimating accuracy."
+ "\n\t(default=5)", "F", 1, "-F <num>"));
newVector.addElement(new Option("\tSeed for cross validation accuracy "
+"\n\testimation."
+"\n\t(default = 1)", "S", 1,"-S <seed>"));
newVector.addElement(new Option("\tthreshold by which to execute "
+ "another cross validation"
+ "\n\t(standard deviation---"
+ "expressed as a percentage of the "
+ "mean).\n\t(default=0.01(1%))"
, "T", 1, "-T <num>"));
if ((m_BaseClassifier != null) &&
(m_BaseClassifier instanceof OptionHandler)) {
newVector.addElement(new Option("", "", 0, "\nOptions specific to"
+ "scheme "
+ m_BaseClassifier.getClass().getName()
+ ":"));
Enumeration enu = ((OptionHandler)m_BaseClassifier).listOptions();
while (enu.hasMoreElements()) {
newVector.addElement(enu.nextElement());
}
}
return newVector.elements();
}
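// Illustrative only: how a caller might print a usage summary from the
// enumeration built above. Option is assumed to expose synopsis() and
// description(), as in the Weka code this class is derived from.
//
//   Enumeration opts = new WrapperSubsetEval().listOptions();
//   while (opts.hasMoreElements()) {
//     Option o = (Option)opts.nextElement();
//     System.out.println(o.synopsis());
//     System.out.println(o.description());
//   }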
/**
* Parses a given list of options.
*
* Valid options are:<p>
*
* -B <base learner> <br>
* Class name of base learner to use for accuracy estimation.
* Place any classifier options last on the command line following a
* "--". Eg -B weka.classifiers.bayes.NaiveBayes ... -- -K <p>
*
* -F <num> <br>
* Number of cross validation folds to use for estimating accuracy.
* (default = 5) <p>
*
* -S <seed> <br>
* Seed for cross validation accuracy estimation.
* (default = 1) <p>
*
* -T <num> <br>
* Threshold by which to execute another cross validation (standard deviation
* ---expressed as a percentage of the mean). <p>
*
* @param options the list of options as an array of strings
* @exception Exception if an option is not supported
*
**/
public void setOptions (String[] options)
throws Exception
{
String optionString;
resetOptions();
optionString = Utils.getOption('B', options);
if (optionString.length() == 0) {
throw new Exception("A learning scheme must be specified with"
+ " the -B option");
}
setClassifier(Classifier.forName(optionString,
Utils.partitionOptions(options)));
optionString = Utils.getOption('F', options);
if (optionString.length() != 0) {
setFolds(Integer.parseInt(optionString));
}
optionString = Utils.getOption('S', options);
if (optionString.length() != 0) {
setSeed(Integer.parseInt(optionString));
}
// optionString = Utils.getOption('S',options);
// if (optionString.length() != 0)
// {
// seed = Integer.parseInt(optionString);
// }
optionString = Utils.getOption('T', options);
if (optionString.length() != 0) {
Double temp;
temp = Double.valueOf(optionString);
setThreshold(temp.doubleValue());
}
}
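// Example (illustrative only): configuring the evaluator either through the
// option parser above or through the individual setters. ZeroR is used here
// simply because it is already imported; any base classifier would do, and
// setOptions() throws Exception, so a real caller would wrap this in try/catch.
//
//   WrapperSubsetEval eval = new WrapperSubsetEval();
//   eval.setOptions(new String[] {
//     "-B", "org.agentacademy.modules.dataminer.classifiers.ZeroR",
//     "-F", "10",      // ten cross validation folds
//     "-T", "0.01"     // repeat xval while stdev/mean exceeds 1%
//   });
//
//   // Equivalent, without string parsing (setClassifier and setSeed are
//   // defined elsewhere in this class):
//   eval.setClassifier(new ZeroR());
//   eval.setFolds(10);
//   eval.setSeed(1);
//   eval.setThreshold(0.01);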
/**
* Returns the tip text for this property
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String thresholdTipText() {
return "Repeat xval if stdev of mean exceeds this value.";
}
/**
* Set the value of the threshold for repeating cross validation
*
* @param t the value of the threshold
*/
public void setThreshold (double t) {
m_threshold = t;
}
/**
* Get the value of the threshold
*
* @return the threshold as a double
*/
public double getThreshold () {
return m_threshold;
}
/**
* Returns the tip text for this property
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String foldsTipText() {
return "Number of xval folds to use when estimating subset accuracy.";
}
/**
* Set the number of folds to use for accuracy estimation
*
* @param f the number of folds
*/
public void setFolds (int f) {
m_folds = f;
}
/**
* Get the number of folds used for accuracy estimation
*
* @return the number of folds
*/
public int getFolds () {
return m_folds;
}
/**
* Returns the tip text for this property
* @return tip text for this property suitable for
* displaying in the explorer/experimenter gui
*/
public String seedTipText() {
return "Seed to use for randomly generating xval splits.";
}
/**
* Set the seed to use for cross validation accuracy estimation
*
* @param s the seed
*/
public void setSeed (int s) {
m_seed = s;
}