% sample2.m
global preprocess;
preprocess.Vebosity = 1;
preprocess.Message = '';
%preprocess.SizeFactor = 0.5;
%Shuffle the data set
preprocess.Shuffled = 0;
%Normalize the data
preprocess.Normalization = 1;
%Shot information, appended at the end
preprocess.ShotAvailable = 1;
preprocess.ValidateByShot = 0;
%Data Sampling Rate
preprocess.DataSampling = 0;
preprocess.DataSamplingRate = 2;
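% Note: the DataSampling semantics below are an assumption, not confirmed by
% this script. Presumably DataSampling is an on/off switch and, when enabled,
% only every DataSamplingRate-th sample is kept (every 2nd with the value
% above). A hypothetical sub-sampled configuration would then be:
%   preprocess.DataSampling = 1;
%   preprocess.DataSamplingRate = 4;  % keep every 4th sample (assumed meaning)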
% %Ensemble Method
% %0: No Ensemble
% %1: Up-sampling
% %2: Down-sampling
% %3: Meta Classify with Majority Voting
% %4: Hierarchy Classify
% %5: Sum Rule
% %6: Stacked Classification
% %7: Active Learning
% preprocess.Ensemble = 0;
%Evaluation Method
%0: Train-Test Split
%1: Cross Validation
preprocess.Evaluation = 0;
preprocess.TrainTestSplitBoundary = 100;
preprocess.NumCrossFolder = 2;
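% Sketch of the alternative evaluation mode, using the flag values documented
% above: switching from the fixed split at sample 100 to 5-fold cross
% validation would look like
%   preprocess.Evaluation = 1;        % 1: Cross Validation
%   preprocess.NumCrossFolder = 5;    % number of folds (2 in the run above)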
% Multi-class classification
% 0: Classification
% 1: Multi-class Classification Wrapper
% 2: Multi-label Classification Wrapper
% 3: Multi-class Active Learning Wrapper
preprocess.MultiClassType = 1;
% Input format of multiple class
% 0: Multiple label input, y:(0,1,0........)
% 1: Single label input, y:0..n
preprocess.MultiClass.LabelType = 1;
% preprocess.MultiClass.NumClass = 11; % only useful when MultiClass.LabelType = 0
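% Worked example of the two label formats described above (the 3-class labels
% are hypothetical): with LabelType = 1 each sample carries a single integer
% label, e.g. y = 2; with LabelType = 0 the same sample would be given as an
% indicator vector, e.g. y = (0,0,1), and MultiClass.NumClass (commented out
% above) would have to be set to 3.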
%
preprocess.MultiClass.CodeType = 0;
preprocess.MultiClass.LossFuncType = 2;
preprocess.MultiClass.UncertaintyFuncType = 2;
preprocess.MultiClass.ProbEstimation = 0;
% Active Learning
preprocess.ActiveLearning.Iteration = 4;
preprocess.ActiveLearning.IncrementSize = 10;
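% Under the assumed meaning of these two fields, the active learner runs
% 4 query rounds and requests labels for 10 examples per round, so about
% 4 * 10 = 40 samples are added to the labeled pool over the whole run.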
% Pairwise Constraints
preprocess.ConstraintAvailable = 0;
%preprocess.ConstraintFileName = sprintf('%s/test_constraints.txt', root);
%preprocess.root = 'E:/Research/MyPaper/ACMMM042';
preprocess.root = 'k:/users/yanrong/feature/ActiveLearning/Data/CNN';
preprocess.output_file = sprintf('%s/_Result', preprocess.root);
%preprocess.input_file = sprintf('%s/logreg-newssubject.txt', preprocess.root);
preprocess.input_file = sprintf('%s/TREC03_com.CNN.hstat1', preprocess.root);
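% With the root above, the two sprintf calls resolve to
%   output: k:/users/yanrong/feature/ActiveLearning/Data/CNN/_Result
%   input:  k:/users/yanrong/feature/ActiveLearning/Data/CNN/TREC03_com.CNN.hstat1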
%%%%%%%%%%%%%%%Classification%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%run = test_classify('NeuralNet -NumNode 10');
%run = test_classify('kNN -k 1 -d 1');
%run = test_classify('kNN -k 5 -d 1');
run = test_classify('SVM_LIGHT -Kernel 2 -KernelParam 0.01');
%run = test_classify('SVM_LIGHT -Kernel 5 -KernelParam 0.01 -CostFactor 3');
%run = test_classify('SVM_LIGHT -Kernel 0 -KernelParam 0 -CostFactor 1');
%run = test_classify('SVM_LIGHT -Kernel 1 -KernelParam 2 -CostFactor 1');
%run = test_classify('SVM_LIGHT -Kernel 2 -KernelParam 0.05 -CostFactor 3');
%run = test_classify('SVM_LIGHT -Kernel 2 -KernelParam 0.05 -CostFactor 6');
%run = test_classify('SVM_LIGHT -Kernel 2 -KernelParam 0.05 -CostFactor 3');
%run = test_classify('SVM_LIGHT -Kernel 2 -KernelParam 0.05 -CostFactor 7');
%run = test_classify('SVM_LIGHT -Kernel 2 -KernelParam 0.05 -CostFactor 1');
%run = test_classify('SVM_LIGHT -Kernel 3 -KernelParam 0.5 -CostFactor 1');
%run = test_classify('SVM_LIGHT_TRANSDUCTIVE -Kernel 5 -KernelParam 0.01 -CostFactor 1');
%run = test_classify('SVM_LIGHT_TRANSDUCTIVE -Kernel 5 -KernelParam 0.01 -CostFactor 1');
%run = test_classify('IIS_classify');
%run = test_classify('WekaClassify -c SMO');
%run = test_classify('WekaClassify -E 2 -c SMO');
%run = test_classify('WekaClassify -R -c j48.J48');
%run = test_classify('WekaClassify -c NaiveBayes');
%run = test_classify('cross_validate -f 2 -- mySVM -Config Param_1Class.dat');
%run = test_classify('mySVM -Config Param_RBF.dat');
%run = test_classify('LogitReg');
%run = test_classify('LogitRegKernel -Kernel 2 -KernelParam 0.1 -RegFactor 0.01');
%classifier = 'AdaBoostM1 -P 100 -I 10 -S 1 -W weka.classifiers.SMO';
%run = test_classify(classifier);
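% Assumed reading of the active SVM_LIGHT call above (the options appear to
% mirror svm_light's command line, but that is not confirmed here):
% -Kernel 2 would select an RBF kernel, -KernelParam 0.01 its gamma, and
% -CostFactor (used in several commented variants) the factor by which errors
% on positive examples outweigh errors on negative ones.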