?? multilayerperceptron.java
字號:
// NOTE(review): this appears to be the tail of the constructor (or a reset
// helper) — its opening lines are outside this view; confirm against the
// full file.  It restores every piece of network state to its default.
m_outputs = new NeuralEnd[0];               // no output units yet
m_inputs = new NeuralEnd[0];                // no input units yet
m_numAttributes = 0;
m_numClasses = 0;
m_neuralNodes = new NeuralConnection[0];    // empty node list
m_selected = new FastVector(4);
m_graphers = new FastVector(2);
m_nextId = 0;
m_stopIt = true;                            // training is not running
m_stopped = true;
m_accepted = false;
m_numeric = false;                          // class assumed nominal until set
m_random = null;
m_nominalToBinaryFilter = new NominalToBinary();
m_sigmoidUnit = new SigmoidUnit();
m_linearUnit = new LinearUnit();
//setting all the options to their defaults. To completely change these
//defaults they will also need to be changed down the bottom in the
//setoptions function (the text info in the accompanying functions should
//also be changed to reflect the new defaults
m_normalizeClass = true;
m_normalizeAttributes = true;
m_autoBuild = true;
m_gui = false;                              // headless by default
m_useNomToBin = true;
m_driftThreshold = 20;                      // validation-error patience
m_numEpochs = 500;
m_valSize = 0;                              // 0 => no validation split
m_randomSeed = 0;
m_hiddenLayers = "a";                       // 'a' = (attribs + classes) / 2
m_learningRate = .3;
m_momentum = .2;
m_reset = true;
m_decay = false;
}
/**
 * Turns learning-rate decay on or off.
 *
 * @param d True if the learning rate should decay.
 */
public void setDecay(boolean d) {
  m_decay = d;
}
/**
 * Reports whether learning-rate decay is enabled.
 *
 * @return the flag for having the learning rate decay.
 */
public boolean getDecay() {
  return m_decay;
}
/**
 * This sets the network up to be able to reset itself with the current
 * settings and the learning rate at half of what it is currently. This
 * will only happen if the network creates NaN or infinite errors. Also this
 * will continue to happen until the network is trained properly. The
 * learning rate will also get set back to it's original value at the end of
 * this. This can only be set to true if the GUI is not brought up.
 *
 * @param r True if the network should restart with it's current options
 * and set the learning rate to half what it currently is.
 */
public void setReset(boolean r) {
  // A reset is never allowed while the GUI is up.
  m_reset = r && !m_gui;
}
/**
 * Reports whether the network may restart itself on numeric failure.
 *
 * @return The flag for reseting the network.
 */
public boolean getReset() {
  return m_reset;
}
/**
 * Turns normalization of a numeric class on or off (the class will only
 * ever be normalized if it is numeric; normalization maps it into the
 * range -1 to 1).
 *
 * @param c True if the class should be normalized.
 */
public void setNormalizeNumericClass(boolean c) {
  m_normalizeClass = c;
}
/**
 * Reports whether a numeric class will be normalized.
 *
 * @return The flag for normalizing a numeric class.
 */
public boolean getNormalizeNumericClass() {
  return m_normalizeClass;
}
/**
 * Turns attribute normalization on or off (even nominal attributes get
 * normalized here; the range goes between -1 and 1).
 *
 * @param a True if the attributes should be normalized.
 */
public void setNormalizeAttributes(boolean a) {
  m_normalizeAttributes = a;
}
/**
 * Reports whether attributes will be normalized.
 *
 * @return The flag for normalizing attributes.
 */
public boolean getNormalizeAttributes() {
  return m_normalizeAttributes;
}
/**
 * Controls whether a NominalToBinary filter is applied to the data.
 *
 * @param f True if a nominalToBinary filter should be used on the data.
 */
public void setNominalToBinaryFilter(boolean f) {
  m_useNomToBin = f;
}
/**
 * Reports whether the nominal-to-binary filter will be used.
 *
 * @return The flag for nominal to binary filter use.
 */
public boolean getNominalToBinaryFilter() {
  return m_useNomToBin;
}
/**
 * This seeds the random number generator that is used when a random
 * number is needed for the network.  Negative seeds are silently ignored.
 *
 * @param l The seed.
 */
public void setRandomSeed(long l) {
  if (l < 0) {
    return; // keep the previous seed
  }
  m_randomSeed = l;
}
/**
 * Reports the seed used for the random number generator.
 *
 * @return The seed for the random number generator.
 */
public long getRandomSeed() {
  return m_randomSeed;
}
/**
 * This sets the threshold to use for when validation testing is being done.
 * It works by ending testing once the error on the validation set has
 * consecutively increased a certain number of times.  Non-positive values
 * are silently ignored.
 *
 * @param t The threshold to use for this.
 */
public void setValidationThreshold(int t) {
  if (t <= 0) {
    return; // keep the previous threshold
  }
  m_driftThreshold = t;
}
/**
 * Reports the patience threshold used for validation testing.
 *
 * @return The threshold used for validation testing.
 */
public int getValidationThreshold() {
  return m_driftThreshold;
}
/**
 * The learning rate can be set using this command.
 * NOTE That this is a static variable so it affect all networks that are
 * running.
 * Must be greater than 0 and no more than 1; out-of-range values are
 * silently ignored.
 *
 * @param l The New learning rate.
 */
public void setLearningRate(double l) {
  if (l <= 0 || l > 1) {
    return; // reject out-of-range values
  }
  m_learningRate = l;
  // Keep the GUI control panel (if one is showing) in sync.
  if (m_controlPanel != null) {
    m_controlPanel.m_changeLearning.setText(String.valueOf(l));
  }
}
/**
 * Reports the learning rate used by the nodes.
 *
 * @return The learning rate for the nodes.
 */
public double getLearningRate() {
  return m_learningRate;
}
/**
 * The momentum can be set using this command.
 * Must be between 0 and 1 inclusive; out-of-range values are silently
 * ignored.
 *
 * @param m The new Momentum.
 */
public void setMomentum(double m) {
  if (m < 0 || m > 1) {
    return; // reject out-of-range values
  }
  m_momentum = m;
  // Keep the GUI control panel (if one is showing) in sync.
  if (m_controlPanel != null) {
    m_controlPanel.m_changeMomentum.setText(String.valueOf(m));
  }
}
/**
 * Reports the momentum used by the nodes.
 *
 * @return The momentum for the nodes.
 */
public double getMomentum() {
  return m_momentum;
}
/**
 * This will set whether the network is automatically built
 * or if it is left up to the user. (there is nothing to stop a user
 * from altering an autobuilt network however).
 * Auto build is forced on whenever the GUI is not in use.
 *
 * @param a True if the network should be auto built.
 */
public void setAutoBuild(boolean a) {
  // Without a GUI the user cannot build the network by hand.
  m_autoBuild = a || !m_gui;
}
/**
 * Reports whether the network will be built automatically.
 *
 * @return The auto build state.
 */
public boolean getAutoBuild() {
  return m_autoBuild;
}
/**
 * This will set what the hidden layers are made up of when auto build is
 * enabled. Note to have no hidden units, just put a single 0, Any more
 * 0's will indicate that the string is badly formed and make it unaccepted.
 * Negative numbers, floats, and tokens that are not numbers at all will do
 * the same. There are also some wildcards. These are
 * 'a' = (number of attributes + number of classes) / 2,
 * 'i' = number of attributes, 'o' = number of classes, and 't' = number of
 * attributes + number of classes.
 *
 * @param h A string with a comma seperated list of numbers. Each number is
 * the number of nodes to be on a hidden layer.
 */
public void setHiddenLayers(String h) {
  StringTokenizer tok = new StringTokenizer(h, ",");
  if (tok.countTokens() == 0) {
    return; // nothing to accept
  }
  StringBuilder layers = new StringBuilder();
  boolean first = true;
  while (tok.hasMoreTokens()) {
    String c = tok.nextToken().trim();
    if (c.equals("a") || c.equals("i") || c.equals("o") || c.equals("t")) {
      // Wildcard token: kept verbatim, resolved when the net is built.
      layers.append(c);
    } else {
      double dval;
      try {
        dval = Double.parseDouble(c);
      } catch (NumberFormatException e) {
        // BUGFIX: a garbage token used to propagate a NumberFormatException;
        // per the contract a badly formed string is simply rejected.
        return;
      }
      int val = (int) dval;
      // Accept only non-negative integers; 0 is only legal as the sole token.
      boolean integral = (val == dval);
      boolean zeroOk = (val != 0) || (first && !tok.hasMoreTokens());
      if (!integral || !zeroOk || val < 0) {
        return; // reject the whole string, leave the old setting intact
      }
      layers.append(val);
    }
    first = false;
    if (tok.hasMoreTokens()) {
      layers.append(", ");
    }
  }
  m_hiddenLayers = layers.toString();
}
/**
 * Reports the hidden layer specification; each number is the number of
 * nodes on a hidden layer.
 *
 * @return A string representing the hidden layers.
 */
public String getHiddenLayers() {
  return m_hiddenLayers;
}
/**
 * This will set whether A GUI is brought up to allow interaction by the user
 * with the neural network during training.
 *
 * @param a True if gui should be created.
 */
public void setGUI(boolean a) {
  m_gui = a;
  if (a) {
    // Automatic restarts are not allowed while interacting with the GUI.
    setReset(false);
  } else {
    // Without a GUI the network must be built automatically.
    setAutoBuild(true);
  }
}
/**
 * Reports whether a GUI will be shown during training.
 *
 * @return The true if should show gui.
 */
public boolean getGUI() {
  return m_gui;
}
/**
 * This will set the size of the validation set.  Values outside 0..99 are
 * silently ignored.
 *
 * @param a The size of the validation set, as a percentage of the whole.
 */
public void setValidationSetSize(int a) {
  if (a >= 0 && a <= 99) {
    m_valSize = a;
  }
}
/**
 * Reports the validation set size.
 *
 * @return The percentage size of the validation set.
 */
public int getValidationSetSize() {
  return m_valSize;
}
/**
 * Set the number of training epochs to perform.
 * Must be greater than 0; other values are silently ignored.
 *
 * @param n The number of epochs to train through.
 */
public void setTrainingTime(int n) {
  if (n <= 0) {
    return; // keep the previous epoch count
  }
  m_numEpochs = n;
}
/**
 * Reports the number of training epochs.
 *
 * @return The number of epochs to train through.
 */
public int getTrainingTime() {
  return m_numEpochs;
}
/**
 * Call this function to place a node into the network list.
 * The node is appended to the end of {@code m_neuralNodes}.
 *
 * @param n The node to place in the list.
 */
private void addNode(NeuralConnection n) {
  NeuralConnection[] grown = new NeuralConnection[m_neuralNodes.length + 1];
  // System.arraycopy replaces the original hand-rolled copy loop.
  System.arraycopy(m_neuralNodes, 0, grown, 0, m_neuralNodes.length);
  grown[grown.length - 1] = n;
  m_neuralNodes = grown;
}
/**
 * Call this function to remove the passed node from the list.
 * This will only remove the node if it is in the neuralnodes list.
 *
 * @param n The neuralConnection to remove.
 * @return True if removed false if not (because it wasn't there).
 */
private boolean removeNode(NeuralConnection n) {
  // Locate the node by identity.
  int index = -1;
  for (int noa = 0; noa < m_neuralNodes.length; noa++) {
    if (m_neuralNodes[noa] == n) {
      index = noa;
      break;
    }
  }
  if (index == -1) {
    // BUGFIX: the original allocated an array of length - 1 before checking
    // membership, so calling this on an empty list threw
    // NegativeArraySizeException instead of returning false.
    return false;
  }
  NeuralConnection[] shrunk = new NeuralConnection[m_neuralNodes.length - 1];
  System.arraycopy(m_neuralNodes, 0, shrunk, 0, index);
  System.arraycopy(m_neuralNodes, index + 1, shrunk, index,
                   m_neuralNodes.length - index - 1);
  m_neuralNodes = shrunk;
  return true;
}
/**
 * This function sets what the m_numeric flag to represent the passed class
 * it also performs the normalization of the attributes if applicable
 * and sets up the info to normalize the class. (note that regardless of
 * the options it will fill an array with the range and base, set to
 * normalize all attributes and the class to be between -1 and 1)
 * @param inst the instances.
 * @return The modified instances. This needs to be done. If the attributes
 * are normalized then deep copies will be made of all the instances which
 * will need to be passed back out.
 */
private Instances setClassType(Instances inst) throws Exception {
if (inst != null) {
// x bounds
double min=Double.POSITIVE_INFINITY;
double max=Double.NEGATIVE_INFINITY;
double value;
// Per-attribute half-range and midpoint, recorded for every attribute
// (including the class) whether or not normalization is applied.
m_attributeRanges = new double[inst.numAttributes()];
m_attributeBases = new double[inst.numAttributes()];
for (int noa = 0; noa < inst.numAttributes(); noa++) {
// Scan all non-missing values of this attribute for its min/max.
min = Double.POSITIVE_INFINITY;
max = Double.NEGATIVE_INFINITY;
for (int i=0; i < inst.numInstances();i++) {
if (!inst.instance(i).isMissing(noa)) {
value = inst.instance(i).value(noa);
if (value < min) {
min = value;
}
if (value > max) {
max = value;
}
}
}
// range = half the spread, base = midpoint, so that
// (value - base) / range maps the attribute into [-1, 1].
m_attributeRanges[noa] = (max - min) / 2;
m_attributeBases[noa] = (max + min) / 2;
// Normalize in place (the class attribute is never touched here).
if (noa != inst.classIndex() && m_normalizeAttributes) {
for (int i = 0; i < inst.numInstances(); i++) {
if (m_attributeRanges[noa] != 0) {
inst.instance(i).setValue(noa, (inst.instance(i).value(noa)
- m_attributeBases[noa]) /
m_attributeRanges[noa]);
}
else {
// Constant attribute: only center it, to avoid dividing by zero.
inst.instance(i).setValue(noa, inst.instance(i).value(noa) -
m_attributeBases[noa]);
}
}
}
}
// Record whether the class is numeric (regression) or nominal.
if (inst.classAttribute().isNumeric()) {
m_numeric = true;
}
else {
m_numeric = false;
}
}
// NOTE(review): the method's closing brace lies beyond this chunk; the
// lines that follow in the raw source are scraper residue, not code.
return inst;
?? 快捷鍵說明
復制代碼
Ctrl + C
搜索代碼
Ctrl + F
全屏模式
F11
切換主題
Ctrl + Shift + D
顯示快捷鍵
?
增大字號
Ctrl + =
減小字號
Ctrl + -