network.java

Prune a JOONE neural network with a selective prune algorithm.
JAVA
/**
 * Network
 * Copyright 2005 by Jeff Heaton(jeff@jeffheaton.com)
 *
 * Example program from Chapter 11
 * Programming Neural Networks in Java
 * http://www.heatonresearch.com/articles/series/1/
 *
 * This software is copyrighted. You may use it in programs
 * of your own, without restriction, but you may not
 * publish the source code without the author's permission.
 * For more information on distributing this code, please
 * visit:
 *    http://www.heatonresearch.com/hr_legal.php
 *
 * @author Jeff Heaton
 * @version 1.1
 */

public class Network {

  /**
   * The global error for the training.
   */
  protected double globalError;


  /**
   * The number of input neurons.
   */
  protected int inputCount;

  /**
   * The number of hidden neurons.
   */
  protected int hiddenCount;

  /**
   * The number of output neurons.
   */
  protected int outputCount;

  /**
   * The total number of neurons in the network.
   */
  protected int neuronCount;

  /**
   * The number of weights in the network.
   */
  protected int weightCount;

  /**
   * The learning rate.
   */
  protected double learnRate;

  /**
   * The outputs from the various levels.
   */
  protected double fire[];

  /**
   * The weight matrix. This, along with the thresholds, can be
   * thought of as the "memory" of the neural network.
   */
  protected double matrix[];

  /**
   * The errors from the last calculation.
   */
  protected double error[];

  /**
   * Accumulates the weight matrix deltas during training.
   */
  protected double accMatrixDelta[];

  /**
   * The thresholds. These values, along with the weight matrix,
   * can be thought of as the memory of the neural network.
   */
  protected double thresholds[];

  /**
   * The changes that should be applied to the weight
   * matrix.
   */
  protected double matrixDelta[];

  /**
   * The accumulation of the threshold deltas.
   */
  protected double accThresholdDelta[];

  /**
   * The threshold deltas.
   */
  protected double thresholdDelta[];

  /**
   * The momentum for training.
   */
  protected double momentum;

  /**
   * The error deltas (error times the derivative of the activation).
   */
  protected double errorDelta[];


  /**
   * Construct the neural network.
   *
   * @param inputCount The number of input neurons.
   * @param hiddenCount The number of hidden neurons.
   * @param outputCount The number of output neurons.
   * @param learnRate The learning rate to be used when training.
   * @param momentum The momentum to be used when training.
   */
  public Network(int inputCount,
                 int hiddenCount,
                 int outputCount,
                 double learnRate,
                 double momentum) {

    this.learnRate = learnRate;
    this.momentum = momentum;

    this.inputCount = inputCount;
    this.hiddenCount = hiddenCount;
    this.outputCount = outputCount;
    neuronCount = inputCount + hiddenCount + outputCount;
    weightCount = (inputCount * hiddenCount) + (hiddenCount * outputCount);

    fire        = new double[neuronCount];
    matrix      = new double[weightCount];
    matrixDelta = new double[weightCount];
    thresholds  = new double[neuronCount];
    errorDelta  = new double[neuronCount];
    error       = new double[neuronCount];
    accThresholdDelta = new double[neuronCount];
    accMatrixDelta = new double[weightCount];
    thresholdDelta = new double[neuronCount];

    reset();
  }



  /**
   * Returns the root mean square error for a complete training set.
   *
   * @param len The length of a complete training set.
   * @return The current error for the neural network.
   */
  public double getError(int len) {
    double err = Math.sqrt(globalError / (len * outputCount));
    globalError = 0;  // clear the accumulator
    return err;

  }

  /**
   * The threshold (activation) method, here a sigmoid. You may wish to
   * override this method in a subclass to provide other threshold functions.
   *
   * @param sum The weighted input sum for the neuron.
   * @return The result of applying the threshold function to the sum.
   */
  public double threshold(double sum) {
    return 1.0 / (1 + Math.exp(-1.0 * sum));
  }
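  /*
   * A minimal sketch, not part of the original listing: the sigmoid above
   * could be swapped for another squashing function by overriding
   * threshold() in a subclass, for example a hypothetical tanh variant:
   *
   *   class TanhNetwork extends Network {
   *     TanhNetwork(int in, int hidden, int out, double rate, double mom) {
   *       super(in, hidden, out, rate, mom);
   *     }
   *     public double threshold(double sum) {
   *       return Math.tanh(sum);
   *     }
   *   }
   *
   * Note that calcError() hard-codes the sigmoid derivative fire * (1 - fire),
   * so a different activation would also require adjusting those delta terms
   * (for tanh the derivative is 1 - fire * fire).
   */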

  /**
   * Compute the output for a given input to the neural network.
   *
   * @param input The input provided to the neural network.
   * @return The results from the output neurons.
   */
  public double []computeOutputs(double input[]) {
    int i, j;
    final int hiddenIndex = inputCount;
    final int outIndex = inputCount + hiddenCount;

    for (i = 0; i < inputCount; i++) {
      fire[i] = input[i];
    }

    // compute the hidden layer from the inputs
    int inx = 0;

    for (i = hiddenIndex; i < outIndex; i++) {
      double sum = thresholds[i];

      for (j = 0; j < inputCount; j++) {
        sum += fire[j] * matrix[inx++];
      }
      fire[i] = threshold(sum);
    }

    // compute the output layer from the hidden layer

    double result[] = new double[outputCount];

    for (i = outIndex; i < neuronCount; i++) {
      double sum = thresholds[i];

      for (j = hiddenIndex; j < outIndex; j++) {
        sum += fire[j] * matrix[inx++];
      }
      fire[i] = threshold(sum);
      result[i-outIndex] = fire[i];
    }

    return result;
  }


  /**
   * Calculate the error for the recognition just done.
   *
   * @param ideal What the output neurons should have yielded.
   */
  public void calcError(double ideal[]) {
    int i, j;
    final int hiddenIndex = inputCount;
    final int outputIndex = inputCount + hiddenCount;

    // clear the hidden and output layer errors
    for (i = inputCount; i < neuronCount; i++) {
      error[i] = 0;
    }

    // layer errors and deltas for output layer
    for (i = outputIndex; i < neuronCount; i++) {
      error[i] = ideal[i - outputIndex] - fire[i];
      globalError += error[i] * error[i];
      errorDelta[i] = error[i] * fire[i] * (1 - fire[i]);
    }

    // back-propagate errors to the hidden layer; accumulate hidden-to-output weight deltas
    int winx = inputCount * hiddenCount;

    for (i = outputIndex; i < neuronCount; i++) {
      for (j = hiddenIndex; j < outputIndex; j++) {
        accMatrixDelta[winx] += errorDelta[i] * fire[j];
        error[j] += matrix[winx] * errorDelta[i];
        winx++;
      }
      accThresholdDelta[i] += errorDelta[i];
    }

    // hidden layer deltas
    for (i = hiddenIndex; i < outputIndex; i++) {
      errorDelta[i] = error[i] * fire[i] * (1 - fire[i]);
    }

    // back-propagate errors to the input layer; accumulate input-to-hidden weight deltas
    winx = 0;  // offset into weight array
    for (i = hiddenIndex; i < outputIndex; i++) {
      for (j = 0; j < hiddenIndex; j++) {
        accMatrixDelta[winx] += errorDelta[i] * fire[j];
        error[j] += matrix[winx] * errorDelta[i];
        winx++;
      }
      accThresholdDelta[i] += errorDelta[i];
    }
  }

  /**
   * Modify the weight matrix and thresholds based on the last call to
   * calcError.
   */
  public void learn() {
    int i;

    // process the matrix
    for (i = 0; i < matrix.length; i++) {
      matrixDelta[i] = (learnRate * accMatrixDelta[i]) + (momentum * matrixDelta[i]);
      matrix[i] += matrixDelta[i];
      accMatrixDelta[i] = 0;
    }

    // process the thresholds
    for (i = inputCount; i < neuronCount; i++) {
      thresholdDelta[i] = learnRate * accThresholdDelta[i] + (momentum * thresholdDelta[i]);
      thresholds[i] += thresholdDelta[i];
      accThresholdDelta[i] = 0;
    }
  }

  /**
   * Reset the weight matrix and the thresholds.
   */
  public void reset() {
    int i;

    for (i = 0; i < neuronCount; i++) {
      thresholds[i] = 0.5 - (Math.random());
      thresholdDelta[i] = 0;
      accThresholdDelta[i] = 0;
    }
    for (i = 0; i < matrix.length; i++) {
      matrix[i] = 0.5 - (Math.random());
      matrixDelta[i] = 0;
      accMatrixDelta[i] = 0;
    }
  }

  /**
   * Convert to an array. This is used with some training algorithms
   * that require that the "memory" of the neural network (the weight and
   * threshold values) be expressed as a linear array.
   *
   * @return The memory of the neural network as a linear array.
   */
  public double []toArray()
  {
    double result[] = new double[matrix.length+thresholds.length];
    for (int i=0;i<matrix.length;i++)
      result[i] = matrix[i];
    for (int i=0;i<thresholds.length;i++)
      result[matrix.length+i] = thresholds[i];
    return result;
  }

  /**
   * Use an array to populate the memory of the neural network.
   *
   * @param array An array of doubles.
   */
  public void fromArray(double array[])
  {
    for (int i=0;i<matrix.length;i++)
      matrix[i] = array[i];
    for (int i=0;i<thresholds.length;i++)
      thresholds[i] = array[matrix.length+i];
  }

  /**
   * Get the number of input neurons.
   *
   * @return The number of input neurons.
   */
  public int getInputCount()
  {
    return inputCount;
  }

  /**
   * Get the number of output neurons.
   *
   * @return The number of output neurons.
   */
  public int getOutputCount()
  {
    return outputCount;
  }

  /**
   * Get the number of hidden neurons.
   *
   * @return The number of hidden neurons.
   */
  public int getHiddenCount()
  {
    return hiddenCount;
  }

  /**
   * Get the learning rate.
   *
   * @return The learning rate.
   */
  public double getLearnRate()
  {
    return learnRate;
  }

  /**
   * Get the momentum.
   *
   * @return The momentum.
   */
  public double getMomentum()
  {
    return momentum;
  }
}
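
A minimal usage sketch, not from the original listing: the demo class below trains the Network class above on the XOR truth table. The class name NetworkXorDemo and the layer sizes, learning rate, momentum, epoch limit, and error target are illustrative choices, not values taken from the book.

class NetworkXorDemo {

  public static void main(String args[]) {

    // the XOR truth table
    double xorInput[][] = {
      {0.0, 0.0}, {1.0, 0.0}, {0.0, 1.0}, {1.0, 1.0}};
    double xorIdeal[][] = {
      {0.0}, {1.0}, {1.0}, {0.0}};

    // 2 input neurons, 3 hidden neurons, 1 output neuron,
    // learning rate 0.7 and momentum 0.9 (illustrative values)
    Network network = new Network(2, 3, 1, 0.7, 0.9);

    for (int epoch = 1; epoch <= 10000; epoch++) {
      for (int i = 0; i < xorInput.length; i++) {
        network.computeOutputs(xorInput[i]);  // forward pass
        network.calcError(xorIdeal[i]);       // accumulate weight/threshold deltas
        network.learn();                      // apply the deltas (online update)
      }
      // RMS error over the epoch; getError() also clears the accumulator
      double err = network.getError(xorInput.length);
      if (err < 0.01) {
        System.out.println("Error " + err + " after " + epoch + " epochs");
        break;
      }
    }

    // recall with the trained weights
    for (int i = 0; i < xorInput.length; i++) {
      double output[] = network.computeOutputs(xorInput[i]);
      System.out.println(xorInput[i][0] + "," + xorInput[i][1]
          + " -> " + output[0]);
    }

    // the network "memory" can be exported and restored as a flat array
    double memory[] = network.toArray();
    network.fromArray(memory);
  }
}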
