network.java
Prune a JOONE neural network with a selective prune algorithm.
Language: Java
/**
 * Network
 * Copyright 2005 by Jeff Heaton(jeff@jeffheaton.com)
 *
 * Example program from Chapter 11
 * Programming Neural Networks in Java
 * http://www.heatonresearch.com/articles/series/1/
 *
 * This software is copyrighted. You may use it in programs
 * of your own, without restriction, but you may not
 * publish the source code without the author's permission.
 * For more information on distributing this code, please
 * visit:
 *    http://www.heatonresearch.com/hr_legal.php
 *
 * @author Jeff Heaton
 * @version 1.1
 */

public class Network {

  /**
   * The global error for the training.
   */
  protected double globalError;


  /**
   * The number of input neurons.
   */
  protected int inputCount;

  /**
   * The number of hidden neurons.
   */
  protected int hiddenCount;

  /**
   * The number of output neurons.
   */
  protected int outputCount;

  /**
   * The total number of neurons in the network.
   */
  protected int neuronCount;

  /**
   * The number of weights in the network.
   */
  protected int weightCount;

  /**
   * The learning rate.
   */
  protected double learnRate;

  /**
   * The outputs from the various levels.
   */
  protected double fire[];

  /**
   * The weight matrix. This, along with the thresholds, can be
   * thought of as the "memory" of the neural network.
   */
  protected double matrix[];

  /**
   * The errors from the last calculation.
   */
  protected double error[];

  /**
   * Accumulates weight matrix deltas during training.
   */
  protected double accMatrixDelta[];

  /**
   * The thresholds. These, along with the weight matrix,
   * can be thought of as the memory of the neural network.
   */
  protected double thresholds[];

  /**
   * The changes that should be applied to the weight
   * matrix.
   */
  protected double matrixDelta[];

  /**
   * The accumulation of the threshold deltas.
   */
  protected double accThresholdDelta[];

  /**
   * The threshold deltas.
   */
  protected double thresholdDelta[];

  /**
   * The momentum for training.
   */
  protected double momentum;

  /**
   * The changes in the errors.
   */
  protected double errorDelta[];


  /**
   * Construct the neural network.
   *
   * @param inputCount The number of input neurons.
   * @param hiddenCount The number of hidden neurons.
   * @param outputCount The number of output neurons.
   * @param learnRate The learning rate to be used when training.
   * @param momentum The momentum to be used when training.
   */
  public Network(int inputCount,
                 int hiddenCount,
                 int outputCount,
                 double learnRate,
                 double momentum) {

    this.learnRate = learnRate;
    this.momentum = momentum;

    this.inputCount = inputCount;
    this.hiddenCount = hiddenCount;
    this.outputCount = outputCount;
    neuronCount = inputCount + hiddenCount + outputCount;
    weightCount = (inputCount * hiddenCount) + (hiddenCount * outputCount);

    fire        = new double[neuronCount];
    matrix      = new double[weightCount];
    matrixDelta = new double[weightCount];
    thresholds  = new double[neuronCount];
    errorDelta  = new double[neuronCount];
    error       = new double[neuronCount];
    accThresholdDelta = new double[neuronCount];
    accMatrixDelta = new double[weightCount];
    thresholdDelta = new double[neuronCount];

    reset();
  }



  /**
   * Returns the root mean square error for a complete training set.
   *
   * @param len The length of a complete training set.
   * @return The current error for the neural network.
   */
  public double getError(int len) {
    double err = Math.sqrt(globalError / (len * outputCount));
    globalError = 0;  // clear the accumulator
    return err;

  }

  /**
   * The threshold (activation) function. You may wish to override this
   * method to provide other threshold functions.
   *
   * @param sum The weighted sum of the neuron's inputs.
   * @return The result of applying the threshold function to the sum.
   */
  public double threshold(double sum) {
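    // logistic sigmoid: 1 / (1 + e^(-sum)), yielding outputs in (0, 1)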
    return 1.0 / (1 + Math.exp(-1.0 * sum));
  }

  /**
   * Compute the output for a given input to the neural network.
   *
   * @param input The input provided to the neural network.
   * @return The results from the output neurons.
   */
  public double []computeOutputs(double input[]) {
    int i, j;
    final int hiddenIndex = inputCount;
    final int outIndex = inputCount + hiddenCount;

    for (i = 0; i < inputCount; i++) {
      fire[i] = input[i];
    }

    // input layer to hidden layer
    int inx = 0;

    for (i = hiddenIndex; i < outIndex; i++) {
      double sum = thresholds[i];

      for (j = 0; j < inputCount; j++) {
        sum += fire[j] * matrix[inx++];
      }
      fire[i] = threshold(sum);
    }

    // hidden layer to output layer

    double result[] = new double[outputCount];

    for (i = outIndex; i < neuronCount; i++) {
      double sum = thresholds[i];

      for (j = hiddenIndex; j < outIndex; j++) {
        sum += fire[j] * matrix[inx++];
      }
      fire[i] = threshold(sum);
      result[i-outIndex] = fire[i];
    }

    return result;
  }


  /**
   * Calculate the error for the recognition just done.
   *
   * @param ideal What the output neurons should have yielded.
   */
  public void calcError(double ideal[]) {
    int i, j;
    final int hiddenIndex = inputCount;
    final int outputIndex = inputCount + hiddenCount;

    // clear hidden and output layer errors
    for (i = inputCount; i < neuronCount; i++) {
      error[i] = 0;
    }

    // errors and deltas for the output layer
    for (i = outputIndex; i < neuronCount; i++) {
      error[i] = ideal[i - outputIndex] - fire[i];
      globalError += error[i] * error[i];
      errorDelta[i] = error[i] * fire[i] * (1 - fire[i]);
    }

    // back-propagate errors to the hidden layer and accumulate
    // hidden-to-output weight and output threshold deltas
    int winx = inputCount * hiddenCount;  // offset of hidden-to-output weights

    for (i = outputIndex; i < neuronCount; i++) {
      for (j = hiddenIndex; j < outputIndex; j++) {
        accMatrixDelta[winx] += errorDelta[i] * fire[j];
        error[j] += matrix[winx] * errorDelta[i];
        winx++;
      }
      accThresholdDelta[i] += errorDelta[i];
    }

    // hidden layer deltas
    for (i = hiddenIndex; i < outputIndex; i++) {
      errorDelta[i] = error[i] * fire[i] * (1 - fire[i]);
    }

    // accumulate input-to-hidden weight and hidden threshold deltas
    winx = 0;  // offset of input-to-hidden weights in the matrix
    for (i = hiddenIndex; i < outputIndex; i++) {
      for (j = 0; j < hiddenIndex; j++) {
        accMatrixDelta[winx] += errorDelta[i] * fire[j];
        error[j] += matrix[winx] * errorDelta[i];
        winx++;
      }
      accThresholdDelta[i] += errorDelta[i];
    }
  }

  /**
   * Modify the weight matrix and thresholds based on the last call to
   * calcError.
   */
  public void learn() {
    int i;

    // process the matrix
    for (i = 0; i < matrix.length; i++) {
      matrixDelta[i] = (learnRate * accMatrixDelta[i]) + (momentum * matrixDelta[i]);
      matrix[i] += matrixDelta[i];
      accMatrixDelta[i] = 0;
    }

    // process the thresholds
    for (i = inputCount; i < neuronCount; i++) {
      thresholdDelta[i] = learnRate * accThresholdDelta[i] + (momentum * thresholdDelta[i]);
      thresholds[i] += thresholdDelta[i];
      accThresholdDelta[i] = 0;
    }
  }

  /**
   * Reset the weight matrix and the thresholds to small random values.
   */
  public void reset() {
    int i;

    for (i = 0; i < neuronCount; i++) {
      thresholds[i] = 0.5 - (Math.random());
      thresholdDelta[i] = 0;
      accThresholdDelta[i] = 0;
    }
    for (i = 0; i < matrix.length; i++) {
      matrix[i] = 0.5 - (Math.random());
      matrixDelta[i] = 0;
      accMatrixDelta[i] = 0;
    }
  }

  /**
   * Convert to an array. This is used with some training algorithms
   * that require that the "memory" of the network (the weight and
   * threshold values) be expressed as a linear array.
   *
   * @return The memory of the network as an array.
   */
  public double []toArray()
  {
    double result[] = new double[matrix.length+thresholds.length];
    for (int i=0;i<matrix.length;i++)
      result[i] = matrix[i];
    for (int i=0;i<thresholds.length;i++)
      result[matrix.length+i] = thresholds[i];
    return result;
  }

  /**
   * Use an array to populate the memory of the neural network.
   *
   * @param array An array of doubles, as produced by toArray.
   */
  public void fromArray(double array[])
  {
    for (int i=0;i<matrix.length;i++)
      matrix[i] = array[i];
    for (int i=0;i<thresholds.length;i++)
      thresholds[i] = array[matrix.length+i];
  }

  /**
   * Get the number of input neurons.
   *
   * @return The number of input neurons.
   */
  public int getInputCount()
  {
    return inputCount;
  }

  /**
   * Get the number of output neurons.
   *
   * @return The number of output neurons.
   */
  public int getOutputCount()
  {
    return outputCount;
  }

  /**
   * Get the number of hidden neurons.
   *
   * @return The number of hidden neurons.
   */
  public int getHiddenCount()
  {
    return hiddenCount;
  }

  /**
   * Get the learning rate.
   *
   * @return The learning rate.
   */
  public double getLearnRate()
  {
    return learnRate;
  }

  /**
   * Get the momentum.
   *
   * @return The momentum.
   */
  public double getMomentum()
  {
    return momentum;
  }
}
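
For readers who want to see the class in action, the sketch below trains the network on the XOR problem using only the public methods defined above (computeOutputs, calcError, learn, getError). The class name XorExample, the layer sizes, the learning rate, the momentum, and the stopping criterion are illustrative assumptions and are not part of the original source.

/**
 * Minimal usage sketch (not from the original file): train the Network
 * class above on XOR. All hyperparameters below are assumed values.
 */
public class XorExample {

  public static void main(String[] args) {
    // training data: the XOR truth table
    double input[][] = { {0, 0}, {0, 1}, {1, 0}, {1, 1} };
    double ideal[][] = { {0},    {1},    {1},    {0}    };

    // 2 inputs, 3 hidden neurons, 1 output; learnRate = 0.7, momentum = 0.9 (assumed)
    Network network = new Network(2, 3, 1, 0.7, 0.9);

    for (int epoch = 0; epoch < 10000; epoch++) {
      for (int i = 0; i < input.length; i++) {
        network.computeOutputs(input[i]); // forward pass
        network.calcError(ideal[i]);      // accumulate errors and deltas
        network.learn();                  // apply weight/threshold updates
      }
      double rms = network.getError(input.length); // RMS error over the epoch
      if (rms < 0.01) {
        System.out.println("Converged after " + epoch + " epochs, RMS = " + rms);
        break;
      }
    }

    // show the trained outputs
    for (int i = 0; i < input.length; i++) {
      double out[] = network.computeOutputs(input[i]);
      System.out.println(input[i][0] + "," + input[i][1] + " -> " + out[0]);
    }
  }
}

Because learn() zeroes the accumulated deltas, calling it once per sample (as above) performs online updates; calling it once per epoch would perform batch updates instead.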

?? 快捷鍵說(shuō)明

復(fù)制代碼 Ctrl + C
搜索代碼 Ctrl + F
全屏模式 F11
切換主題 Ctrl + Shift + D
顯示快捷鍵 ?
增大字號(hào) Ctrl + =
減小字號(hào) Ctrl + -
亚洲欧美第一页_禁久久精品乱码_粉嫩av一区二区三区免费野_久草精品视频
精品伊人久久久久7777人| 亚洲成a人片在线观看中文| 91久久精品一区二区三区| 理论电影国产精品| 亚洲欧美一区二区三区极速播放 | 在线观看日韩电影| 国产自产视频一区二区三区| 亚洲成人午夜电影| 国产精品乱人伦| 中文字幕成人网| 欧美日韩成人综合天天影院| 成人高清免费观看| 狠狠色狠狠色综合系列| 日本在线不卡一区| 亚洲六月丁香色婷婷综合久久| 精品久久久久久最新网址| 欧美性感一类影片在线播放| 成人动漫中文字幕| 国产精品一二三区| 免费成人av在线播放| 亚洲午夜国产一区99re久久| 国产精品美女久久久久久久久久久 | 久久精品国产77777蜜臀| 亚洲精品日韩综合观看成人91| 久久久久久久久久看片| 日韩欧美一二区| 欧美日韩国产成人在线91| 在线视频一区二区三区| 91香蕉视频污在线| 成人18精品视频| 国产精品一区二区不卡| 麻豆精品国产91久久久久久| 日韩精彩视频在线观看| 日韩精品三区四区| 亚洲国产精品一区二区久久| 亚洲欧美激情插| 亚洲欧洲日韩综合一区二区| 国产精品久久毛片av大全日韩| 国产日产欧美精品一区二区三区| 国产肉丝袜一区二区| 国产喷白浆一区二区三区| 久久久久久久久久久久电影| 精品国产免费一区二区三区香蕉| 亚洲精品一区在线观看| wwwwww.欧美系列| 久久久国际精品| 久久精品一区二区三区不卡| 国产欧美精品一区二区色综合| 国产午夜精品美女毛片视频| 国产欧美一区二区三区在线老狼| 久久精品水蜜桃av综合天堂| 国产日韩欧美精品电影三级在线| 国产欧美综合在线观看第十页| 中文字幕免费观看一区| 国产精品久久毛片| 亚洲乱码一区二区三区在线观看| 亚洲午夜免费电影| 99久久综合99久久综合网站| 福利电影一区二区| 一本大道久久a久久综合| 欧美三级韩国三级日本一级| 日韩一区二区三区电影在线观看| 精品对白一区国产伦| 国产视频911| 亚洲久草在线视频| 视频一区二区欧美| 国产麻豆精品久久一二三| 99视频有精品| 欧美美女一区二区| 久久久久国产精品厨房| 亚洲码国产岛国毛片在线| 天堂一区二区在线| 国产在线国偷精品免费看| jvid福利写真一区二区三区| 精品视频1区2区3区| 久久日韩精品一区二区五区| 亚洲人成在线观看一区二区| 日韩精品亚洲专区| 成人激情视频网站| 91精品国产综合久久久久久久| 久久久青草青青国产亚洲免观| 亚洲精品成人天堂一二三| 免费人成黄页网站在线一区二区| 成人永久看片免费视频天堂| 欧美男人的天堂一二区| 久久久电影一区二区三区| 亚洲小说春色综合另类电影| 黄一区二区三区| 欧美亚洲日本一区| 久久久精品综合| 亚洲va国产天堂va久久en| 粉嫩一区二区三区性色av| 91精品婷婷国产综合久久竹菊| 久久久国产精华| 亚洲成人777| 99re亚洲国产精品| 久久久久久99久久久精品网站| 亚洲成a人片综合在线| 北条麻妃一区二区三区| 日韩女同互慰一区二区| 亚洲国产欧美日韩另类综合 | 久久久久久麻豆| 秋霞影院一区二区| 色综合咪咪久久| 中文字幕国产一区| 精品一区二区三区视频在线观看 | 亚洲精品成人天堂一二三| 国产精品白丝jk黑袜喷水| 欧美日韩另类国产亚洲欧美一级| 国产精品国产自产拍高清av王其 | 欧美午夜精品一区二区蜜桃 | 国产午夜三级一区二区三| 视频一区二区不卡| 欧美性生活影院| 亚洲免费观看高清完整| 成人精品亚洲人成在线| 久久精子c满五个校花| 久久国产尿小便嘘嘘尿| 欧美精品一二三| 亚洲一区二区黄色| 色综合久久综合| 亚洲视频狠狠干| a4yy欧美一区二区三区| 久久久久国色av免费看影院| 精品写真视频在线观看| 精品国产百合女同互慰| 久久国产精品色婷婷| 欧美一区二区三区四区在线观看| 亚洲成人先锋电影| 欧美日韩亚洲综合一区二区三区| 亚洲影院久久精品| 欧美亚洲丝袜传媒另类| 无码av中文一区二区三区桃花岛| 欧美在线免费视屏| 亚洲国产精品一区二区www| 欧美亚洲动漫精品| 婷婷夜色潮精品综合在线| 欧美日韩一区高清| 日韩av中文字幕一区二区三区| 91.成人天堂一区| 日本不卡在线视频| 亚洲精品一区二区在线观看| 国产麻豆精品95视频| 欧美激情一区在线观看| 成人av片在线观看| 亚洲精品免费在线观看| 91网站在线观看视频| 樱花影视一区二区| 欧美另类videos死尸| 蜜桃视频在线一区| 久久综合色之久久综合| 国产大陆a不卡| 亚洲视频精选在线| 欧美日本在线观看| 精品一二三四在线| 国产精品久久久久国产精品日日| 一本在线高清不卡dvd| 五月天欧美精品| 欧美videossexotv100| 成人一级视频在线观看| 亚洲特黄一级片| 欧美精品一二三四| 国产成人三级在线观看| 亚洲欧美偷拍另类a∨色屁股| 欧美调教femdomvk| 国产一区二区网址| 亚洲人成小说网站色在线| 欧美日韩国产经典色站一区二区三区| 日韩av电影天堂| 欧美激情在线一区二区| 在线国产亚洲欧美| 精品在线播放免费| 亚洲黄网站在线观看| 制服丝袜成人动漫| 国产成人免费在线视频| 亚洲一级二级三级| 久久亚洲一级片| 欧美在线视频日韩| 国产福利电影一区二区三区| 一区二区三区高清| 中文字幕一区二| 日韩精品资源二区在线| 色综合天天天天做夜夜夜夜做| 亚洲电影在线免费观看| 欧美激情在线一区二区三区| 欧美日韩国产一区二区三区地区| 国产麻豆精品在线| 日韩国产在线观看一区| 国产精品丝袜黑色高跟| 日韩一级欧美一级| 色先锋久久av资源部| 国产一区二区免费看| 亚洲va韩国va欧美va| 成人欧美一区二区三区白人| 日韩欧美激情四射| 欧美视频一二三区| 白白色亚洲国产精品| 国产一区二区三区免费看| 水野朝阳av一区二区三区| 国产精品福利在线播放|