/* 1.cpp */
/**************************************************************************************
====================================================
Network: Backpropagation Network with Bias Terms and Momentum
====================================================
Application: Time-Series Forecasting
Prediction of the tendency of stock market
Author: 郭正琪
Date: 11.08.2005
Reference: D.E. Rumelhart, G.E. Hinton, R.J. Williams
Learning Internal Representations by Error Propagation
in:
D.E. Rumelhart, J.L. McClelland (Eds.)
Parallel Distributed Processing, Volume 1
MIT Press, Cambridge, MA, pp. 318-362, 1986
**************************************************************************************/
/*****************************************************************************************
D E C L A R A T I O N S //各參數量的聲明
****************************************************************************************/
#include <stdlib.h>
#include <stdio.h>
#include <math.h>
/* Basic scalar aliases used throughout the net code. */
typedef int BOOL;
typedef int INT;
typedef double REAL;
#define FALSE 0
#define TRUE 1
#define NOT !
#define AND &&
#define OR ||
/* Sentinels for running min/max scans over the data series. */
#define MIN_REAL -HUGE_VAL
#define MAX_REAL +HUGE_VAL
/* NOTE(review): MIN, MAX and sqr evaluate their arguments more than once;
   do not pass expressions with side effects. */
#define MIN(x,y) ((x)<(y) ? (x) : (y))
#define MAX(x,y) ((x)>(y) ? (x) : (y))
/* Target range the series is normalized into (keeps sigmoid unsaturated). */
#define LO 0.1
#define HI 0.9
/* Constant output of the bias unit (unit 0 of every layer). */
#define BIAS 1
#define sqr(x) ((x)*(x))
typedef struct { /* A LAYER OF A NET: */
INT Units; /* - number of units in this layer (bias unit 0 not counted) */
REAL* Output; /* - output of ith unit; Output[0] holds the bias value */
REAL* Error; /* - error term of ith unit */
REAL** Weight; /* - connection weights to ith unit from the previous layer */
REAL** WeightSave; /* - saved weights, restored after stopped training */
REAL** dWeight; /* - last weight deltas, kept for the momentum term */
} LAYER;
typedef struct { /* A NET: */
LAYER** Layer; /* - all layers of this net */
LAYER* InputLayer; /* - input layer (alias for Layer[0]) */
LAYER* OutputLayer; /* - output layer (alias for Layer[NUM_LAYERS-1]) */
REAL Alpha; /* - momentum factor */
REAL Eta; /* - learning rate */
REAL Gain; /* - gain (steepness) of the sigmoid function */
REAL Error; /* - total net error */
} NET;
/*****************************************************************************************
R A N D O M S D R A W N F R O M D I S T R I B U T I O N S //隨機函數部分
****************************************************************************************/
/* Seed the C library PRNG with a fixed constant so that every run of
   the simulation draws the same sequence and is fully reproducible. */
void InitializeRandoms()
{
    srand(4711);
}
/* Draw an integer from the closed range [Low, High], uniformly via the
   usual modulo reduction of rand(). */
INT RandomEqualINT(INT Low, INT High)
{
    INT Span = High - Low + 1;
    return Low + rand() % Span;
}
/* Draw a real number from [Low, High]: scale rand()/RAND_MAX (a value
   in [0, 1]) onto the requested interval. */
REAL RandomEqualREAL(REAL Low, REAL High)
{
    REAL Unit = (REAL) rand() / RAND_MAX;
    return Low + Unit * (High - Low);
}
/****************************************************************************************
A P P L I C A T I O N - S P E C I F I C C O D E //特殊數據
***************************************************************************************/
/* Network topology: N inputs (a sliding window of past values), one
   hidden layer of 20 units, M outputs (the next M values). */
#define NUM_LAYERS 3
#define N 10
#define M 1
INT Units[NUM_LAYERS] = {N, 20, M};
/* The data set: NUM_YEARS annual values starting at FIRST_YEAR, split by
   index into training / test / evaluation ranges. Training starts at N
   because each prediction needs the N preceding values as input. */
#define FIRST_YEAR 1700
#define NUM_YEARS 136
#define TRAIN_LWB (N)
#define TRAIN_UPB (69)
#define TRAIN_YEARS (TRAIN_UPB - TRAIN_LWB + 1)
#define TEST_LWB (70)
#define TEST_UPB (119)
#define TEST_YEARS (TEST_UPB - TEST_LWB + 1)
#define EVAL_LWB (120)
#define EVAL_UPB (NUM_YEARS - 1)
#define EVAL_YEARS (EVAL_UPB - EVAL_LWB + 1)
/* Copy of the series, also written by NormalizeSunspots. */
REAL Sunspots_[NUM_YEARS];
/* Sunspot series, already pre-scaled into [0, 1]; rescaled in place to
   [LO, HI] by NormalizeSunspots. */
REAL Sunspots [NUM_YEARS] = {
0.421917808, 0.41369863 , 0.476712329, 0.460273973,
0.408219178, 0.378082192, 0.367123288, 0.356164384,
0.326027397, 0.37260274 , 0.350684932, 0.334246575,
0.284931507, 0.309589041, 0.260273973, 0.150684932,
0.139726027, 0.2 , 0.145205479, 0.104109589,
0.063013699, 0.093150685, 0.095890411, 0.043835616,
0.032876712, 0.046575342, 0.01369863 , 0 ,
0.060273973, 0.167123288, 0.115068493, 0.191780822,
0.282191781, 0.257534247, 0.180821918, 0.197260274,
0.257534247, 0.238356164, 0.232876712, 0.268493151,
0.260273973, 0.224657534, 0.208219178, 0.260273973,
0.309589041, 0.342465753, 0.309589041, 0.287671233,
0.263013699, 0.210958904, 0.290410959, 0.306849315,
0.347945205, 0.306849315, 0.301369863, 0.257534247,
0.284931507, 0.232876712, 0.268493151, 0.345205479,
0.457534247, 0.473972603, 0.479452055, 0.375342466,
0.419178082, 0.416438356, 0.375342466, 0.394520548,
0.443835616, 0.512328767, 0.443835616, 0.550684932,
0.594520548, 0.643835616, 0.657534247, 0.780821918,
0.802739726, 0.843835616, 0.810958904, 0.780821918,
0.709589041, 0.805479452, 0.797260274, 0.78630137 ,
0.802739726, 0.947945205, 0.890410959, 0.808219178,
0.723287671, 0.734246575, 0.739726027, 0.742465753,
0.764383562, 0.808219178, 0.931506849, 0.824657534,
0.753424658, 0.75890411 , 0.82739726 , 0.857534247,
0.821917808, 0.906849315, 0.887671233, 0.978082192,
0.915068493, 0.961643836, 1 , 0.936986301,
0.884931507, 0.882191781, 0.873972603, 0.789041096,
0.810958904, 0.821917808, 0.830136986, 0.84109589 ,
0.802739726, 0.819178082, 0.780821918, 0.747945205,
0.706849315, 0.717808219, 0.61369863 , 0.536986301,
0.583561644, 0.57260274 , 0.463013699, 0.471232877,
0.509589041, 0.531506849, 0.471232877, 0.465753425,
0.416438356, 0.432876712, 0.432876712, 0.457534247,
};
/* Mean of the normalized series; used as a trivial baseline predictor. */
REAL Mean;
REAL TrainError; /* net error accumulated over the training range */
REAL TrainErrorPredictingMean; /* error of always predicting Mean (training) */
REAL TestError; /* net error accumulated over the test range */
REAL TestErrorPredictingMean; /* error of always predicting Mean (test) */
FILE* f; /* report file, opened in InitializeApplication */
/* Rescale the raw series linearly into [LO, HI] (in place, and into the
   copy Sunspots_[]) and record its mean value in the global Mean, which
   serves as the baseline "always predict the mean" forecaster.
   Fix: the mean is now accumulated as a plain sum and divided once at
   the end, instead of dividing every element by NUM_YEARS inside the
   loop — fewer divisions and less accumulated rounding error. */
void NormalizeSunspots()
{
    INT Year;
    REAL Min, Max, Sum;
    Min = MAX_REAL;
    Max = MIN_REAL;
    for (Year=0; Year<NUM_YEARS; Year++)
    {
        Min = MIN(Min, Sunspots[Year]); /* smallest value in the series */
        Max = MAX(Max, Sunspots[Year]); /* largest value in the series */
    }
    Sum = 0;
    for (Year=0; Year<NUM_YEARS; Year++)
    {
        /* Linear map of [Min, Max] onto [LO, HI]. */
        Sunspots_[Year] =
        Sunspots [Year] = ((Sunspots[Year]-Min) / (Max-Min)) * (HI-LO) + LO;
        Sum += Sunspots[Year];
    }
    Mean = Sum / NUM_YEARS;
}
//神經網絡的初始化
/* Set the training hyper-parameters, normalize the data series, and
   compute the reference error a trivial predictor (always output Mean)
   would make on the training and test ranges. Finally open the report
   file BPN1.txt for writing.
   Fix: the fopen() result was never checked; on failure f stayed NULL
   and every later fprintf(f, ...) would be undefined behavior. Now the
   program reports the problem and exits. */
void InitializeApplication(NET* Net)
{
    INT Year, i;
    REAL Out, Err;
    Net->Alpha = 0.5;
    Net->Eta = 0.05;
    Net->Gain = 1;
    NormalizeSunspots();
    /* Baseline error on the training range: sum of 0.5*(Mean-target)^2
       over every target the net will be asked to predict. */
    TrainErrorPredictingMean = 0;
    for (Year=TRAIN_LWB; Year<=TRAIN_UPB; Year++)
    {
        for (i=0; i<M; i++)
        {
            Out = Sunspots[Year+i];
            Err = Mean - Out;
            TrainErrorPredictingMean += 0.5 * sqr(Err);
        }
    }
    /* Same baseline on the test range. */
    TestErrorPredictingMean = 0;
    for (Year=TEST_LWB; Year<=TEST_UPB; Year++)
    {
        for (i=0; i<M; i++)
        {
            Out = Sunspots[Year+i];
            Err = Mean - Out;
            TestErrorPredictingMean += 0.5 * sqr(Err);
        }
    }
    f = fopen("BPN1.txt", "w");
    if (f == NULL)
    {
        fprintf(stderr, "InitializeApplication: cannot open BPN1.txt\n");
        exit(1);
    }
}
//寫入文件結束,關閉文檔
/* Close the report file opened by InitializeApplication.
   Fix: fclose(NULL) is undefined behavior (possible if fopen failed),
   so guard the call; resetting f afterwards also defends against an
   accidental double close. The Net parameter is unused but kept for
   interface symmetry with InitializeApplication. */
void FinalizeApplication(NET* Net)
{
    if (f != NULL)
    {
        fclose(f);
        f = NULL;
    }
}
//建立一張神經網絡
/* Helper for GenerateNetwork: zero-initializing allocation that aborts
   with a message instead of returning NULL on exhaustion. */
static void* AllocOrDie(size_t Count, size_t Size)
{
    void* p = calloc(Count, Size);
    if (p == NULL)
    {
        fprintf(stderr, "GenerateNetwork: out of memory\n");
        exit(1);
    }
    return p;
}
/* Build the layer structures of the net according to Units[]: allocate
   output/error vectors (index 0 is the bias unit) and, for every layer
   but the input layer, one weight row per unit sized to the previous
   layer plus its bias. Also sets the default hyper-parameters.
   Fix: the original never checked any calloc/malloc result; a failed
   allocation would be dereferenced immediately. All allocations now go
   through AllocOrDie. */
void GenerateNetwork(NET* Net)
{
    INT l,i;
    Net->Layer = (LAYER**) AllocOrDie(NUM_LAYERS, sizeof(LAYER*));
    for (l=0; l<NUM_LAYERS; l++)
    {
        Net->Layer[l] = (LAYER*) AllocOrDie(1, sizeof(LAYER));
        Net->Layer[l]->Units = Units[l];
        Net->Layer[l]->Output = (REAL*) AllocOrDie(Units[l]+1, sizeof(REAL));
        Net->Layer[l]->Error = (REAL*) AllocOrDie(Units[l]+1, sizeof(REAL));
        Net->Layer[l]->Weight = (REAL**) AllocOrDie(Units[l]+1, sizeof(REAL*));
        Net->Layer[l]->WeightSave = (REAL**) AllocOrDie(Units[l]+1, sizeof(REAL*));
        Net->Layer[l]->dWeight = (REAL**) AllocOrDie(Units[l]+1, sizeof(REAL*));
        Net->Layer[l]->Output[0] = BIAS; /* bias unit always outputs BIAS */
        if (l != 0)
        {
            /* One incoming weight row per unit; the input layer has none. */
            for (i=1; i<=Units[l]; i++)
            {
                Net->Layer[l]->Weight[i] = (REAL*) AllocOrDie(Units[l-1]+1, sizeof(REAL));
                Net->Layer[l]->WeightSave[i] = (REAL*) AllocOrDie(Units[l-1]+1, sizeof(REAL));
                Net->Layer[l]->dWeight[i] = (REAL*) AllocOrDie(Units[l-1]+1, sizeof(REAL));
            }
        }
    }
    Net->InputLayer = Net->Layer[0];
    Net->OutputLayer = Net->Layer[NUM_LAYERS - 1];
    Net->Alpha = 0.9;
    Net->Eta = 0.25;
    Net->Gain = 1;
}
//隨機生成權重-0.5~0.5
/* Fill every connection weight (including the bias weight at j = 0)
   with a uniform random value in [-0.5, 0.5]. Layer 0 is the input
   layer and has no incoming weights, so it is skipped. */
void RandomWeights(NET* Net)
{
    INT l,i,j;
    for (l=1; l<NUM_LAYERS; l++)
    {
        LAYER* Upper = Net->Layer[l];
        LAYER* Lower = Net->Layer[l-1];
        for (i=1; i<=Upper->Units; i++)
        {
            for (j=0; j<=Lower->Units; j++)
            {
                Upper->Weight[i][j] = RandomEqualREAL(-0.5, 0.5);
            }
        }
    }
}
/****************************************************************************************
S U P P O R T F O R S T O P P E D T R A I N I N G //臨界值的修改
*****************************************************************************************/