bp.m
clear;
clc;
AnalysisDataName='S1132MA';
%Load the training data
load TrainData.txt;
[TrainData_Row,TrainData_Column]=size(TrainData);
P=TrainData(:,1:TrainData_Column-1);
T=TrainData(:,TrainData_Column);
P=P';
T=T';
%Normalize the data
[AnalysisData_Assistant_Min_Max,AnalysisData_Min_Max]=WYH_Get_Min_Max(AnalysisDataName);
P=WYH_Unit(P,AnalysisData_Assistant_Min_Max);
T=WYH_Unit(T,AnalysisData_Min_Max);
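%Note: WYH_Get_Min_Max and WYH_Unit are project-specific helpers (not shown here);
%they are assumed to return per-variable [min max] ranges and to rescale each row
%into a fixed interval. With the toolbox, a comparable min-max scaling to [-1,1]
%(a hypothetical substitute, not the author's code) would be:
%  [P,ps_in]  = mapminmax(P,-1,1);
%  [T,ps_out] = mapminmax(T,-1,1);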
%Define the BP neural network
%----------------------------------------------------------------------------------------
%|******************* Build a three-layer (input layer included) BP neural network model *******************|
%----------------------------------------------------------------------------------------
%1. Create the BP network
R=size(AnalysisData_Assistant_Min_Max,1);%number of input-layer nodes
S1=6;%number of hidden-layer neurons
S2=1;%number of output-layer neurons
PR=AnalysisData_Assistant_Min_Max;
net=newff(PR,[S1,S2],{'tansig','purelin'});
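%newff is called here with the legacy signature (input-range matrix PR plus layer
%sizes and transfer functions). In later toolbox releases the same network can be
%built from the sample data instead; an alternative sketch (not used below):
%  net = newff(P,T,S1,{'tansig','purelin'});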
%2. Training and adaptive adjustment
net.adaptFcn='trains';
net.trainFcn='trainlm';
net.trainParam.epochs=1000;
net.trainParam.goal=1e-2;
%net.trainParam.mu=0.001;
%net.trainParam.mu_dec=0.1;
%net.trainParam.mu_inc=10;
%net.trainParam.mu_max=1e10;
%net.trainParam.max_fail=5;
%net.trainParam.mem_reduc=1;
%net.trainParam.min_grad=1e-10;
%net.trainParam.show=25;
%net.trainParam.time=inf;
%3. Performance evaluation function
net.performFcn='mse';
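%Save the initial (pre-training) weights and biases so they can be compared with
%the trained values written out after training below.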
Begin_Input_W=net.IW{1,1};
save SaveResult\Begin_Input_W.txt Begin_Input_W -ascii;
Begin_LayerOne_Bias=net.b{1};
save SaveResult\Begin_LayerOne_Bias.txt Begin_LayerOne_Bias -ascii;
Begin_LayerOneToTwo_W=net.LW{2,1};
save SaveResult\Begin_LayerOneToTwo_W.txt Begin_LayerOneToTwo_W -ascii;
Begin_LayerTwo_Bias=net.b{2};
save SaveResult\Begin_LayerTwo_Bias.txt Begin_LayerTwo_Bias -ascii;
%-------------------------------------------------------------------------
%-******************************* Train the network model *****************************-
%--------------------------------------------------------------------------
[net,TR]=train(net,P,T);
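%TR is the training record (epochs, per-epoch performance, etc.). As an optional
%check, newer toolbox versions can plot the training curve from it:
%  plotperform(TR);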
%--------------------------------------------------------------------------
%-******************************* Simulate the network model *****************************-
%--------------------------------------------------------------------------
figure(2);
Model_T=sim(net,P);
%Reverse the normalization (map back to the original scale)
Model_T=WYH_ReverseUnit(Model_T,AnalysisData_Min_Max);
Theory_T=WYH_ReverseUnit(T,AnalysisData_Min_Max);
%--------------------------------------------------------------------------
%-******************************* Plot the fitted data *************************-
%--------------------------------------------------------------------------
%subplot(2,1,1);
xLabel=1:size(Theory_T,2);
plot(xLabel,Theory_T,'-k',xLabel,Model_T,':r');%black: target (theoretical) values, red: model outputs
title(AnalysisDataName);
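%Optional fit statistic on the de-normalized data (added sketch, not part of the
%original script): root-mean-square error between targets and model outputs.
%  rmse = sqrt(mean((Theory_T-Model_T).^2));
%  fprintf('RMSE = %g\n',rmse);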
%--------------------------------------------------------------------------
%-******************************* Save the neural network ***************************-
%--------------------------------------------------------------------------
save SaveResult\net.dat net;
Input_W=net.IW{1,1};
save SaveResult\Input_W.txt Input_W -ascii;
LayerOne_Bias=net.b{1};
save SaveResult\LayerOne_Bias.txt LayerOne_Bias -ascii;
LayerOneToTwo_W=net.LW{2,1};
save SaveResult\LayerOneToTwo_W.txt LayerOneToTwo_W -ascii;
LayerTwo_Bias=net.b{2};
save SaveResult\LayerTwo_Bias.txt LayerTwo_Bias -ascii;
[Extend_Theory_T,Extend_Model_T]=ExtraPolate(net,AnalysisDataName);
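%For reference, the saved weights can reproduce the network response by hand
%(a sketch assuming the default dot-product/sum net-input functions): for a
%normalized input column vector p,
%  y = purelin(LayerOneToTwo_W*tansig(Input_W*p + LayerOne_Bias) + LayerTwo_Bias);
%which should match sim(net,p) before the reverse normalization.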