% nnmodel.asv
else
nois = noilev*randn(size(xtr));
end
xtrnois = xtr+nois;
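% Hedged note (added for clarity, not part of the original script): the ANOVA
% lack-of-fit test below needs replicate observations, so pseudo-replicates are
% created here by perturbing the training inputs with zero-mean Gaussian noise
% of standard deviation noilev, i.e. essentially:
addnoise_sketch = @(x,lev) x + lev*randn(size(x));   % illustration only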
for i = 1:mxtr
xtrunc = 0*xtrnois; % All training input variables but one
xtrunc(i,:) = xtrnois(i,:); % are set to 0
% Training of the network. The monitoring set is used to stop the training
[w1n,w2n] = levmarq(topo,w1inref,w2inref,xtrnois,ytr,xm,ym,epochs,[]);
% Estimation of training responses using only one input in the model
[ytrunc1,ytrunc2] = lmeval(topo,w1n,w2n,xtrunc);
% Inverse-scaling of training responses estimated using one input only
ytrunchat = invrange(ytrunc2,0.2,0.8,tableout);
concyhtr2(i,:) = ytrunchat;
end
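% Hedged sketch (assumption, not part of the original toolbox): lmeval is taken
% here to return the hidden-layer outputs (first output) and the network
% outputs (second output) of a one-hidden-layer network with tanh hidden units,
% a linear output layer, and bias terms appended to each layer's inputs.
% A minimal forward pass under those assumptions:
addbias_sketch = @(z) [z; ones(1,size(z,2))];
hid_sketch = @(w1,x) tanh(w1*addbias_sketch(x));             % hidden-layer outputs
out_sketch = @(w1,w2,x) w2*addbias_sketch(hid_sketch(w1,x)); % network outputs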
% The replicate y-values predicted with partial models are accumulated
conctrx = [conctrx;xtr']; % Matrix of replicate training x-values
concytrhat = [concytrhat;concyhtr2']; % Matrix of replicate training y-values
end
% For each input variable, an ANOVA-LOF is performed on the replicate y-values to
% check if they fit a linear regression model
for i = 1:mxtr
[ylhat,F,Ftab,lof,b0,b1] = linfit(conctrx(:,i),concytrhat(:,i));
FF(i) = F; % The calculated F-value for each variable
loff(i) = lof; % The ratio F/Fcritical for each variable
end
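% Hedged sketch (assumption: linfit performs a classical ANOVA lack-of-fit test
% on the replicate responses). With m distinct x-levels, n observations in
% total and a straight-line fit yhat, the test statistic is
%   F = [SS_lof/(m-2)] / [SS_pe/(n-m)]
% where SS_pe pools the within-level scatter and SS_lof measures how far the
% level means fall from the fitted line; loff above is then F/Fcritical.
% Self-contained illustration on hypothetical data (the *_demo names are not
% from this script):
x_demo = kron((1:4)',ones(3,1));                     % 4 levels, 3 replicates each
y_demo = 2*x_demo + 0.05*x_demo.^2 + repmat([-0.1;0;0.1],4,1); % mild curvature + scatter
p_demo = polyfit(x_demo,y_demo,1);                   % straight-line fit
yhat_demo = polyval(p_demo,x_demo);
ybar_demo = accumarray(x_demo,y_demo,[],@mean);      % level means
sspe_demo = sum((y_demo - ybar_demo(x_demo)).^2);    % pure-error sum of squares
sslof_demo = 3*sum((ybar_demo - accumarray(x_demo,yhat_demo,[],@mean)).^2);
F_demo = (sslof_demo/(4-2))/(sspe_demo/(12-4));      % compare with F(0.05,2,8)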
% Kolmogorov-Smirnov test to check that for each sample, replicate
% predicted values are normally distributed.
resultks = kolnorm(concytrhat,nxtr,32,0.05);
if any(resultks)
disp(' ANOVA for lack-of-fit may not be reliable')
disp(' Some replicate predicted values are not normally distributed')
disp(' ')
disp(' Percentage of samples not normally distributed for each variable:')
disp(' -----------------------------------------------------------------')
disp(resultks)
men3 = menu('ANOVA-LOF not reliable',...
'Continue');
end
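% Hedged sketch (assumption about kolnorm, which is not shown here): for each
% sample the replicate predictions are tested for normality with a
% Kolmogorov-Smirnov test at the 5% level, and the percentage of samples
% failing the test is reported per variable. With the Statistics Toolbox, a
% single such test on standardized values could be written as:
ks_sketch = @(y) kstest((y - mean(y))./std(y));   % returns 1 if normality is rejected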
bar(FF);
nax = axis;hold on;clc;
line([nax(1) nax(2)],[Ftab Ftab],'linestyle','--','color','r');
xlabel('Variable');ylabel('ANOVA F-value');
set(gcf,'Position',setfig);
men3 = menu('ANOVA for lack-of-fit',...
'1. Print',...
'2. Next plot');
if men3 == 1
print
end;clc;close;close
elseif men2 == 5
continue = 0;
end
end
end
if men1 == 5 % Partial models (hidden nodes)
clc;close
for i = 1:mdef
rhn = corrplot(ytrain,partyhh(i,:)',1); % Correlation plot
xlabel('Observed values - Training set')
rensy = sprintf('Predicted values when hidden node %2.0f is removed',i);
ylabel(rensy)
set(gcf,'Position',setfig);
men2 = menu('Partial model - Training',...
'1. Print',...
'2. Next plot');
if men2 == 1
print
end;close
end
end
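% Hedged sketch (assumption, not from the original toolbox): the partial
% predictions partyhh plotted above are taken to be obtained by removing one
% hidden node at a time, i.e. zeroing its output before the linear output
% layer of the one-hidden-layer network assumed earlier:
parthh_sketch = @(w1,w2,x,i) w2*[tanh(w1*[x; ones(1,size(x,2))]) ...
    .* repmat((1:size(w1,1))' ~= i, 1, size(x,2)); ones(1,size(x,2))];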
if men1 == 6 % Projection on hidden nodes
clc;close
continue = 1;
while continue
clc;close
men2 = menu('Projection on hidden nodes',...
'1. Training set only',...
'2. Training + monitoring set',...
'3. Training + monitoring + test set');
men3 = menu('Display',...
'1. Points only',...
'2. Show sample indices');
node1 = input('Which node on X-axis ? ');
node2 = input('Which node on Y-axis ? ');
plot(ytr1(node1,:),ytr1(node2,:),'o')
hold on;clc
rensx = sprintf('Outputs of hidden node %2.0f (%3.0f%%)',[node1 varytrunch(node1)]);
rensy = sprintf('Outputs of hidden node %2.0f (%3.0f%%)',[node2 varytrunch(node2)]);
xlabel(rensx);ylabel(rensy);
title('o: Training set')
if men2 ~= 1
plot(ymon1(node1,:),ymon1(node2,:),'c*');
title('o: Training set   *: Monitoring set')
if men2 == 3
if testflag
plot(ytes1(node1,:),ytes1(node2,:),'r+');
title('o: Training set   *: Monitoring set   +: Test set');
else
title('o: Training set   *: Monitoring set   No test set');
end
end
end
if men3 == 2
for i = 1:nxtr
intr = int2str(i);
textr = ['t' intr];
text(ytr1(node1,i),ytr1(node2,i),textr);
end
if men2 ~= 1
for i = 1:nxm
inm = int2str(i);
texm = ['m' inm];
text(ymon1(node1,i),ymon1(node2,i),texm);
end
if men2 == 3
if testflag
for i = 1:nxte
inte = int2str(i);
texte = ['n' inte];
text(ytes1(node1,i),ytes1(node2,i),texte);
end
end
end
end
end
set(gcf,'Position',setfig);
men4 = menu('Projection on hidden nodes',...
'1. Print',...
'2. Another plot',...
'3. Back to main menu');
if men4 == 1
print
elseif men4 ==3
continue = 0;
end;close
end
end
if men1 == 7 % Calculation of deviations from linearity
clc;close
continue = 1;
while continue
men2 = menu('Deviations from linearity',...
'1. Training set',...
'2. Monitoring set',...
'3. Test set',...
'4. Back to main menu');
if men2 == 1 % Training results
for i = 1:mdef
maxintr = 2*max(abs(intr1(i,:)));
axinp = [-maxintr:0.1:maxintr];
projmaxinp = pmntanh(axinp);
clc;close
devlin(i,:) = (ytr1(i,:)-intr1(i,:))./intr1(i,:);
meandev = round(100*mean(abs(devlin(i,:))));
plot(axinp,axinp,'r',axinp,projmaxinp)
hold on;clc;
plot(intr1(i,:),ytr1(i,:),'co')
renstit = sprintf('Hidden node %2.0f - magnitude %3.0f%% - Mean deviation: %3.0f%%',[i varytrunch(i) meandev]);
title(renstit)
ylabel('Output')
xlabel('Input - Training set')
set(gcf,'Position',setfig);
men3 = menu('Deviations from linearity',...
'1. Print',...
'2. Next plot');
if men3 == 1
print
end;close
end
clear devlin
elseif men2 == 2 % Monitoring results
for i = 1:mdef
maxintr = 2*max(abs(inmon1(i,:)));
axinp = [-maxintr:0.1:maxintr];
projmaxinp = pmntanh(axinp);
clc;close
devlin(i,:) = (ymon1(i,:)-inmon1(i,:))./inmon1(i,:);
meandev = round(100*mean(abs(devlin(i,:))));
plot(axinp,axinp,'r',axinp,projmaxinp)
hold on;clc;
plot(inmon1(i,:),ymon1(i,:),'co')
renstit = sprintf('Hidden node %2.0f - magnitude %3.0f%% - Mean deviation: %3.0f%%',[i varytrunch(i) meandev]);
title(renstit)
ylabel('Output')
xlabel('Input - Monitoring set')
set(gcf,'Position',setfig);
men3 = menu('Deviations from linearity',...
'1. Print',...
'2. Next plot');
if men3 == 1
print
end;close
end
clear devlin
elseif men2 == 3 % Test results
if testflag
for i = 1:mdef
maxintr = 2*max(abs(intes1(i,:)));
axinp = [-maxintr:0.1:maxintr];
projmaxinp = pmntanh(axinp);
clc;close
devlin(i,:) = (ytes1(i,:)-intes1(i,:))./intes1(i,:);
meandev = round(100*mean(abs(devlin(i,:))));
plot(axinp,axinp,'r',axinp,projmaxinp)
hold on;clc;
plot(intes1(i,:),ytes1(i,:),'co')
renstit = sprintf('Hidden node %2.0f - magnitude %3.0f%% - Mean deviation: %3.0f%%',[i varytrunch(i) meandev]);
title(renstit)
ylabel('Output')
xlabel('Input - Test set')
set(gcf,'Position',setfig);
men3 = menu('Deviations from linearity',...
'1. Print',...
'2. Next plot');
if men3 == 1
print
end;close
end
else
men3 = menu('No test set was provided',...
'Back to previous menu');
end
clear devlin
elseif men2 == 4
continue = 0;
end
end
end
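% Hedged note (assumptions, not from the original toolbox): pmntanh is taken
% to be the usual hyperbolic-tangent activation, often coded as
% 2./(1+exp(-2*x))-1, and the deviation from linearity plotted above is the
% relative difference between each hidden node's output and its input:
pmntanh_sketch = @(x) 2./(1 + exp(-2*x)) - 1;     % numerically equal to tanh(x)
devlin_sketch = @(yout,yin) (yout - yin)./yin;    % relative deviation per sample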
if men1 == 8 % Summary results
clc;close
disp(' Optimal number of epochs for each trial:')
disp(' ----------------------------------------')
disp(indisp)
disp(' ')
disp(' ')
disp(' Seeds:')
disp(' ------')
disp(rseed')
disp(' ')
disp(' ')
disp(' Seed for the solution retained:')
disp(' -------------------------------')
disp(rseed(indref))
disp(' ')
disp(' ')
disp(' RMSEC - RMSEM:')
disp(' ----------------')
disp([rmstra' rmsmon'])
disp(' ')
disp(' ')
if trials ~= 1
disp(' Median of RMSEM:')
disp(' ----------------')
disp(medrmsmon)
disp(' ')
disp(' ')
disp(' Average RMSEC - Average RMSEM:')
disp(' ---------------------------------')
disp([mean(rmstra) mean(rmsmon)])
disp(' ')
disp(' ')
disp(' STD(RMSEC) - STD(RMSEM):')
disp(' --------------------------')
disp([std(rmstra) std(rmsmon)])
disp(' ')
disp(' ')
if testflag
disp(' RMSEP')
disp(' -----')
disp(rmstest')
disp(' ')
disp(' ')
disp(' Average RMSEP:')
disp(' --------------')
disp(mean(rmstest))
disp(' ')
disp(' ')
disp(' STD(RMSEP):')
disp(' -----------')
disp(std(rmstest))
disp(' ')
disp(' ')
end
end
[sensort,order] = sort(varytrunc); % Sensitivities ranked in ascending order
disp(' Variables ranked in order of increasing sensitivity:')
disp(' ----------------------------------------------------')
disp(order)
end
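% Hedged note (assumption about naming): RMSEC, RMSEM and RMSEP above are
% taken to be root-mean-square errors over the calibration (training),
% monitoring and prediction (test) sets respectively, i.e.:
rmse_sketch = @(y,yhat) sqrt(mean((y(:) - yhat(:)).^2));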
if men1 == 9 % Draw the neural network
clc;close
drawnn(topo,w1f,w2f);
set(gcf,'Position',setfig);
title('Topology of the neural network');
xlabel('Nodes with vertical bars indicate linear transfer functions')
men2 = menu('NN structure',...
'1. Print',...
'2. Back to main menu');
if men2 == 1
print
end;close
end
if men1 == 10 % End
contflag = 0;
clc;close
end
end
w1f = w1ref;
w2f = w2ref;
clear concymhat concyteshat partyhh conctrx concytrhat concyhtr concyhtr2