%~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
%
% updateParameters(network,ETA,ALPHA) - applies one gradient-descent
% step with momentum to the network weights and returns the updated
% network.
%
% Parameters: network - neural network with matrix networks
%             ETA     - learning rate
%             ALPHA   - momentum coefficient
%
% Author: Povilas Daniušis, paralax@hacker.lt
% http://ai.hacker.lt - Lithuanian site about Artificial Intelligence.
%
% TODO: weighted MSE minimization, maximum likelihood method, multiple
% activation function support.
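%
% The update rule implemented below appears to be standard gradient
% descent with momentum (this summary is inferred from the code, not
% stated by the original author):
%
%     dw(t) = ALPHA*dw(t-1) - ETA*dE/dw
%     w(t)  = w(t-1) + dw(t)
%
% where dE/dw is the accumulated gradient held in network.d,
% network.d_left, network.d_right and network.d_b; each accumulator is
% reset to zero after the step has been applied.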
% ----------------------------------------------------------------------
function f=updateParameters(network,ETA,ALPHA)
    left_size  = length(network.left(1).w);
    right_size = length(network.right(1).w);
    for j=1:network.regressors+1
        % output weight: momentum term plus gradient step, then store the
        % step for the next iteration and reset the accumulated gradient
        network.weights(j)  = network.weights(j) + ALPHA*network.dweights(j) - ETA*network.d(j);
        network.dweights(j) = ALPHA*network.dweights(j) - ETA*network.d(j);
        network.d(j) = 0;
        if (j <= network.regressors) % internal weights
            % left-side internal weights of the j-th regressor
            for k=1:left_size
                network.left(j).w(k) = network.left(j).w(k) + ALPHA*network.dleft(j,k) - ETA*network.d_left(j,k);
                network.dleft(j,k)   = ALPHA*network.dleft(j,k) - ETA*network.d_left(j,k);
                network.d_left(j,k)  = 0;
            end
            % right-side internal weights of the j-th regressor
            for k=1:right_size
                network.right(j).w(k) = network.right(j).w(k) + ALPHA*network.dright(j,k) - ETA*network.d_right(j,k);
                network.dright(j,k)   = ALPHA*network.dright(j,k) - ETA*network.d_right(j,k);
                network.d_right(j,k)  = 0;
            end
            % bias of the j-th regressor (dbias is indexed by j only)
            network.bias(j)  = network.bias(j) + ALPHA*network.dbias(j) - ETA*network.d_b(j);
            network.dbias(j) = ALPHA*network.dbias(j) - ETA*network.d_b(j);
            network.d_b(j)   = 0;
        end
    end
    f = network;
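% ----------------------------------------------------------------------
% Usage sketch (assumption): the struct layout below is inferred only
% from the fields that updateParameters reads; the real constructor used
% in this project may differ. Paste into a separate script to try it.
%
%   R = 2; nLeft = 3; nRight = 3;            % regressors and weight-vector sizes
%   net.regressors = R;
%   net.weights  = randn(1, R+1);   net.dweights = zeros(1, R+1);
%   net.d        = randn(1, R+1);            % dummy gradients standing in for backprop output
%   for j = 1:R
%       net.left(j).w  = randn(1, nLeft);
%       net.right(j).w = randn(1, nRight);
%   end
%   net.dleft  = zeros(R, nLeft);   net.d_left  = randn(R, nLeft);
%   net.dright = zeros(R, nRight);  net.d_right = randn(R, nRight);
%   net.bias   = randn(1, R);       net.dbias   = zeros(1, R);
%   net.d_b    = randn(1, R);
%
%   net = updateParameters(net, 0.05, 0.9);  % ETA = 0.05, ALPHA = 0.9
% ----------------------------------------------------------------------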