📄 mnn_device.m
字号:
%~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
%
% mNN_device(n,msize,alpha,eta,epsilon,epsilon1,earlyStop) - create structure to work with.
%
% Parameters: n - number of neurones (n+1 regressors are created)
% msize - rows and cols of input matrix, as [rows cols]
% alpha - inertia factor (if using gradient descent)
% eta - learning rate (also if using gradient descent)
% epsilon - stopping criteria (if error < epsilon - exit)
% epsilon1 - if weight deltas < epsilon1 then exit
% earlyStop - if error increases more than earlyStop iterations then stop
%
% Note: initial weights are drawn uniformly from [-1,1].
%
% Author: Povilas Daniušis, paralax@hacker.lt
% http://ai.hacker.lt - lithuanian site about Artificial Intelligence.
%
% TODO: weighted MSE minimization, maximal likelihood method, multiple
% activation function support.
% ----------------------------------------------------------------------
function f = mNN_device(n,msize,alpha,eta,epsilon,epsilon1,earlyStop)
% mNN_device  Create a structure for a neural network with matrix inputs.
%
%   f = mNN_device(n,msize,alpha,eta,epsilon,epsilon1,earlyStop)
%
%   n         - number of neurones (n+1 regressors are created)
%   msize     - [rows cols] of the input matrix
%   alpha     - inertia (momentum) factor for gradient descent
%   eta       - learning rate for gradient descent
%   epsilon   - stopping criterion: exit when error < epsilon
%   epsilon1  - exit when weight deltas < epsilon1
%   earlyStop - stop if error increases for more than earlyStop iterations
%
% All initial weights are drawn uniformly from [-1,1].
ro = msize(1);
co = msize(2);
% One (1 x ro) left factor and one (co x 1) right factor per regressor.
% Keep the rand() call order (left, right, left, right, ...) so a fixed
% RNG seed reproduces the same initialization as before.
for i = 1:n+1
    left(i).w  = 2*rand(1, ro) - 1;
    right(i).w = 2*rand(co, 1) - 1;
end
% Derivative and delta accumulators, preallocated in one shot instead of
% growing row-by-row inside the loop (the original also assigned a
% zeros(co,1) column to a row slice, relying on MATLAB's implicit reshape).
d_left  = zeros(n+1, ro);  % derivatives w.r.t. left weights
d_right = zeros(n+1, co);  % derivatives w.r.t. right weights
dleft   = zeros(n+1, ro);  % weight deltas for left weights
dright  = zeros(n+1, co);  % weight deltas for right weights
f.regressors = n+1;
f.left     = left;
f.right    = right;
f.d_left   = d_left;
f.d_right  = d_right;
f.dleft    = dleft;
f.dright   = dright;
f.d_b      = zeros(1,n+1);        % bias derivatives
f.weights  = 2*rand(1,n+1) - 1;   % output combination weights
f.dweights = zeros(1,n+1);        % output weight deltas
f.d        = zeros(1,n+1);        % derivatives w.r.t. output weights
f.bias     = (2*rand(1,n+1) - 1); % per-regressor biases
f.dbias    = zeros(1,n+1);        % bias deltas
% Total trainable parameters: output weights + per-regressor (left, right, bias).
f.numparams = n+1 + (n+1)*(ro + co + 1);
f.alpha     = alpha;
f.eta       = eta;
f.epsilon   = epsilon;
f.earlyStop = earlyStop;
f.epsilon1  = epsilon1;
f.name = 'Neural network with matrix inputs';
% -------------------------------------------------------------------------
% -
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -