% nmatreg.m
%
% Linear neural network with matrix inputs.
%
% In this model we take sums of u'Xv and iteratively optimize parameters of
% the sums. For paper(s) see pd.hacker.lt
%
% Model created and implemented by Povilas Daniušis, paralax@hacker.lt,
% 2008
%
% usage nMatReg(X,y,M,N,lambda)
%
% where X is an input matrix data structure (i.e. X(i).mat would be a i-th
% element
% y - is corresponding target values
% M - iterations
% N - number of regressors
% lambda - regularization constant
function [u,v]=nMatReg(X,y,M,N,lambda)
% nMatReg  Linear neural network regression with matrix-valued inputs.
%
% Approximates y(j) ~ sum_k u(:,k)' * X(j).mat * v(:,k) by alternating
% regularized least squares: each u(:,k) and v(:,k) is solved in closed
% form while all other factors are held fixed.
%
% Inputs:
%   X      - struct array; X(j).mat is the j-th input matrix (all must
%            share the size of X(1).mat)
%   y      - vector of target values, length(y) == numel(X)
%   M      - number of outer (alternating) iterations
%   N      - number of regressors (terms in the bilinear expansion)
%   lambda - ridge regularization constant
%
% Outputs:
%   u, v   - rows-by-N and cols-by-N factor matrices; column k holds the
%            k-th regressor pair. Initialization is random (rand), so
%            results vary between runs unless the RNG is seeded.
[rows,cols] = size(X(1).mat);
% Preallocate and randomly initialize in one step instead of growing the
% matrices column-by-column inside a loop.
u = rand(rows,N);
v = rand(cols,N);
% Regularization terms are loop-invariant; build them once.
Ru = lambda*eye(rows);
Rv = lambda*eye(cols);
for i=1:M % iterations
    for k=1:N % order
        % --- update u(:,k): solve (A + lambda*I) u = z * v(:,k) ---
        A = zeros(rows,rows);
        z = zeros(rows,cols);
        for j=1:length(y)
            A = A + X(j).mat * v(:,k) * v(:,k)' * X(j).mat';
            % akx: prediction contributed by all other regressors m ~= k,
            % so (y(j) - akx) is the residual this regressor must fit.
            akx = 0;
            for m=1:N
                if (m ~= k)
                    akx = akx + v(:,m)'*X(j).mat'*u(:,m);
                end
            end
            z = z + (y(j) - akx) * X(j).mat;
        end
        % Backslash (mldivide) is faster and numerically more accurate
        % than forming inv(...) explicitly.
        u(:,k) = (A + Ru) \ (z * v(:,k));
        % --- update v(:,k): symmetric problem with u and v roles swapped ---
        A = zeros(cols,cols);
        z = zeros(cols,rows);
        for j=1:length(y)
            A = A + X(j).mat' * u(:,k) * u(:,k)' * X(j).mat;
            bkx = 0;
            for m=1:N
                if (m ~= k)
                    bkx = bkx + u(:,m)'*X(j).mat*v(:,m);
                end
            end
            z = z + (y(j) - bkx) * X(j).mat';
        end
        v(:,k) = (A + Rv) \ (z * u(:,k));
        % --- report training SSE after this regressor's update ---
        sse = 0;
        for jj=1:length(y)
            out = 0;
            for kk=1:N
                out = out + u(:,kk)'*X(jj).mat*v(:,kk);
            end
            sse = sse + (y(jj) - out).^2;
        end
        fprintf('Iteration %d, regressor %d, SSE = %f \n',i,k,sse);
    end
end