📄 classnetfun.m
function [LIK, sse3, A3, PDERIVNET] = classnetfun(beta)
% CLASSNETFUN  Single-hidden-layer logistic classification network.
%   Returns the negative log-likelihood LIK (binary cross-entropy), the
%   sum of squared errors sse3, the fitted probabilities A3, and the
%   partial derivatives of the output w.r.t. each input at mean(P).
global P T nneuron1;                  % inputs, targets, number of hidden units
[rt, ct] = size(T);
rt = min([rt, ct]);                   % number of target series (not used below)
[rp, cp] = size(P);                   % rp observations, cp input variables
x = P;

% Hidden-layer pre-activations, one column per hidden unit
for j = 1:nneuron1
    nn1(:,j) = x * beta(1+(j-1)*cp : j*cp)' + beta(nneuron1*cp+j);
end

% Logistic activation of each hidden unit
for j = 1:nneuron1
    nn2(:,j) = 1 ./ (1 + exp(-nn1(:,j)));
end

% Output: weighted sum of the hidden-unit activations; the weight on the
% last unit is one minus the sum of the others, so the weights sum to one
yhat = nn2(:,1:nneuron1-1) * beta(nneuron1*cp+nneuron1+1 : nneuron1*cp+2*nneuron1-1)' ...
       + nn2(:,nneuron1) .* ...
         (1 - sum(beta(nneuron1*cp+nneuron1+1 : nneuron1*cp+2*nneuron1-1)));

TT = T;                               % targets for the SSE criterion
y  = T;                               % binary targets for the likelihood
A3 = yhat;                            % fitted probabilities

% Bernoulli log-likelihood, returned as a negative sum for minimisation
lik = y .* log(yhat) + (1-y) .* log(1-yhat);
LIK = -sum(lik);
sse3 = (TT - yhat)' * (TT - yhat);
% LIK = sse3;                         % alternative objective: minimise the SSE

% Partial derivatives of the network output w.r.t. each input,
% evaluated at the sample mean of the inputs
xmean  = mean(P);
BETA   = [beta(nneuron1*cp+nneuron1+1 : nneuron1*cp+2*nneuron1-1) ...
          1 - sum(beta(nneuron1*cp+nneuron1+1 : nneuron1*cp+2*nneuron1-1))];
GAMMA  = beta(1:nneuron1*cp);
GAMMA1 = reshape(GAMMA, cp, nneuron1);   % input weights, cp x nneuron1
for j = 1:nneuron1
    nn1mean(:,j) = xmean * beta(1+(j-1)*cp : j*cp)' + beta(nneuron1*cp+j);
end
for j = 1:nneuron1
    nn2mean(:,j) = 1 ./ (1 + exp(-nn1mean(:,j)));
end
for i = 1:cp
    for j = 1:nneuron1
        junk(j,i) = BETA(j) * nn2mean(j) * (1-nn2mean(j)) * GAMMA1(i,j);
    end
end
PDERIVNET = sum(junk);                % 1 x cp vector of derivatives
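
Usage sketch: the calling script below is an assumption, not part of the original file. The data placed in P and T, the choice of three hidden units, and the use of fminsearch are placeholders chosen only to show how the globals and the length of the parameter vector line up with the indexing in classnetfun. Because yhat is not forced to stay strictly inside (0,1) away from the initial point, a penalised or bounded objective may be needed in practice.

% Hypothetical calling script -- a minimal sketch, not the original author's code
global P T nneuron1;
P = [randn(50,2)+1; randn(50,2)-1];              % 100 x 2 example inputs (assumed)
T = [ones(50,1); zeros(50,1)];                   % binary class labels (assumed)
nneuron1 = 3;                                    % number of hidden units (assumed)
cp = size(P,2);
nparam = nneuron1*cp + nneuron1 + (nneuron1-1);  % input weights + biases + free output weights
beta0 = 0.1*randn(1, nparam);                    % row vector, matching the indexing above
betahat = fminsearch(@classnetfun, beta0);       % minimise the first output, LIK
[LIK, sse3, A3, PDERIVNET] = classnetfun(betahat);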