% cm_mod1_new.m
function [error, yhat, pderiv, neuron1] = cm_mod1_new(beta)
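% CM_MOD1_NEW  Evaluate a single-neuron nonlinear regression model with
% moving-average error terms at the parameter vector beta.
%
% Outputs (inferred from the code below; the model interpretation is an
% assumption, not the original author's documentation):
%   error   - negative Gaussian log-likelihood at beta (suitable for minimisation)
%   yhat    - fitted values of y, mapped back to the original scale
%   pderiv  - partial derivatives of the fit with respect to each regressor
%   neuron1 - tanh neuron activation for each observation
%
% Data and settings are supplied through the global variables declared below:
% data1 holds y in column 1 and the regressors in the remaining columns;
% squasher toggles min-max scaling; malags is the number of MA lags;
% neuronxarg_inf and cthres_inf select the neuron's input column and threshold.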
global data1 squasher maxx minx maxy miny malags neuronxarg_inf cthres_inf;
y = data1(:,1);
x = data1(:,2:end);
[nx, cx] = size(x);
% beta(1:2*cx+3) = abs(beta(1:2*cx+3));
if squasher == 1
    % min-max scale y and each column of x to the [0,1] interval
    yy = (y - miny)/(maxy - miny);
    xx = zeros(nx, cx);
    for i = 1:cx
        xx(:,i) = (x(:,i) - minx(i))/(maxx(i) - minx(i));
    end
else
    yy = y;
    xx = x;
end
if squasher == 1
    % rescale the neuron threshold with the same min-max transform as its input column
    cxarg_inf = (cthres_inf - minx(neuronxarg_inf)) / (maxx(neuronxarg_inf) - minx(neuronxarg_inf));
else
    cxarg_inf = cthres_inf;
end
ny = length(yy);
yhat1 = yy;
% linear part of the fit: scaled regressors weighted by |beta(1:cx)| plus an intercept
xx1 = xx * abs(beta(1:cx))' + ones(ny,1) * beta(cx+1);
ehat(1:malags,1) = zeros(malags,1);      % lagged residuals feeding the MA terms
neuron1(1:malags,1) = zeros(malags,1);   % neuron activations (first malags observations unused)
for i = malags+1:ny
    % tanh neuron driven by the lagged value of the selected regressor,
    % centred at the (rescaled) threshold cxarg_inf
    neuronx = abs(beta(cx+2)) * (xx(i-1,neuronxarg_inf) - cxarg_inf);
    neuron1(i,:) = 2 ./ (1 + exp(-2 * neuronx)) - 1;   % equals tanh(neuronx)
    % moving-average part: last malags residuals weighted by beta(cx+4:cx+malags+3)
    EXX = ehat(i-malags:i-1,:);
    yhat1(i,:) = xx1(i,:) + neuron1(i,:) * abs(beta(cx+3)) * xx1(i,neuronxarg_inf) ...
        + beta(cx+4:cx+malags+3) * EXX;
    ehat(i,:) = yy(i,:) - yhat1(i,:);
end
nparm = cx + malags + 3;               % number of estimated parameters
error = yy - yhat1;
error = mean(error .^ 2);              % mean squared error of the scaled fit
sigma = error / nparm;                 % variance scale used in the likelihood
T = length(yhat1);
% Gaussian log-likelihood; the negative value is returned in `error`
% so the function can be handed directly to a minimiser
loglik = -.5 * T * log(2 * pi) - .5 * T * log(sigma) - .5 * error / sigma;
error = -loglik;
if squasher == 1
    % map the fitted values back to the original scale of y
    yhat = yhat1 * (maxy - miny) + miny;
else
    yhat = yhat1;
end
% partial derivatives of the fit with respect to each regressor
for i = 1:cx
    for j = 2:ny
        pderiv(j,i) = abs(beta(i));
    end
end
% the regressor that feeds the neuron also picks up the nonlinear term
for j = 2:ny
    pderiv(j,neuronxarg_inf) = abs(beta(neuronxarg_inf)) + neuron1(j) * abs(beta(cx+3));
end
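% Example (a minimal usage sketch; the data file, lag order and starting
% values below are hypothetical, not taken from the original code):
%
%   global data1 squasher maxx minx maxy miny malags neuronxarg_inf cthres_inf;
%   data1 = load('mydata.txt');             % column 1 = y, remaining columns = x
%   squasher = 1;                            % min-max scale the data to [0,1]
%   maxy = max(data1(:,1));  miny = min(data1(:,1));
%   maxx = max(data1(:,2:end));  minx = min(data1(:,2:end));
%   malags = 2;                              % number of moving-average lags
%   neuronxarg_inf = 1;                      % regressor column driving the neuron
%   cthres_inf = 0;                          % neuron threshold (original units)
%   cx = size(data1,2) - 1;
%   beta0 = 0.1 * ones(1, cx + malags + 3);  % crude starting values
%   bhat = fminsearch(@cm_mod1_new, beta0);  % minimise the negative log-likelihood
%   [nll, yhat] = cm_mod1_new(bhat);         % fitted values at the optimum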