% classnet.m
function [DISCRIMRES, NETRES, LOGITRES, PROBITRES, GOMPITRES,...
    PDERIVNET, PDERIVLOG, PDERIVPROB, PDERIVGOMPIT,...
    PDERIVNETF, PDERIVLOGF, PDERIVPROBF, PDERIVGOMPITF,...
    Ahatin, Ahatout, LIKELIHOOD, beta, beta1, beta2, beta3] =...
    classnet(assetx,coldep,percent,errweight,limit,info,gendum,maxgen, helge, beta0init, beta1init, beta2init, beta3init);
% CLASSNET  Estimate and compare binary classifiers on one data set:
% linear discriminant, feed-forward neural net, logit, probit and gompit.
% Each model is fit on the first percent*nrow observations and (when
% percent < 1) evaluated on the remainder.
%
% Outputs:
%   DISCRIMRES, NETRES, LOGITRES, PROBITRES, GOMPITRES:
%       2x3 matrices [false-positive rate, false-negative rate,
%       errweight-weighted score]; row 1 in-sample, row 2 out-of-sample
%       (row 2 is ones(1,3) when percent == 1)
%   PDERIVNET/LOG/PROB/GOMPIT : derivatives returned by each model function
%   PDERIVNETF/LOGF/PROBF/GOMPITF : finite-difference derivatives of the
%       in-sample fitted probabilities w.r.t. each regressor
%   Ahatin, Ahatout : fitted probabilities [net logit probit gompit],
%       in-sample and out-of-sample
%   LIKELIHOOD : [net; logit; probit; gompit] likelihood values
%   beta, beta1, beta2, beta3 : net / logit / probit / gompit coefficients
% Inputs:
%   assetx    : data matrix, dependent variable included as a column
%   coldep    : column index of the dependent variable
%   percent   : fraction of the data used in sample
%   errweight : weight on false positives vs false negatives
%   limit     : classification threshold, usually .5
%   info      : [nlayer nneuron1 nneuron2 nneuron3]
%   gendum    : 1 = genetic algorithm then gradient descent,
%               0 = gradient descent only, 2 = genetic algorithm only
%   maxgen    : number of generations for the genetic algorithm
%   helge     : scaling selector (0 = none, 1 = hsquasher, 2 = logistic of
%               z-scores, otherwise min-max scaling)
%   beta*init : optional starting coefficient vectors (all four must be
%               supplied together; used when nargin > 9)
%
% NOTE(review): the model functions (classnetfun, logit, probit, gompit)
% read their data from the globals P and T rather than from arguments.
global NEPOCH;
global P T nlayer nneuron1 nneuron2 nneuron3;
nntwarn off;
warning off MATLAB:divideByZero
fun  = 'classnetfun';
fun1 = 'logit';
fun2 = 'probit';
fun3 = 'gompit';
maxgen_probit = maxgen + 50;
maxgen_gompit = maxgen_probit + 50;
nlayer   = info(1);
nneuron1 = info(2);
nneuron2 = info(3);
nneuron3 = info(4);
popsize = 100; pc = .9; pdes = 0; toler = .000001; elite = 1;
[rr cc] = size(assetx);
% Split the dependent variable from the regressors.
y = assetx(:,coldep);
if coldep == 1, x = assetx(:,2:end);
else x = [assetx(:,1:coldep-1) assetx(:,coldep+1:end)];
end
[rp, cp] = size(x);
yxmat = [y x];
[nrow ncol] = size(x);
[nrowy ncoly] = size(y);
nrow1 = round(percent * nrow);          % number of in-sample observations
nrow11 = nrow1 + 1;                     % first out-of-sample row
[nrow12 nrow13] = size(x(1:nrow1,:));
yy = y(1:nrow1,:); xx = x(1:nrow1,:);
smin = .1;
smax = .9;
[rx, cx] = size(x);
[ry, cy] = size(y);
maxy = max(y);   miny = min(y);
maxx = max(x);   minx = min(x);
meany = mean(y); sigy = std(y);
yz = detrend(y,0) ./ kron(ones(rx,1), sigy);
meanx = mean(x); sigx = std(x);
xz = detrend(x,0) ./ kron(ones(rx,1), sigx);
% Build the candidate scalings of the data.
for i = 1:cy,
    ys(:,i)  = y(:,i);
    yss(:,i) = y(:,i);
end
for i = 1:cx,
    xs(:,i)  = hsquasher(x(:,i), smax, smin);
    xss(:,i) = 1 ./ (1 + exp(-(xz(:,i))));   % logistic of the z-scores
end
if helge == 0, PN = x; TN = y;
elseif helge == 1, PN = xs; TN = ys;
elseif helge == 2, PN = xss; TN = yss;
else PP = x; TT = y;
    for i = 1:cp, PN(:,i) = (PP(:,i)-minx(i)) ./ (maxx(i)-minx(i));
    end
    for i = 1:cy,
        TN(:,i) = (TT(:,i)-miny(i)) / (maxy(i)-miny(i));
    end
end
global P T;
P = PN(1:nrow1,:);
T = TN(1:nrow1,:);
% ----- Discriminant analysis (in sample) -----
yy = y(1:nrow1,:); xx = x(1:nrow1,:);
sample1 = xx;
training = xx;
group = yy > limit;        % threshold to binary labels
group = group + 1;         % classify wants group labels 1,2
class1 = classify(sample1, training, group);
group = group - 1;         % back to 0/1 (note: group stays 0/1 below)
class1 = class1 - 1;
test1 = (class1 - group);
err1 = test1 > 0;          % false positives
err2 = test1 < 0;          % false negatives
err1p = sum(err1) / nrow1; err2p = sum(err2) / nrow1;
testsc1 = errweight * err1p + (1-errweight) * err2p;
discrimres_in = [err1p err2p testsc1];
% ----- Neural network analysis -----
global P T;
P = PN(1:nrow1,:);
T = TN(1:nrow1,:);
[rp, cp] = size(P);
nparm = nneuron1 * cp + nneuron1 + (nneuron1-1);
nparm1 = cp + 1;
maxgen1 = maxgen + 25;
nepoch = NEPOCH;
tp = [25, nepoch, .02, .01, 1.07, .7, .9, 1.04];
pm = 1/nparm; elite = 1;
pdes = 0; scale = .1;
% BUG FIX: options was only created inside the gendum == 1 branch but is
% needed by every fminunc call below (net, logit, probit, gompit); define
% it once, unconditionally.
options = optimset('Display','iter', 'MaxIter', nepoch, 'maxfuneval', nepoch);
if nargin > 9, beta0 = beta0init; else beta0 = randn(1, nparm) * .1; end;
if gendum >= 1,
    beta = gen7f(fun,beta0,popsize,maxgen);
else beta = beta0;
end
if gendum == 1,
    [beta,fval,exit0,output0,grad0,HESSN] = fminunc(fun, beta, options);
    [LIK3,sse3,A3,PDERIVNET] = feval(fun,beta);
elseif gendum > 1, [LIK3,sse3,A3,PDERIVNET] = feval(fun,beta);
else [beta,fval,exit0,output0,grad0,HESSN] = fminunc(fun, beta0, options);
    [LIK3,sse3,A3,PDERIVNET] = feval(fun,beta);
end
% Undo the helge scaling of the fitted values.
if helge == 0, A3n = A3;
elseif helge == 1,
    for i = 1:cy,
        A3n(:,i) = helgeyx(A3(:,i), maxy(i), miny(i), smax, smin);
    end
elseif helge == 2,
    A3x = -log(1./A3 - ones(size(A3)));     % inverse of the logistic map
    A3x = real(A3x); [junkr, junkc] = size(A3x);
    A3x = kron(ones(junkr,1),meany) + A3x .* kron(ones(junkr,1),sigy);
    A3n = A3x;
else
    for i = 1:cy,
        A3n(:,i) = A3(:,i) * (maxy(:,i)-miny(:,i)) + miny(:,i);
    end
end
A3real = A3n;
A3 = A3 > limit;
err3 = A3 - T;
err3a = err3 > 0;
err3b = err3 < 0;
% BUG FIX: the in-sample rates were divided by nrow (the full sample size)
% although the errors span only the nrow1 in-sample observations; the
% discriminant block above correctly uses nrow1.
err3ap = sum(err3a) / nrow1;
err3bp = sum(err3b) / nrow1;
err3p = errweight * err3ap + (1-errweight) * err3bp;
netres_in = [err3ap err3bp err3p];
% ----- Finite-difference partials of the net's fitted probabilities -----
PP = x(1:nrow1,:); TT = y(1:nrow1,:);
delta = .01;
[rp, cp] = size(PP);
IIP = eye(cp) * delta;
for jj = 1:cp,
    PPX = kron(ones(rp,1), IIP(jj,:));
    PP0 = PP + PPX;                % bump regressor jj by delta
    TT = T(1:nrow1,:);
    PPZ = detrend(PP0,0);
    for ii = 1:cp,
        % BUG FIX: the original read PPZ(:,i) with the stale outer index i,
        % so the wrong column was rescaled.
        PPZ(:,ii) = PPZ(:,ii) / sigx(ii);
    end
    for i = 1:cx,
        PPN1(:,i) = hsquasher(PP0(:,i), smax, smin);
        PPN2(:,i) = 1 ./ (1 + exp(-(PPZ(:,i))));
    end
    if helge == 0, PPN = PP0;
    elseif helge == 1, PPN = PPN1;
    elseif helge == 2, PPN = PPN2;
    else for i = 1:cp, PPN(:,i) = (PP0(:,i)-minx(i)) ./ (maxx(i)-minx(i));
        end
    end
    clear global P T;
    global P T;
    P = PPN;
    T = TT;
    [junk1,junk2,A3der] = feval(fun,beta);
    % Undo the scaling, same as for A3 above.
    if helge == 0, A3dern = A3der;
    elseif helge == 1,
        for i = 1:cy,
            A3dern(:,i) = helgeyx(A3der(:,i), maxy(i), miny(i), smax, smin);
        end
    elseif helge == 2,
        A3derx = -log(1./A3der - ones(size(A3der)));
        A3derx = real(A3derx); [junkr, junkc] = size(A3derx);
        A3derx = kron(ones(junkr,1),meany) + A3derx .* kron(ones(junkr,1),sigy);
        A3dern = A3derx;
    else
        for i = 1:cy,
            A3dern(:,i) = A3der(:,i) * (maxy(:,i)-miny(:,i)) + miny(:,i);
        end
    end
    PDERIVNETF(:,jj) = (A3dern - A3real) ./ delta;
end
% ----- Logit analysis and derivative -----
clear global P T;
global P T;
P = x(1:nrow1,:);
T = y(1:nrow1,:);
[rp, cp] = size(P);
nparm1 = cp + 1;
if nargin > 9, beta10 = beta1init; else beta10 = randn(1, nparm1) * .1; end;
if gendum >= 1, beta1 = gen7f(fun1,beta10,popsize,maxgen);
else beta1 = beta10;
end
if gendum == 1, [beta1,fval1,exit1,output1,grad1,HESSL] = fminunc(fun1,beta1,options);
    [LIKL,sse4,A4,PDERIVLOG] = feval(fun1,beta1);
elseif gendum > 1, [LIKL,sse4,A4,PDERIVLOG] = feval(fun1,beta1);
else [beta1,fval1,exit1,output1,grad1,HESSL] = fminunc(fun1,beta10,options);
    [LIKL,sse4,A4,PDERIVLOG] = feval(fun1,beta1);
end
A4real = A4;
A4 = A4 > limit;
err4 = A4 - T;
err4a = err4 > 0;
err4b = err4 < 0;
err4ap = sum(err4a) / nrow1;   % BUG FIX: was / nrow
err4bp = sum(err4b) / nrow1;   % BUG FIX: was / nrow
err4p = errweight * err4ap + (1-errweight) * err4bp;
logitres_in = [err4ap err4bp err4p];
for jj = 1:cp,
    PP = x(1:nrow1,:); TT = y(1:nrow1,:);
    PPX = kron(ones(rp,1), IIP(jj,:));
    PP0 = PP + PPX;
    PPN = PP0;
    clear global P T;
    global P T;
    P = PPN;
    T = TT;
    [junk1,junk2,A4der] = feval(fun1,beta1);
    PDERIVLOGF(:,jj) = (A4der - A4real) ./ delta;
end
% ----- Probit analysis and derivative -----
clear global P T;
global P T;
P = x(1:nrow1,:);
T = y(1:nrow1,:);
if nargin > 9, beta20 = beta2init; else beta20 = randn(1,nparm1) * .1; end;
if gendum >= 1,
    beta2 = gen7f(fun2,beta20,popsize,maxgen_probit);
else beta2 = beta20;
end
if gendum == 1,
    [beta2,feval2,exit2,output2,grad2,HESSP] = fminunc(fun2,beta2,options);
    [LIKP,sse5,A5,PDERIVPROB] = feval(fun2,beta2);
elseif gendum > 1, [LIKP,sse5,A5,PDERIVPROB] = feval(fun2,beta2);
else [beta2,feval2,exit2,output2,grad2,HESSP] = fminunc(fun2,beta20,options);
    [LIKP,sse5,A5,PDERIVPROB] = feval(fun2,beta2);
end
A5real = A5;
A5 = A5 > limit;
err5 = A5 - T;
err5a = err5 > 0;
err5b = err5 < 0;
err5ap = sum(err5a) / nrow1;   % BUG FIX: was / nrow
err5bp = sum(err5b) / nrow1;   % BUG FIX: was / nrow
err5p = errweight * err5ap + (1-errweight) * err5bp;
probitres_in = [err5ap err5bp err5p];
for jj = 1:cp,
    PP = x(1:nrow1,:); TT = y(1:nrow1,:);
    PPX = kron(ones(rp,1), IIP(jj,:));
    PP0 = PP + PPX;
    PPN = PP0;
    clear global P T;
    global P T;
    P = PPN;
    T = TT;
    [junk1,junk2,A5der] = feval(fun2,beta2);
    PDERIVPROBF(:,jj) = (A5der - A5real) ./ delta;
end
% ----- Gompit analysis and derivative -----
clear global P T;
global P T;
P = x(1:nrow1,:);
T = y(1:nrow1,:);
if nargin > 9, beta30 = beta3init; else beta30 = randn(1,nparm1) * .1; end;
if gendum >= 1,
    beta3 = gen7f(fun3,beta30,popsize,maxgen_gompit);
else beta3 = beta30;
end
if gendum == 1,
    [beta3] = fminunc(fun3,beta3,options);
    [LIKG,sse6,A6,PDERIVGOMPIT] = feval(fun3,beta3);
% BUG FIX: the next two branches assigned PDERIVBOMPIT (typo), leaving the
% declared output PDERIVGOMPIT undefined whenever gendum ~= 1.
elseif gendum > 1, [LIKG,sse6,A6,PDERIVGOMPIT] = feval(fun3,beta3);
else [beta3] = fminunc(fun3,beta30,options); [LIKG,sse6,A6,PDERIVGOMPIT] = feval(fun3,beta3);
end
A6real = A6;
A6 = A6 > limit;
err6 = A6 - T;
err6a = err6 > 0;
err6b = err6 < 0;
err6ag = sum(err6a) / nrow1;   % BUG FIX: was / nrow
err6bg = sum(err6b) / nrow1;   % BUG FIX: was / nrow
err6g = errweight * err6ag + (1-errweight) * err6bg;
gompitres_in = [err6ag err6bg err6g];
for jj = 1:cp,
    PP = x(1:nrow1,:); TT = y(1:nrow1,:);
    PPX = kron(ones(rp,1), IIP(jj,:));
    PP0 = PP + PPX;
    PPN = PP0;
    clear global P T;
    global P T;
    P = PPN;
    T = TT;
    [junk1,junk2,A6der] = feval(fun3,beta3);
    PDERIVGOMPITF(:,jj) = (A6der - A6real) ./ delta;
end
% ----- Out-of-sample evaluation -----
if percent < 1,
    xout = x(nrow11:nrow,:);
    yout = y(nrow11:nrow,:);
    [n1 c1] = size(yout);
    sample2 = xout;
    % group holds 0/1 labels here (decremented after the in-sample call),
    % so classify returns 0/1 labels as well.
    class2 = classify(sample2, training, group);
    % BUG FIX: the original compared class2 (0/1) against yout+1 (1/2),
    % so a false positive could never be recorded; compare against the
    % thresholded 0/1 out-of-sample truth instead.
    yout1 = yout > limit;
    test2 = (class2 - yout1);
    err11 = test2 > 0;
    err21 = test2 < 0;
    err11p = sum(err11) / n1; err21p = sum(err21) / n1;
    testsc2 = errweight * err11p + (1-errweight) * err21p;
    discrimres_out = [err11p err21p testsc2];
    % Neural net out of sample
    T1 = y(nrow1+1:end,:);
    clear global P T;
    global P T;
    T = TN(nrow1+1:end,:);
    P = PN(nrow1+1:end,:);
    [junk,junk,A31] = feval(fun,beta);
    if helge == 0, A31n = A31;
    elseif helge == 1,
        for i = 1:cy,
            % BUG FIX: this originally assigned into A3n (the in-sample
            % array), leaving A31n undefined for helge == 1.
            A31n(:,i) = helgeyx(A31(:,i), maxy(i), miny(i), smax, smin);
        end
    elseif helge == 2,
        A31x = -log(1./A31 - ones(size(A31)));
        A31x = real(A31x); [junkr, junkc] = size(A31x);
        A31x = kron(ones(junkr,1),meany) + A31x .* kron(ones(junkr,1),sigy);
        A31n = A31x;
    else
        for i = 1:cy,
            A31n(:,i) = A31(:,i) * (maxy(:,i)-miny(:,i)) + miny(:,i);
        end
    end
    A31 = A31n;
    A31real = A31;
    A31 = A31 > limit;
    err31 = A31 - T1;
    err31a = err31 > 0;
    err31b = err31 < 0;
    err31ap = sum(err31a) / n1;
    err31bp = sum(err31b) / n1;
    err31p = errweight * err31ap + (1-errweight) * err31bp;
    netres_out = [err31ap err31bp err31p];
    % Logit out of sample predictions
    clear global P T;
    global P T;
    T = y(nrow1+1:end,:);
    P = x(nrow1+1:end,:);
    [ljunk1,sse41,A41] = feval(fun1,beta1);
    A41real = A41;
    A41 = A41 > limit;
    err41 = A41 - T;
    err41a = err41 > 0;
    err41b = err41 < 0;
    err41ap = sum(err41a) / n1;
    err41bp = sum(err41b) / n1;
    err41p = errweight * err41ap + (1-errweight) * err41bp;
    % BUG FIX: the first entry was err31ap (the net's false-positive rate).
    logitres_out = [err41ap err41bp err41p];
    % Probit out of sample predictions
    [ljunk2,sse51,A51] = feval(fun2,beta2);
    A51real = A51;
    A51 = A51 > limit;   % BUG FIX: was A51 > 0, always true for probabilities
    err51 = A51 - T;
    err51a = err51 > 0;
    err51b = err51 < 0;
    err51ap = sum(err51a) / n1;
    err51bp = sum(err51b) / n1;
    err51p = errweight * err51ap + (1-errweight) * err51bp;
    probitres_out = [err51ap err51bp err51p];
    % Gompit out of sample predictions
    [ljunk2,sse61,A61] = feval(fun3,beta3);
    A61real = A61;
    A61 = A61 > limit;   % BUG FIX: was A61 > 0, always true for probabilities
    err61 = A61 - T;
    err61a = err61 > 0;
    err61b = err61 < 0;
    err61ap = sum(err61a) / n1;
    err61bp = sum(err61b) / n1;
    err61p = errweight * err61ap + (1-errweight) * err61bp;
    gompitres_out = [err61ap err61bp err61p];
else
    % No holdout sample: fill the out-of-sample slots with placeholders.
    discrimres_out = ones(1,3);
    netres_out = ones(1,3);
    logitres_out = ones(1,3);
    probitres_out = ones(1,3);
    gompitres_out = ones(1,3);
    A31real = zeros(1,1);
    A41real = zeros(1,1);
    A51real = zeros(1,1);
    A61real = zeros(1,1);
end
% ----- Assemble the outputs -----
betalp = [beta1; beta2; beta3];
betanet = beta;
A1hat = [A3real A4real A5real A6real];
A2hat = [A31real A41real A51real A61real];
Ahatin = A1hat;
Ahatout = A2hat;
DISCRIMRES = [discrimres_in; discrimres_out];
NETRES = [netres_in; netres_out];
LOGITRES = [logitres_in; logitres_out];
PROBITRES = [probitres_in; probitres_out];
GOMPITRES = [gompitres_in; gompitres_out];
LIKELIHOOD = [LIK3; LIKL; LIKP; LIKG];