⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 darrwin12x.m

📁 Neural Network in Finance (神经网络在金融界:赢得预言性的优势)全部原码。内容包括预测与估计
💻 M
字号:
function [testsc1,testsc2,err3p, err31p, err4p, err41p,err5p,err51p,pderiv,betalp,...
      tstatlp,betanet,tstatnet,Ahatin, Ahatout] =...
   darrwin12x(assetx,coldep,percent,errweight,limit,info,gendum,maxgen)
% DARRWIN12X  Compare discriminant, neural-net, logit and probit classifiers
% on a binary dependent variable, in and out of sample.
%
% Outputs:
%   testsc1, testsc2  : weighted in/out-of-sample error scores, discriminant model
%   err3p,  err31p    : neural-net weighted error percent, in / out of sample
%   err4p,  err41p    : logit weighted error percent, in / out of sample
%   err5p,  err51p    : probit weighted error percent, in / out of sample
%   pderiv            : stacked partial derivatives evaluated at the last
%                       in-sample row: [net analytic; net numeric; logit; probit]
%   betalp, tstatlp   : coefficients and t-stats, logit (row 1) / probit (row 2)
%   betanet, tstatnet : coefficients and t-stats for the neural net
%   Ahatin, Ahatout   : fitted probabilities [net logit probit], in / out of sample
%
% Inputs:
%   assetx    : data matrix (rows = observations), includes the dependent column
%   coldep    : column index of the 0/1 dependent variable in assetx
%   percent   : fraction of rows used as the in-sample (training) set
%   errweight : weight on type-1 errors in the combined error score
%   limit     : probability threshold for classifying an in-sample fit as 1
%   info      : [nlayer n1 n2 n3] -- hidden layers and neurons per layer
%   gendum    : 1 = genetic algorithm then gradient descent;
%               0 = gradient descent only; 2 = genetic algorithm only
%   maxgen    : number of generations for the genetic algorithm
%
% Relies on project helpers not part of MATLAB: cold, genetic5, emnetg1x,
% emnetg1d, logit, probit, logsigderx, logitder, probitder.

% Globals consumed by the emnetg1x / logit / probit objective functions.
global P T nlayer nneuron1 nneuron2 nneuron3;
nlayer = info(1);
nneuron1 = info(2);
nneuron2 = info(3);
nneuron3 = info(4);
% Genetic-algorithm settings shared by all three estimators.
popsize = 40; pc = .9; pdes = 0; toler = .000001; elite = 1;
[rr cc] = size(assetx);

% ---- Discriminant analysis (in sample) ----
yrhat = [assetx];
% cold() presumably returns assetx with column coldep removed -- TODO confirm.
y = assetx(:,coldep); x = cold(assetx,coldep);
[nrow ncol] = size(x);
nrow1 = round(percent * nrow);                    % in-sample observation count
nrow11 = nrow1 + 1; [nrow12 nrow13] = size(x(1:nrow1,:));
yy = y(1:nrow1,:); xx = x(1:nrow1,:);
sample1 = xx; 
training = xx;
group = yy + 1;                                   % classify() wants groups in {1,2}
class1 = classify(sample1, training, group);
test1 = (class1 - group);                         % +1 / -1 = the two error types
for i = 1:nrow1, if test1(i) == 1, err1(i) = 1; else err1(i) = 0; end; 
if test1(i) == -1, err2(i) = 1; else err2(i) = 0; end; end;
err1p = sum(err1) / nrow1;  err2p = sum(err2) / nrow1; 
testsc1 =  errweight * err1p + (1-errweight) * err2p;

% pbar = last in-sample row, the point at which derivatives are evaluated.
P = xx(1:nrow1,:)'; pbar = (xx(end,:)); pbar = pbar';
T = yy(1:nrow1,:)';

% ---- Neural network analysis ----
[rp,cp] = size(P);
% Free-parameter count for a 1-, 2- or 3-hidden-layer network
% (input weights + biases per layer + linear output layer).
if nlayer == 1, nparm = nneuron1*rp + 2 *nneuron1 + 1;
elseif nlayer == 2, nparm = nneuron1 * rp + nneuron1 + nneuron1 * nneuron2 + 2 * nneuron2 + 1;
else nparm = nneuron1 * rp + nneuron1 + nneuron1 * nneuron2 + nneuron2 + nneuron2 * nneuron3 + 2 * nneuron3 + 1;
   end
   maxgen1 = maxgen + 25;
% BUG FIX: original read "round*sqrt(1000 * nparm)", which multiplies by the
% round function instead of calling it; the intent is the rounded square root.
nepoch = round(sqrt(1000 * nparm));
tp = [25, nepoch, .02, .01, 1.07, .7, .9, 1.04]; 
pm = 1/nparm; elite = 1; 
pdes = 0; scale = 1;
beta0 = randn(1, nparm);
if gendum >= 1, 
   beta = genetic5('emnetg1x',nparm,popsize,maxgen,pc,pm,elite,pdes,maxgen1,toler,scale,beta0);
else beta = .01 * ones(1, nparm); end
[sse3,g,A3,W3,b3,W4,b4, W5, b5, W6, b6] = emnetg1x(beta);
if gendum <= 1,
   % Gradient-descent refinement of the (genetic or flat-start) weights.
   options = optimset('Display','iter', 'MaxIter', nepoch, 'maxfuneval', nepoch);
   [beta,fval,exit0,output0,grad0,HESSN] = fminunc('emnetg1x', beta, options);
      [sse3,g,A3, W3,b3,W4,b4, W5, b5, W6, b6] = emnetg1x(beta);
end
% (Removed a no-op else branch that self-assigned W3..b6 and, because four of
% the assignments were comma-terminated, echoed them to the console.)

A3real = A3;                   % keep fitted probabilities before thresholding
for i = 1:nrow1, if A3(i) > limit, A3(i) = 1; else A3(i) = 0; end; end
err3 = A3 - T;
for i = 1:nrow1, if err3(i) == 1, err3a(i) = 1; else err3a(i) = 0; end; end;
for i = 1:nrow1, if err3(i) == -1, err3b(i) = 1; else err3b(i) = 0; end; end;
% BUG FIX: normalize by the in-sample count nrow1 (as the discriminant and
% out-of-sample sections do), not the full-sample nrow, which understated
% the in-sample error rates.
err3ap = sum(err3a) / nrow1;
err3bp = sum(err3b) / nrow1;
err3p =  errweight * err3ap + (1-errweight) * err3bp;
% Analytic derivative at pbar, plus a one-sided finite-difference check.
pderivn = logsigderx(pbar,W3,b3,W4,b4,W5,b5,W6,nlayer);
[rowp colp] = size(pbar);
hdelta = .000001;
hdeltav = eye(rowp) * hdelta;
for i = 1: rowp,
   PDEL = pbar + hdeltav(:,i);
   [A3d] = emnetg1d(PDEL,beta);
   pdernum(i) = -(T(end) - A3d)/hdelta;
end
% BUG FIX: original assigned "stdev = 1", leaving stdevn undefined (and the
% next line crashing) whenever gendum > 1.
if gendum > 1, stdevn = 1; else stdevn = sqrt(diag(inv(HESSN)))'; end;
tstatn = beta ./ stdevn;

% ---- Logit analysis and derivative ----
% NOTE(review): genetic5 is called with nparm (the net parameter count) while
% the fallback start uses cc -- verify the intended logit dimension.
if gendum >= 1,    beta1 = genetic5('logit',nparm,popsize,maxgen,pc,pm,elite,pdes,maxgen1,toler,scale,beta0);
   else beta1 = .0001 * ones(1, cc); end
% BUG FIX: original set foptions-style elements (options(1)=1, options(14)=1000)
% on what is an optimset struct after the net stage; build a proper struct.
options = optimset('Display','iter', 'MaxIter', 1000, 'MaxFunEvals', 1000);
if gendum <= 1,[beta1,fval1,exit1,output1,grad1,HESSL] = fminunc('logit',beta1,options);
   [LIKL,g,sse4,A4] = logit(beta1);
else [LIKL, g,sse4,A4] = logit(beta1); end
A4real = A4;
for i = 1:nrow1, if A4(i) > limit, A4(i) = 1; else A4(i) = 0; end; end
err4 = A4 - T;
for i = 1:nrow1, if err4(i) == 1, err4a(i) = 1; else err4a(i) = 0; end; end;
for i = 1:nrow1, if err4(i) == -1, err4b(i) = 1; else err4b(i) = 0; end; end;
% BUG FIX: in-sample rates normalized by nrow1, not nrow (see net section).
err4ap = sum(err4a) / nrow1;
err4bp = sum(err4b) / nrow1;
err4p =  errweight * err4ap + (1-errweight) * err4bp;
pderivl = logitder(pbar, beta1);
if gendum > 1, stdev1 = 1; else stdev1 = sqrt(diag(inv(HESSL)))'; end;
tstat1 = beta1 ./ stdev1;

% ---- Probit analysis and derivative ----
if gendum >= 1,
   % (Removed a dead genetic1 call whose result was immediately overwritten.)
   beta2 = genetic5('probit',nparm,popsize,maxgen,pc,pm,elite,pdes,maxgen1,toler,scale,beta0);
   else beta2 = .0001 * ones(1, cc); end
options = optimset('Display','iter', 'MaxIter', 1000, 'MaxFunEvals', 1000);
if gendum <= 1, [beta2,feval2,exit2,output2,grad2,HESSP] = fminunc('probit',beta2,options);
   [LIKP,g, sse5,A5] = probit(beta2);
else [LIKP,g,sse5,A5] = probit(beta2); end
A5real = A5;
for i = 1:nrow1, if A5(i) > limit, A5(i) = 1; else A5(i) = 0; end; end
err5 = A5 - T;
for i = 1:nrow1, if err5(i) == 1, err5a(i) = 1; else err5a(i) = 0; end; end;
for i = 1:nrow1, if err5(i) == -1, err5b(i) = 1; else err5b(i) = 0; end; end;
% BUG FIX: in-sample rates normalized by nrow1, not nrow (see net section).
err5ap = sum(err5a) / nrow1;
err5bp = sum(err5b) / nrow1;
err5p =  errweight * err5ap + (1-errweight) * err5bp;
if gendum > 1, stdev2 = 1; else stdev2 = sqrt(diag(inv(HESSP)))'; end
tstat2 = beta2 ./ stdev2;
pderivpb = probitder(pbar, beta2);
pderiv = [pderivn; pdernum; pderivl; pderivpb];

% ---- Out-of-sample evaluation ----
yhat = A3'; ydep = T';
xout  = [x(nrow11:nrow,:)];
yout = y(nrow11:nrow,:);
[n1 c1] = size(yout);
sample2 = xout; 
% Discriminant model scored on the held-out rows, trained on the in-sample set.
class2 = classify(sample2, training, group); yout1 = yout + 1;
test2 = (class2 - yout1);
for i = 1:n1, if test2(i) == 1, err11(i) = 1; else err11(i) = 0; end; 
if test2(i) == -1, err21(i) = 1; else err21(i) = 0; end; end;
err11p = sum(err11) / n1;  err21p = sum(err21) / n1; 
testsc2 =  errweight * err11p + (1-errweight) * err21p;
T1 = yout'; 
P1 = xout';
% Feed the held-out sample through the fitted network.  Uses the old
% NN-toolbox logsig(N,b)/purelin(N,b) calling convention -- TODO confirm
% this matches the installed toolbox version.
if nlayer == 1,  A31 = feval('logsig', W3 * P1,b3');
   A31 = feval('purelin', W4 * A31, b4); 
elseif nlayer == 2,
   A31 = feval('logsig', W3 * P1, b3');
   A31 = feval('logsig', W4 * A31, b4');
   A31 = feval('purelin', W5 * A31, b5);
else
   A31 = feval('logsig', W3 * P1, b3');
   A31 = feval('logsig', W4 * A31, b4');
   A31 = feval('logsig', W5 * A31, b5');
   A31 = feval('purelin', W6 * A31, b6);
end

A31real = A31;
% Out-of-sample classification uses a fixed .5 cutoff (not `limit`).
for i = 1:n1, if A31(i) > .5, A31(i) = 1; else A31(i) = 0; end; end;
err31 = A31 - T1;
for i = 1:n1, if err31(i) == 1, err31a(i) = 1; else err31a(i) = 0; end; end;
for i = 1:n1, if err31(i) == -1, err31b(i) = 1; else err31b(i) = 0; end; end;
err31ap = sum(err31a) / n1;
err31bp = sum(err31b) / n1;
err31p =  errweight * err31ap + (1-errweight) * err31bp;
% Repoint the globals at the out-of-sample data so logit()/probit() below
% evaluate their fits on the held-out rows.
clear global P T; P = P1; T = T1; global P T;  
% Logit out-of-sample predictions
[ljunk1,g, sse41,A41] = logit(beta1);
A41real = A41;
for i = 1:n1, if A41(i) > .5, A41(i) = 1; else A41(i) = 0; end; end;
err41 = A41 - T;                         % T now holds the out-of-sample T1
for i = 1:n1, if err41(i) == 1, err41a(i) = 1; else err41a(i) = 0; end; end;
for i = 1:n1, if err41(i) == -1, err41b(i) = 1; else err41b(i) = 0; end; end;
err41ap = sum(err41a) / n1;
err41bp = sum(err41b) / n1;
err41p =  errweight * err41ap + (1-errweight) * err41bp;
% Probit out-of-sample predictions
[ljunk2,g, sse51,A51] = probit(beta2);
A51real = A51;
for i = 1:n1, if A51(i) > .5, A51(i) = 1; else A51(i) = 0; end; end;
err51 = A51 - T;                         % T now holds the out-of-sample T1
for i = 1:n1, if err51(i) == 1, err51a(i) = 1; else err51a(i) = 0; end; end;
for i = 1:n1, if err51(i) == -1, err51b(i) = 1; else err51b(i) = 0; end; end;
err51ap = sum(err51a) / n1;
err51bp = sum(err51b) / n1;
err51p =  errweight * err51ap + (1-errweight) * err51bp;

% ---- Assemble outputs ----
betalp= [beta1; beta2];
tstatlp = [tstat1; tstat2];
betanet = beta;
tstatnet = tstatn;
A1hat = [A3real' A4real' A5real'];       % in-sample probabilities: net/logit/probit
A2hat = [A31real' A41real' A51real'];    % out-of-sample probabilities
AHAT = [A1hat; A2hat];
Ahatin = A1hat;
Ahatout = A2hat;

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -