trainafterselectbaru.m
%function [datas,datad,datad1,M]=dscale
%DSCALE
%------
% This subfunction scales the data to values between 0 and 1
%
% datas = scaled data
% data  = actual data before scaling
% minv  = minimum of each data row
% maxv  = maximum of each data row
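% Min-max scaling applied below, row by row:
%   datas(i,:) = (data(i,:) - minv(i)) / (maxv(i) - minv(i))
% e.g. a row spanning [2,10] maps the value 6 to (6-2)/(10-2) = 0.5.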
load dat9features.txt; % 9 feature columns + 1 target column
newdata=dat9features;
%bearing=bearing(randperm(212),1:3); % Randomize
[R,Q] = size(newdata); % R = number of observations, Q = number of columns
P1 = newdata(:,1);
P2 = newdata(:,2);
P3 = newdata(:,3);
P4 = newdata(:,4);
P5 = newdata(:,5);
P6 = newdata(:,6);
P7 = newdata(:,7);
P8 = newdata(:,8);
P9 = newdata(:,9);
T = newdata(:,10); %output data set
Pa = P1';
Pb = P2';
Pc = P3';
Pd = P4';
Pe = P5';
Pf = P6';
Pg = P7';
Ph = P8';
Pi = P9';
Ta = T';
for i=1:R
data(1,i)=Pa(i);
data(2,i)=Pb(i);
data(3,i)=Pc(i);
data(4,i)=Pd(i);
data(5,i)=Pe(i);
data(6,i)=Pf(i);
data(7,i)=Pg(i);
data(8,i)=Ph(i);
data(9,i)=Pi(i);
data(10,i)=Ta(i);
end
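% Equivalent vectorized form of the copy loop above (same result):
%   data = newdata(:,1:10)';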
[N,M]=size(data); % N = 10 rows (variables), M = number of observations
for i=1:N
maxv(i)=data(i,1); % initialise the row maximum with the first observation
minv(i)=data(i,1); % initialise the row minimum with the first observation
for j=1:M
if data(i,j)>maxv(i)
maxv(i)=data(i,j);
end
if data(i,j)<minv(i)
minv(i)=data(i,j);
end
end
datas(i,:)=(data(i,:)-minv(i))/(maxv(i)-minv(i)); % no delay
%datad(i,1:M-1)=datas(i,2:M); % 1 delayed term
%datad1(i,1:M-2)=datas(i,3:M); % 2 delayed terms
end
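% A vectorized sketch of the same row-wise scaling (assumes every row has
% maxv > minv, i.e. no constant feature):
%   lo = min(data,[],2); hi = max(data,[],2);
%   datas = (data - repmat(lo,1,M)) ./ repmat(hi-lo,1,M);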
X=M;
% Training data
input(1,1:X)=datas(1,1:X); % column P1
input(2,1:X)=datas(2,1:X); % column P2
input(3,1:X)=datas(3,1:X); % column P3
input(4,1:X)=datas(4,1:X); % column P4
input(5,1:X)=datas(5,1:X); % column P5
input(6,1:X)=datas(6,1:X); % column P6
input(7,1:X)=datas(7,1:X); % column P7
input(8,1:X)=datas(8,1:X); % column P8
input(9,1:X)=datas(9,1:X); % column P9
output(1,1:X)=datas(10,1:X); % target T
[pn,meanp,stdp,tn,meant,stdt] = prestd(input,output);
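% Note: pn/tn (the zero-mean, unit-variance data returned by prestd) are not
% used below; training runs on the min-max scaled input/output, and meant/stdt
% are only reused by poststd at the end of the script.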
%[A B] = size(TrainIn);
in= input;
out= output;
input1=in*1.05;
output1 =out*1.05;
input2=in*1.1;
output2 = out*1.1;
ptr=in; ttr=out; % Training
v.P=input1; v.T=output1; % Validation
t.P=input2; t.T=output2; % Testing
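% Note: the "validation" and "test" sets above are simply the training data
% scaled by 1.05 and 1.1, not independent hold-out samples.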
S1=5; % Number of hidden-layer nodes
fnet=newff(minmax(input),[S1 1],{'tansig' 'purelin'},'trainlm');
fnet.trainParam.epochs=5000; % Maximum number of epochs
%fnet.trainParam.maxit=5000; % maxit is not a trainlm parameter; left commented out
fnet.trainParam.goal=1e-12; % Performance goal
fnet.trainParam.max_fail=1000; % Maximum validation failures
fnet.trainParam.show=10; % Progress display interval (epochs)
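% For reference only: a rough equivalent with the newer feedforwardnet API
% (a sketch, assuming a toolbox version that provides feedforwardnet; the
% newff call above is what this script actually uses):
%   net = feedforwardnet(S1,'trainlm');
%   net.trainParam.epochs = 5000; net.trainParam.goal = 1e-12;
%   net.trainParam.max_fail = 1000;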
fnet.iw{1,1} % display the current input weights (no semicolon, so they are printed)
fnet.b{1} % display the hidden-layer biases
fnet=init(fnet); % re-initialise the weights and biases
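% The call below uses the old-style signature train(net,P,T,Pi,Ai,VV,TV):
% the empty [] arguments are the (unused) input/layer delay states, and the
% structs v and t supply the validation and test sets.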
[fnet,tr]=train(fnet,ptr,ttr,[],[],v,t)
plot(tr.epoch,tr.perf,tr.epoch,tr.vperf,tr.epoch,tr.tperf);
legend('Training','Validation','Test',-1);
ylabel('Squared error');
xlabel('Epoch');
an=sim(fnet,input); % simulate the network on the training set
e=output'-an'; % training errors
perf=sse(e); % sum of squared errors
perf1=mse(e) % mean squared error (displayed)
van = sim(fnet,input1); % simulate the network on the validation set
ve=output1'-van'; % validation errors
verr1 = sse(ve);
verr2 = mse(ve)
tan = sim(fnet,input2); % simulate the network on the test set ('tan' shadows the built-in tan)
te = output2' - tan'; % test errors
terr = sse(te);
terr2 = mse(te)
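% Reminder: mse(e) = sse(e)/numel(e), so the two performance measures differ
% only by the number of error terms.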
a = poststd(an,meant,stdt);
for z=1
figure % open a new figure for the regression plot
[m(z),b(z),r(z)] = postreg(a(z,:),output(z,:));
end
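% postreg returns the slope (m), intercept (b) and correlation coefficient (r)
% of the linear regression between the network outputs and the targets.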
figure(1);
s=(1:48); % plot the graph of data vs. month (1986-2001)
plot(s,output(s),'r+-',s,an,'b.-')
title('Training ')
xlabel('Observation Data')
ylabel('Classification Output (Normalized)')
legend(' = actual',' = prediction')
figure(2);
s=(1:48); % plot the graph of data vs. month (1986-2001)
plot(s,output1(s),'r+-',s,van,'b.-')
title('Validation ')
xlabel('Observation Data')
ylabel('Classification Output (Normalized)')
legend(' = actual',' = prediction')
figure(3);
s=(1:48); % plot the graph of data vs. month (1986-2001)
plot(s,output2(s),'r+-',s,tan,'b.-')
title('Testing ')
xlabel('Observation Data')
ylabel('Classification Output (Normalized)')
legend(' = actual',' = prediction')