⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 20041006.m

📁 matlab粒子群神经网络的预测编程实现,电力负荷预测本人已实现
💻 M
字号:
function psobp 
% PSOBP  Train a BP feed-forward neural network with particle swarm
%        optimization (PSO) and use it to forecast day-ahead electricity
%        prices/load for 2004-10-06.
% BP neural network trained by PSO algorithm
clc
clear all
% Build training samples. (The original comment referred to a surface-water
% eutrophication evaluation standard; that is stale -- the data below are
% price/load values, 5 inputs per sample.)
st=cputime;  % CPU-time stamp; elapsed time 'et' is computed near the end
% AllSamIn: raw training inputs, one 5-element sample per row; transposed
% below so each COLUMN is one sample (5 inputs x 112 samples).
AllSamIn=...
  [     70.00  60.00  60.00  60.00  70.00 
 70.00  65.00  65.00  65.00  70.00 
 70.00  75.00  70.00  70.00  70.00 
 70.00  75.00  70.00  65.00  70.00 
 70.00  75.00  65.00  65.00  68.00 
 70.00  75.00  65.00  65.00  68.00 
 70.00  75.00  65.00  65.00  68.00 
 70.00  75.00  65.00  65.00  68.00 
 70.00  75.00  65.00  65.00  70.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  65.00  65.00  60.00  68.00 
 70.00  75.00  65.00  65.00  68.00 
 70.00  75.00  65.00  65.00  68.00 
 70.00  75.00  65.00  65.00  68.00 
 70.00  75.00  65.00  65.00  68.00 
135.00 120.00 135.00 100.00 105.00 
135.00 120.00 135.00 100.00 105.00 
135.00 120.00 135.00 100.00 105.00 
135.00 130.00 135.00 100.00 105.00 
135.00 135.00 135.00  90.00 105.00 
135.00 120.00 135.00 105.00 105.00 
130.00 120.00  85.00  85.00  90.00 
110.00 120.00  80.00  80.00  90.00 
120.00 120.00  80.00  80.00  90.00 
120.00 120.00  80.00  80.00  90.00 
120.00 100.00  80.00  80.00  90.00 
135.00 135.00 120.00 105.00 105.00 
135.00 130.00 130.00 105.00 105.00 
135.00 130.00 130.00 105.00 105.00 
135.00 135.00 135.00 105.00 105.00 
135.00 135.00 135.00 105.00 105.00 
110.00 110.00 110.00  85.00  85.00 
110.00 110.00 110.00  85.00  85.00 
110.00 110.00 110.00  85.00  85.00 
110.00 110.00 110.00  85.00  85.00 
100.00 100.00  80.00  85.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
100.00  85.00  80.00  80.00  85.00 
100.00  85.00  80.00  80.00  85.00 
100.00 100.00  80.00  80.00  85.00 
135.00 135.00 135.00 135.00 135.00 
135.00 135.00 135.00 135.00 135.00 
135.00 135.00 135.00 135.00 135.00 
135.00 135.00 135.00 135.00 135.00 
135.00 135.00 135.00 135.00 135.00 
135.00 135.00 135.00 135.00 135.00 
135.00 135.00 135.00 135.00 135.00 
135.00 135.00 135.00 135.00 135.00 
125.00 135.00 135.00 135.00 135.00 
125.00 130.00 130.00 130.00 135.00 
110.00 130.00 130.00 130.00 130.00 
125.00 130.00 130.00 130.00 135.00 
125.00 120.00 115.00 115.00 120.00 
125.00 120.00 115.00 115.00 120.00 
125.00 120.00 115.00 115.00 120.00 
125.00 120.00 115.00 115.00 100.00 
 70.00  75.00  60.00  60.00  66.00 
 70.00  75.00  60.00  60.00  66.00 
 70.00  75.00  60.00  60.00  66.00 
 70.00  75.00  60.00  60.00  66.00   
 ]';
% add noise to the outputs (disabled)
% NoiseVar=0.1;
% rand('state',sum(100*clock));
% AllSamOut: training targets, one value per sample (transposed to a row).
AllSamOut=[        70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
 70.00 
135.00 
135.00 
135.00 
135.00 
135.00 
135.00 
130.00 
110.00 
120.00 
120.00 
120.00 
135.00 
135.00 
135.00 
135.00 
135.00 
110.00 
110.00 
110.00 
110.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
100.00 
135.00 
135.00 
135.00 
135.00 
135.00 
135.00 
135.00 
135.00 
125.00 
125.00 
110.00 
125.00 
125.00 
125.00 
125.00 
125.00 
 70.00 
 70.00 
 70.00 
 70.00 
      ]'; %+NoiseVar*randn(1,1000);

% preprocessing: normalize inputs and outputs to [-1,+1]
global minAllSamOut;
global maxAllSamOut;
[AllSamInn,minAllSamIn,maxAllSamIn,AllSamOutn,minAllSamOut,maxAllSamOut] = premnmx(AllSamIn,AllSamOut);
% (stale comment) originally: "take 10% of the samples as a test set, the
% rest for training" -- NOTE(review): the code below actually reuses ALL
% samples for both training and testing; confirm this was intended.

% Test inputs: the full (raw) sample set is reused as the test set.
TestSamIn=AllSamIn;
% RealTestSamOut: actual observed values for the test day, used only for
% error reporting and plotting (transposed to a row below).
RealTestSamOut=[   70.00
 70.00
 70.00
 70.00
 70.00
 70.00
 70.00
 70.00
 70.00
 70.00
 68.00
 68.00
 70.00
 70.00
 70.00
 70.00
 70.00
 70.00
 68.00
 70.00
 70.00
 68.00
 68.00
 70.00
 70.00
 70.00
 70.00
 70.00
133.00
133.00
133.00
135.00
135.00
135.00
133.00
110.00
118.00
118.00
118.00
130.00
135.00
135.00
135.00
135.00
110.00
110.00
110.00
110.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
 98.00
135.00
135.00
135.00
135.00
135.00
135.00
135.00
135.00
135.00
135.00
135.00
135.00
135.00
135.00
130.00
123.00
 68.00
 68.00
 68.00
 68.00  
]';
% normalize the test pair; min/max of the real outputs are kept to
% denormalize the network's predictions later
[TestSamInn,minTestSamIn,maxTestSamIn,RealTestSamOutn,minRealTestSamOut,maxRealTestSamOut] = premnmx(TestSamIn,RealTestSamOut);

% training samples (all samples are used for training)
TrainSamIn=AllSamIn;
TrainSamOut=AllSamOut;
[TrainSamInn,minTrainSamIn,maxTrainSamIn,TrainSamOutn,minTrainSamOut,maxTrainSamOut] = premnmx(TrainSamIn,TrainSamOut);
% Evaluation Sample
%EvaSamIn=...
 %  [6.5400 	9.4000 	8.2200 	5.9100 	6.1300 	7.8400 	8.2600 	8.7900 	7.2900 	5.9300 	4.6900 	3.5100 
  %  0.1270 	0.0390 	0.0820 	0.0860 	0.1230 	0.1370 	0.0190 	0.0640 	0.0880 	0.0500 	0.0860 	0.0750 
   % 1.2840 	1.3580 	1.3200 	0.4500 	2.2200 	1.4700 	1.0900 	1.6600 	0.8400 	0.6600 	0.5500 	0.5200 
   % 3.3400 	4.7200 	5.0300 	6.0000 	7.9600 	4.6600 	4.0000 	4.1500 	4.0000 	3.8100 	1.6300 	3.4500 
    %0.3500 	0.4500 	0.3800 	0.4500 	0.3000 	0.4500 	0.4000 	0.4000 	0.3000 	0.3000 	0.4000 	0.3500];
%EvaSamInn=tramnmx(EvaSamIn,minAllSamIn,maxAllSamIn); % preprocess
%**********************************************************
% training set
global Ptrain;
Ptrain = TrainSamInn;
global Ttrain;
Ttrain = TrainSamOutn;
% testing set
Ptest = TestSamInn;

%**********************************************************
% Initialize BPN parameters
global indim;
indim=5;       % number of network inputs
global hiddennum;
hiddennum=3;   % hidden-layer neurons
global outdim;
outdim=1;      % network outputs
%**********************************************************
% Initialize PSO parameters
vmax=0.5; % Maximum velocity
minerr=0.0001; % Minimum error (early-stop threshold on best fitness)
wmax=0.90;
wmin=0.30;
global itmax; %Maximum iteration number
itmax=100;
c1=2;     % cognitive acceleration coefficient
c2=1.8;   % social acceleration coefficient
%cf=c1+c2;
% precompute the inertia weight schedule, declining linearly wmax -> wmin
for iter=1:itmax
    W(iter)=wmax-((wmax-wmin)/itmax)*iter; % weight declining linearly
end 
% particles are initialized between (a,b) randomly
a=-1;  
b=1;
%Between (m,n), (which can also be started from zero)
m=-1;
n=1;
global N; % number of particles
N=40;
global D; % length of particle
% particle encodes all network weights and biases:
% (indim+1)*hiddennum covers IW+b1, (hiddennum+1)*outdim covers LW+b2
D=(indim+1)*hiddennum+(hiddennum+1)*outdim;
global fvrec;
MinFit=[];
BestFit=[];
%MaxFit=[];
%MeanFit=[];
    
% Initialize positions of particles
rand('state',sum(100*clock));
X=a+(b-a)*rand(N,D,1);
%Initialize velocities of particles
V=m+(n-m)*rand(N,D,1);

%**********************************************************
%Function to be minimized, performance function,i.e.,mse of net work
global net;
net=newff(minmax(Ptrain),[hiddennum,outdim],{'tansig','purelin'});
% evaluate fitness (training mse) of all particles at iteration 1
fitness=fitcal(X,net,indim,hiddennum,outdim,D,Ptrain,Ttrain,minAllSamOut,maxAllSamOut);
fvrec(:,1,1)=fitness(:,1,1);
%[maxC,maxI]=max(fitness(:,1,1));
%MaxFit=[MaxFit maxC];
%MeanFit=[MeanFit mean(fitness(:,1,1))];
[C,I]=min(fitness(:,1,1));
MinFit=[MinFit C];
BestFit=[BestFit C];
L(:,1,1)=fitness(:,1,1); %record the fitness of particle of every iterations
B(1,1,1)=C;  %record the minimum fitness of particle
gbest(1,:,1)=X(I,:,1);  %the global best x in population
%********************************************************
%Matrix composed of gbest vector 
for p=1:N 
    G(p,:,1)=gbest(1,:,1);
end
% at iteration 1 each particle's personal best is its initial position
for i=1:N;
    pbest(i,:,1)=X(i,:,1);
end
% first velocity/position update (standard PSO update rule)
V(:,:,2)=W(1)*V(:,:,1)+c1*rand*(pbest(:,:,1)-X(:,:,1))+c2*rand*(G(:,:,1)-X(:,:,1));
%V(:,:,2)=cf*(W(1)*V(:,:,1)+c1*rand*(pbest(:,:,1)-X(:,:,1))+c2*rand*(G(:,:,1)-X(:,:,1)));
%V(:,:,2)=cf*(V(:,:,1)+c1*rand*(pbest(:,:,1)-X(:,:,1))+c2*rand*(G(:,:,1)-X(:,:,1)));
% limits velocity of particles by vmax
for ni=1:N
    for di=1:D
        if V(ni,di,2)>vmax
            V(ni,di,2)=vmax;
        elseif V(ni,di,2)<-vmax
            V(ni,di,2)=-vmax;
        else
            V(ni,di,2)=V(ni,di,2);
        end
    end
end            
X(:,:,2)=X(:,:,1)+V(:,:,2);
%******************************************************
% Main PSO loop. The third array dimension ("page") j is the iteration
% index: X/V/pbest/G/gbest/L/B all keep one page per iteration.
for j=2:itmax 
    %disp('Iteration and Current Best Fitness')
    %disp(j-1)
    %disp(B(1,1,j-1))
% Calculation of new positions 
    % fitcal fills only the LAST page of its result, which is page j here
    fitness=fitcal(X,net,indim,hiddennum,outdim,D,Ptrain,Ttrain,minAllSamOut,maxAllSamOut);
    fvrec(:,1,j)=fitness(:,1,j);
    %[maxC,maxI]=max(fitness(:,1,j));
    %MaxFit=[MaxFit maxC];
    %MeanFit=[MeanFit mean(fitness(:,1,j))];
    [C,I]=min(fitness(:,1,j));
    MinFit=[MinFit C];   
    BestFit=[BestFit min(MinFit)];
    L(:,1,j)=fitness(:,1,j);
    B(1,1,j)=C;
    gbest(1,:,j)=X(I,:,j);
    % I is now the iteration whose best fitness is lowest so far
    [C,I]=min(B(1,1,:));
    % keep gbest is the best particle of all have occured
    if B(1,1,j)<=C
        gbest(1,:,j)=gbest(1,:,j); 
    else
        gbest(1,:,j)=gbest(1,:,I);
    end 
    % early stop once the best fitness reaches the error threshold
    if C<=minerr, break, end
    %Matrix composed of gbest vector 
    if j>=itmax, break, end
    for p=1:N
         G(p,:,j)=gbest(1,:,j);
    end
    % personal best: the position at the iteration with lowest fitness
    for i=1:N;
        [C,I]=min(L(i,1,:));
        if L(i,1,j)<=C
            pbest(i,:,j)=X(i,:,j);
        else
            pbest(i,:,j)=X(i,:,I);
        end
    end
    V(:,:,j+1)=W(j)*V(:,:,j)+c1*rand*(pbest(:,:,j)-X(:,:,j))+c2*rand*(G(:,:,j)-X(:,:,j));
    %V(:,:,j+1)=cf*(W(j)*V(:,:,j)+c1*rand*(pbest(:,:,j)-X(:,:,j))+c2*rand*(G(:,:,j)-X(:,:,j)));
    %V(:,:,j+1)=cf*(V(:,:,j)+c1*rand*(pbest(:,:,j)-X(:,:,j))+c2*rand*(G(:,:,j)-X(:,:,j)));
    % clamp velocities to [-vmax, vmax]
    for ni=1:N
        for di=1:D
            if V(ni,di,j+1)>vmax
                V(ni,di,j+1)=vmax;
            elseif V(ni,di,j+1)<-vmax
                V(ni,di,j+1)=-vmax;
            else
                V(ni,di,j+1)=V(ni,di,j+1);
            end
        end
    end     
    X(:,:,j+1)=X(:,:,j)+V(:,:,j+1);
end
% Report final iteration and rebuild the best network found by the swarm.
disp('Iteration and Current Best Fitness')
disp(j)
disp(B(1,1,j))
disp('Global Best Fitness and Occurred Iteration')
[C,I]=min(B(1,1,:))
% simulation network
% Decode gbest (page j) into network weights/biases.
% Particle layout: [IW (indim*hiddennum) | LW (hiddennum*outdim) |
%                   b1 (hiddennum) | b2 (outdim)]
for t=1:hiddennum
    x2iw(t,:)=gbest(1,((t-1)*indim+1):t*indim,j);
end
% Fixed: advance the slice with r. The original always read the first
% hiddennum layer weights for every output row, which was only correct
% for outdim==1 (indices are identical in that case).
for r=1:outdim
    x2lw(r,:)=gbest(1,(indim*hiddennum+(r-1)*hiddennum+1):(indim*hiddennum+r*hiddennum),j);
end
% Biases follow all the weights. For outdim==1 this start index equals the
% original (indim+1)*hiddennum+1, so behavior is unchanged.
x2b=gbest(1,(indim*hiddennum+hiddennum*outdim+1):D,j);
x2b1=x2b(1:hiddennum).';
x2b2=x2b(hiddennum+1:hiddennum+outdim).';
net.IW{1,1}=x2iw;
net.LW{2,1}=x2lw;
net.b{1}=x2b1;
net.b{2}=x2b2;
%net.IW{1,1}
%net.LW{2,1}
%net.b{1}
%net.b{2}
%nettesterr=mse(sim(net,Ptest)-Ttest);
% Simulate on the test set and denormalize back to original units.
TestSamOut = sim(net,Ptest);
[a]=postmnmx(TestSamOut,minRealTestSamOut,maxRealTestSamOut);
ae=abs(a-RealTestSamOut)   % absolute error per period (printed)
mae=mean(ae)               % mean absolute error (printed)
re=(a-RealTestSamOut)./RealTestSamOut   % relative error (printed)
mre=mean(abs(re))          % mean relative error (printed)
a
%EvaSamOutn = sim(net,EvaSamInn);
%EvaSamOut = postmnmx(EvaSamOutn,minAllSamOut,maxAllSamOut);


% Figure 1: best-so-far fitness curve (log scale) over iterations.
figure(1)
grid
hold on
%plot(MaxFit,'k');
plot(log(BestFit),'--k','linewidth',2);
title('适应度');
xlabel('迭代次数');
ylabel('fit');
%plot(log(MinFit),'b');


% Figure 2: predicted (blue) vs. actual (black) price for the test day.
figure(2) 
grid
hold on
plot(a,'b');
plot(RealTestSamOut,'-k','linewidth',2);
title('10月6日电价预测');
xlabel('时段');
% Fixed: the closing quote and parenthesis of this ylabel were split
% across two lines in the original, which is a syntax error.
ylabel('电价(元)');
% Figure 3: absolute prediction error per period.
figure(3)
grid
hold on
plot(ae,'k','linewidth',2);
title('绝对误差');
xlabel('时段');
ylabel('误差');

% Figure 4: relative prediction error per period.
figure(4)
grid
hold on
plot(re,'k','linewidth',2);
title('相对误差');
xlabel('时段');
ylabel('误差');
% total CPU time consumed by the run
et=cputime-st;


%plot(EvaSamOut,'k');


% NOTE(review): this saves the whole workspace to a file literally named
% 'D:'; a full path such as save('D:\psobp.mat') was probably intended --
% confirm before relying on the saved results.
save D:
%--------------------------------------------------------------------------
% fitcal: fitness of all particles in a specific generation.
% Decodes each particle into the BPN's weight matrices and bias vectors,
% loads them into net, and returns the mse of the training error in the
% original (denormalized) units.
%   pm      - N x D x z array of particle positions; only the LAST page z
%             (the current generation) is evaluated
%   net     - feed-forward network created by newff
%   indim, hiddennum, outdim - layer sizes
%   D       - particle length = (indim+1)*hiddennum+(hiddennum+1)*outdim
%   Ptrain, Ttrain - normalized training inputs/targets
%   minAllSamOut, maxAllSamOut - premnmx bounds used to denormalize
%   fitval  - N x 1 x z; fitness is stored only in page z
function fitval = fitcal(pm,net,indim,hiddennum,outdim,D,Ptrain,Ttrain,minAllSamOut,maxAllSamOut)  
    [x,y,z]=size(pm);
    for i=1:x
        % input-to-hidden weights: first indim*hiddennum entries
        for j=1:hiddennum
            x2iw(j,:)=pm(i,((j-1)*indim+1):j*indim,z);
        end
        % hidden-to-output weights: next hiddennum*outdim entries.
        % Fixed: advance the slice with k; the original read the same first
        % hiddennum entries for every output row (only correct for outdim==1,
        % where the indices are identical to these).
        for k=1:outdim
            x2lw(k,:)=pm(i,(indim*hiddennum+(k-1)*hiddennum+1):(indim*hiddennum+k*hiddennum),z);
        end
        % biases: remaining hiddennum+outdim entries. For outdim==1 this
        % start index equals the original (indim+1)*hiddennum+1.
        x2b=pm(i,(indim*hiddennum+hiddennum*outdim+1):D,z);
        x2b1=x2b(1:hiddennum).';
        x2b2=x2b(hiddennum+1:hiddennum+outdim).';
        net.IW{1,1}=x2iw;
        net.LW{2,1}=x2lw;
        net.b{1}=x2b1;
        net.b{2}=x2b2;
        % training error in original units (both sides denormalized)
        error=postmnmx(sim(net,Ptrain),minAllSamOut,maxAllSamOut)-postmnmx(Ttrain,minAllSamOut,maxAllSamOut);
        fitval(i,1,z)=mse(error);
    end
    
  

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -