⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 forecast.m

📁 用神经网络做预测的程序,
💻 M
📖 第 1 页 / 共 2 页
字号:
    % --- Tail of the 2nd-stage network training (the start of this
    % --- branch lies above the visible region; the 'end' below closes a
    % --- construct defined there). mlp/netopt are Netlab routines:
    % --- build an MLP with linear outputs and train it with scaled
    % --- conjugate gradients ('scg') on combiner inputs P / targets T.
    numout = AHEAD;
    numhidden = 21; %round((numin+numout)/2);     
    NET = mlp(numin, numhidden, numout, 'linear', alpha);	            
    NET = netopt(NET, options, P,T, 'scg'); 

   % Save all per-level nets (net*), perceptron weights W, and the MLP NET.
   name = [PATH 'demand_mlp.mat'];     
   eval(['save ' name ' net*  W NET '])    
   
      
 end             
 %%%    End of networks training   %%%%



%  *************************************************************
%  *************************************************************


 if OPTION == 3    % Testing 
    
    figure

    % Test segment of the plain (level-0) series and the matching dates.
    X = WVSER0(start_tst:end_tst);	
    Date = DATE(start_tst:end_tst,1)'; 
    [Ni,Np] = size(X);
		% Preparing target for comparison

     % Restore the trained networks saved by the training branch
     % (per-level nets net*, perceptron weights W, 2nd-stage MLP NET).
     name = [PATH 'demand_mlp.mat'];           
     eval(['load ' name  ])    	% net  W NET
    
     nin= WIN;   % input window length (number of lags per predictor)

     % Predict every decomposition level plus the residual (lev = LEVEL+1).
     % Variables are accessed by constructed name via eval:
     %   WVSER<lev>        - series for the level
     %   net<lev>.step<D>  - trained predictor for horizon D
     %   p<lev>.step<D> / py<lev>.step<D> - prediction / matching target,
     %   both filled by this loop.
     for lev = 0:LEVEL+1
	     fprintf('Predict for level = %d\n\n',lev);	
	     ser = eval(['WVSER' num2str(lev) ]);
         tstx = ser(start_tst:end_tst);  
      
         for D=1:AHEAD   	
 	        fprintf('Predict for step = %d\n',D);	
            
	        if lev~=LEVEL+1
	           % Wavelet levels: forward pass through the saved MLP
	           % (tanh hidden layer, linear output), sliding a window of
	           % nin samples along the test series.
	           mlpnet=eval(['net' num2str(lev) '.step' num2str(D)]);
	           k = 0;
               while k < Np -(nin+D-1)
                    k= k + 1;      
                    xin = tstx(k:k+nin-1);   
		            tmpt(k,1)= tstx(k+nin+D-1);                   
                    nd = size(xin, 1);                       
		            z = tanh(xin*mlpnet.w1 + ones(nd, 1)*mlpnet.b1);
                    tmpo(k,:) = z*mlpnet.w2 + ones(nd, 1)*mlpnet.b2;  	
               end              
 	           eval(['p' num2str(lev) '.step' num2str(D) '= tmpo;'])
	           eval(['py' num2str(lev) '.step' num2str(D) '= tmpt;']);
               clear tmpo tmpt tmpa   xin z      	
               
           else		% regression for residual    
	   
	          % Residual level: plain linear regression with coefficient
	          % vector 'coef' on the same sliding windows.
	          nin= WIN;
	          coef =eval(['net' num2str(lev) '.step' num2str(D)]);
	          k = 0;
              while k< length(tstx) - (nin+D-1)
                     k = k + 1;
                     px(k,:) = tstx(k:k+nin-1); 
	          end  
  	          py = tstx(nin+D:Np);
	          pred = px*coef;    
	          eval(['p' num2str(lev) '.step' num2str(D) '= pred;']);
  	          eval(['py' num2str(lev) '.step' num2str(D) '= py;']);
              clear  coef px py pred 				
           end
 
         end		%complete prediction for all steps in one level

     end 		%complete prediction for all levels


    % Monitor prediction for each level
	% "one-step-ahead" prediction      
    % Visual sanity check: overlay predicted (red) and target (green)
    % samples 101..200 for every level/step, pausing between plots.
    for lev=0:LEVEL+1      
        fprintf('Predict for level = %d\n',lev);	
        for d=1:AHEAD
     	    fprintf('Predict for step = %d\n',d);	
            pred = eval(['p' num2str(lev) '.step' num2str(d) ]);  
                % Predicted series
	        dest = eval(['py' num2str(lev) '.step' num2str(d) ]); 
                % Target series corresponds to pred  
	        plot(pred(101:200),'r')
	        hold on; pause(1);
	        plot(dest(101:200),'g')
	        hold off; pause(1);  %pause;    
        end
   end	

   
   % Recombine per-level predictions.
   %   P    - feature matrix for the 2nd-stage combiner: beta-weighted
   %          step-1..AHEAD predictions of every level, concatenated
   %          column-wise.
   %   wvp  - direct reconstruction: beta-weighted sum over detail
   %          levels and the residual (all lev ~= 0).
   %   mlpp - level 0 alone, i.e. the single-MLP baseline prediction.
   % beta = 2^(-lev/2) is the synthesis weight; the residual level
   % (lev == LEVEL+1) reuses the deepest detail level's weight.
   P = [];   wvp= 0;   
   lev = -1;    
   while lev<LEVEL+1
 	     lev = lev + 1;       
 	     if lev== LEVEL+1
 	         beta = 2^(- LEVEL/2);            
         else
 	         beta =2^(-lev/2);             
         end
 	     for d=1:AHEAD
             tmp=eval(['p' num2str(lev) '.step' num2str(d) ]); 
             k = 0;
             % Truncate every step's series to the shortest common
             % length (that of the AHEAD-step-ahead predictions).
             while k< Np - (nin+AHEAD-1)
                       k = k + 1;
                       tmpP(k,d) = tmp(k);
             end 
                 clear tmp
          end 		
          P = [P beta*tmpP];

 	      if lev ~=0
 	         wvp = wvp + beta*tmpP;
          else
 	         mlpp = tmpP;	
 	      end 	
          clear tmpP tmp
    end    
 
    % For the plain target, wvp is the results from the 
    % direct summation of wavelet coefficients predictions 
    % mlpp:  prediction from a single, independent MLP

    % Prediction from the 2nd-stage combiners.
    % For each window position k:
    %   target(k,:) - true values X(WIN+k .. WIN+AHEAD+k-1)
    %   hyp1(k,:)   - linear perceptron prediction (weights W)
    %   hyp2(k,:)   - one-hidden-layer MLP prediction (struct NET)
    % Fix: the loop bound previously read 'Np-(WIN+D-1)' where D was a
    % stale index left over from the 'for D=1:AHEAD' loop above (equal
    % to AHEAD only by accident of control flow); use AHEAD explicitly.
     k = 0;
     while k < Np-(WIN+AHEAD-1)
          k = k +1;
	      target(k,:)=X(WIN+k:WIN+AHEAD+k-1);
	      Tdate(k,1)=Date(WIN+k);

           xin = P(k,:);   % concatenated per-level predictions, window k
           hyp1(k, :) = xin*W;     % W is perceptron's weight; 
					  % prediction from perceptron
 	       ndata = size(xin, 1);   % xin is a single row, so ndata == 1
           z = tanh(xin*NET.w1 + ones(ndata, 1)*NET.b1);
           hyp2(k,:) = z*NET.w2 + ones(ndata, 1)*NET.b2;
     end

    

	 % Undo the earlier normalisation/de-meaning so all series are back
	 % in the original data range:
	 % 	Ptar      - target series
	 % 	Pmlp      - single-MLP prediction
	 % 	Pwv       - direct summation of wavelet-coefficient predictions
	 % 	Phy1/Phy2 - hybrid (2nd-stage) predictions

    scl = 0.5*(Max-Min);            % gain of the inverse scaling
    ofs = 0.5*(Max+Min) + Me(1);    % range midpoint plus series mean
    Ptar = scl*target + ofs;
    Phy1 = scl*hyp1 + ofs;
    Phy2 = scl*hyp2 + ofs;
    Pwv  = scl*wvp  + ofs;
    Pmlp = scl*mlpp + ofs;
    clear scl ofs

   % Performance calculations: absolute percentage error (APE), one row
   % per test window, one column per prediction step, for each scheme.
   % Vectorised (MATLAB idiom) replacement of the former double loop:
   %   ape(j,k) = |Ptar(j,k) - Pred(j,k)| / Ptar(j,k)
   % NOTE(review): assumes Ptar has no zeros (demand data) - confirm.
   mlpape = abs(Ptar - Pmlp)./Ptar;
   hy1ape = abs(Ptar - Phy1)./Ptar;
   hy2ape = abs(Ptar - Phy2)./Ptar;
   wvape  = abs(Ptar - Pwv) ./Ptar;

   
   
   % Per-step error statistics over all test windows.
   % Each *ape matrix is (window x step), so statistics for prediction
   % step 'step' must be taken down COLUMN 'step'. Fix: the original
   % indexed row-wise as (step,:) - the APEs of all steps for window
   % #step - inconsistent with the nmse calls below that use (:,step).
   for step=1:AHEAD
       mean_mlpape(step) = mean(mlpape(:,step));
       var_mlpape(step) = var(mlpape(:,step));
       prc_mlpape(step) = prctile(mlpape(:,step),90);  % 90th-pct APE

       mean_hy1ape(step) = mean(hy1ape(:,step));
       var_hy1ape(step) = var(hy1ape(:,step));
       prc_hy1ape(step) = prctile(hy1ape(:,step),90);

       mean_hy2ape(step) = mean(hy2ape(:,step));
       var_hy2ape(step) = var(hy2ape(:,step));
       prc_hy2ape(step) = prctile(hy2ape(:,step),90);

       mean_wvape(step) = mean(wvape(:,step));
       var_wvape(step) = var(wvape(:,step));
       prc_wvape(step) = prctile(wvape(:,step),90);

       % Normalised mean squared error per step (project function nmse).
       nmse_mlper(step) = nmse(Ptar(:,step), Pmlp(:,step) );
       nmse_hy1er(step) = nmse(Ptar(:,step), Phy1(:,step) );
       nmse_hy2er(step) = nmse(Ptar(:,step), Phy2(:,step) );
       nmse_wver(step) = nmse(Ptar(:,step), Pwv(:,step) );
   end
   

   % 3x2 panel figure: rows = scheme (MLP / a trous / hybrid), columns =
   % first step vs last (AHEAD-th) step. Prediction in default colour,
   % target overlaid dash-dotted red. Titles kept byte-identical to the
   % originals (including their trailing spaces).
   figure
   preds  = {Pmlp; Pwv; Phy1};
   titles = {'One step ahead prediction (MLP)',     'Seven steps ahead prediction (MLP) '; ...
             'One step ahead prediction (a trous)', 'Seven steps ahead prediction (a trous) '; ...
             'One step ahead prediction (hybrid)',  'Seven steps ahead prediction (hybrid)'};
   steps = [1 AHEAD];
   for r = 1:3
       for c = 1:2
           subplot(3,2,(r-1)*2+c)
           plot(preds{r}(:,steps(c)));		hold on
           plot(Ptar(:,steps(c)),'r -.');
           title(titles{r,c})
       end
   end
   clear preds titles steps

   % APE time series in the same 3x2 layout as the prediction figure:
   % rows = scheme (MLP / a trous / hybrid), columns = step 1 vs step
   % AHEAD. The hybrid row plots hy2ape (2nd-stage MLP errors).
   % Fix: corrected the typo 'hybird' -> 'hybrid' in the last title.
   figure
   subplot(321)
   plot(mlpape(:,1));		hold on
   title('APE for one step prediction (MLP)')
   subplot(322)
   plot(mlpape(:,AHEAD));		hold on
   title('APE for seven step prediction (MLP)')
  
   subplot(323)
   plot(wvape(:,1));		hold on
   title('APE for one step prediction (a trous)') 
   subplot(324)
   plot(wvape(:,AHEAD));		hold on
   title('APE for seven step prediction (a trous)')
   
   subplot(325)
   plot(hy2ape(:,1));		hold on
   title('APE for one step prediction (hybrid)') 
   subplot(326)
   plot(hy2ape(:,AHEAD));		hold on
   title('APE for seven step prediction (hybrid)')
  


  % Persist all error measures. NOTE(review): when tsts == 'y' the file
  % is named *_tr (training) and otherwise *_tst (test) - confirm the
  % flag's meaning matches this naming; the pairing looks possibly
  % inverted relative to a "test? y/n" prompt.
  if strcmp(tsts, 'y')
       name3 = [PATH 'demand_err_tr.mat'];            
  else
       name3 = [PATH 'demand_err_tst.mat'];        
  end    
  eval(['save ' name3 ' mlpape wvape hy1ape hy2ape mean* var* prc* nmse* ']) 

 end
 


⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -