forecast.m: a program for time-series forecasting with neural networks (MATLAB)

%    Electricity demand time-series forecasting program,
%    based on shift-invariant wavelet transform features.
%
%    The predictor is a two-layer hybrid structure. The first layer
%    predicts the wavelet coefficients, using a number of MLPs on
%    different scales, which may have different sizes.
%
%    To obtain a prediction of the original time-series, another
%    network maps the predicted coefficients to the target time-series.
%    The simplest such network is the perceptron, as used in this
%    program. The target time-series may be the same as or different
%    from the original time-series; for example, a denoised price
%    series is used as the target in this program.
%

%%   REMINDER:  all time-series involved are row vectors!
%%   REMINDER:  the Netlab neural network toolbox is required; it
%%              can be found at http://www.ncrg.aston.ac.uk/netlab/
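
%%   NOTE: the shift-invariant ("a trous") transform used below is
%%   computed by the user function atfilter, which is not included in
%%   this listing. In an a trous scheme (our summary, not the author's
%%   code) the signal x is smoothed repeatedly with an upsampled
%%   low-pass filter, giving approximations c_1..c_J, and the wavelet
%%   coefficients are the differences w_j = c_(j-1) - c_j, so that
%%   x = c_J + w_1 + ... + w_J, with one coefficient per time step at
%%   every scale (hence shift-invariant, unlike the decimated DWT).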

clc
clear
 
           %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
           %%                                       %%
           %%       Programmed by Bailing  Zhang    %%
           %%             16/02/2000                %%
           %%    Tel: +61 3 96884829 (office)       %%
           %%    Fax: +61 3 96884050                %%
           %%    Email: zhang.bailing@vu.edu.au     %%
           %%    http://sci.vu.edu.au/~bzhang/      %%
           %%                                       %%
           %%         Copyright  Reserved           %%
           %%                                       %%
           %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%  

 path(path, '~\Matlab\NetLab\');
 PATH = ['~\Demand\'];

  load demand_data.txt
  data = demand_data(:,4:5);   % the first column of data is the load
  date = demand_data(:,1);
  xc   = data';
  num_data = length(data);
  WIN  = 30;               % length of the moving window for training
  AHEAD = 7;               % steps ahead to be predicted
  LEVEL = 4;               % number of wavelet decomposition levels
  time_base = 50;          % parameter used in the a trous filtering
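
  % With these settings, each first-stage training pattern maps a
  % WIN = 30 day window to a single value D = 1..AHEAD days ahead, and
  % atfilter returns LEVEL+2 = 6 series (stored below as WVSER0..WVSER5).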
  
  % Remove the mean, then scale the demand series to [-1, +1]:
  Me = mean(xc,2);
  mxc(1,:) = xc(1,:) - Me(1);
  mxc(2,:) = xc(2,:) - Me(2);

  Max = max(mxc(1,:));
  Min = min(mxc(1,:));
  rc  = 2*(mxc(1,:)-Min)/(Max-Min) - 1;
              % Only the load (demand value) series is predicted
 
  clear data xc
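
  % To map normalized predictions back to the original demand scale,
  % invert the two steps above. A minimal sketch as an anonymous
  % function (the name denorm is ours, not part of the original):
  denorm = @(r) (r + 1)*(Max - Min)/2 + Min + Me(1);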


  

 OPTION = menu('Simulation options','1. data preparation','2. training','3. testing','4. close');

 if OPTION==4
    close all
 end

 if OPTION ~=1 

   name = [PATH 'demand_wvdata.mat'];
   num_train = 1000;  
   num_test  = 400;   
   eval(['load ' name ]); 
   clear rc xc
 end

 if OPTION==2   % define parameters required by NETLAB

   alpha = 0.01;            % coefficient of weight-decay
   func  = 'linear';        % output unit activation;
                            % alternatives: 'linear', 'logistic', 'softmax'
   prior = alpha;

   % Set up the vector of options for the optimiser, required by NETLAB
   options = zeros(1,18);
   options(1)  = 1;         % display error values during training
   options(14) = 1000;      % number of training cycles

 end
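
 % For orientation, the NETLAB calls used in the training branch below
 % follow this pattern (a minimal sketch; x and t stand for any input
 % and target arrays and are not part of the original script):
 %
 %    net = mlp(nin, nhidden, nout, func, alpha);   % create a 2-layer MLP
 %    net = netopt(net, options, x, t, 'scg');      % scaled conjugate gradient training
 %    y   = mlpfwd(net, x);                         % forward pass / prediction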

 if OPTION ==3   
    tsts = input('Testing on training set (y/n)? ', 's');
    if strcmp(tsts, 'y')
       start_tst = 1;
       end_tst = num_train;
       testtype='tr';
    else
       start_tst = num_train + 1;
       end_tst =  num_train + num_test;
       testtype='tst';
    end 
 
 end
 	

% *********************************************************
% *********************************************************

  if OPTION == 1

    % Data pre-processing by the shift-invariant wavelet transform
    % (a trous filtering, applying the autocorrelation shell decomposition)

     [SER, DATE] = atfilter(rc, LEVEL, num_data, time_base, date);

     col = 0;
     while col < LEVEL+2
           col = col + 1;
           tmp =  SER(:,col)';
           eval(['WVSER' num2str(col-1) '=tmp;' ])             
     end 

     name = [PATH 'demand_wvdata.mat'];           
     eval(['save ' name  ' WVSER* DATE ']); 
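
     % (An eval-free alternative would keep the six series in a cell
     % array, e.g. WVSER{col} = SER(:,col)'; the dynamic names are used
     % here because the save command above relies on the WVSER* wildcard.)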
  
     subplot(6,2,1)
     plot(WVSER0)    
     subplot(6,2,3)
     plot(WVSER1)
     subplot(6,2,5)
     plot(WVSER2)    
     subplot(6,2,7)
     plot(WVSER3)    
     subplot(6,2,9)
     plot(WVSER4)    
     subplot(6,2,11)
     plot(WVSER5)
    
  end
 


  
 % *********************************************************
 % %%%%%%%   Training MLPs on different scales  %%%%%%%%%%%%


 if OPTION == 2

       % Using the wavelet-processed data, we train a number of MLPs on
       % different scales (including the raw series). In this scheme,
       % different days ahead are predicted by separate modules.

       % One perceptron or MLP then integrates the results from the
       % different levels.

   TX = WVSER0(1:num_train);
        % Target series to be forecast
   [Ni,Np] = size(TX);
   nout = 1;          % number of outputs in all MLPs
   nin  = WIN;        % number of inputs
   nhidden = round((nin+nout)/2);   % number of hidden units;
                      % an empirical choice (WIN = 30 gives nhidden = 16)
    
   for lev = 0:LEVEL+1

      fprintf('Training for resolution level %d\n',lev);
      ser = eval(['WVSER' num2str(lev)]);
      trx = ser(1:num_train);   clear ser

      for D=1:AHEAD
         if lev~=LEVEL+1
            fprintf('MLP training for step = %d\n',D);
            k = 0;
            while k < Np - (nin+D-1)
                k = k + 1;
                px(k,:) = trx(k:k+nin-1);     % input window of length nin
                pt(k,:) = trx(k+nin+D-1);     % target value D steps ahead
            end
            nn = mlp(nin,nhidden,nout,func,alpha);
            nn = netopt(nn, options, px,pt, 'scg');
            eval(['net' num2str(lev) '.step' num2str(D) '=nn;'])
            eval(['p' num2str(lev) '.step' num2str(D) '=pt;'])
            clear px pt
         else   % linear regression for the residual level
            fprintf('Regression for step = %d\n',D);
            k = 0;
            while k < Np - (nin+D-1)
                k = k + 1;
                px(k,:) = trx(k:k+nin-1);
                pt(k,:) = trx(k+nin+D-1);
            end
            coef = pinv(px'*px)*(px')*pt;
            eval(['p' num2str(lev) '.step' num2str(D) '=pt;']);
            eval(['net' num2str(lev) '.step' num2str(D) '=coef;'])
            clear coef px pt
         end
      end         % complete training for all steps ahead
                  % in one level of wavelet coefficients
   end
   % complete training for all levels of wavelet coefficients
   % This is the first stage of prediction
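
   % Note: the fields p<lev>.step<D> saved above hold the training
   % TARGETS pt, so the combiner below is fitted on true wavelet
   % coefficients rather than on first-stage predictions. To combine
   % the networks' own outputs instead, one could (our variant, not in
   % the original) store the NETLAB forward pass:
   %
   %    yhat = mlpfwd(nn, px);    % first-stage prediction on the windows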

   %
   nin = WIN;

   lev = -1;  P = [];
   while lev < LEVEL+1
        lev = lev + 1;
        if lev==LEVEL+1
           beta = 2^(-LEVEL/2);
        else
           beta = 2^(-lev/2);
        end
        % beta is the combination coefficient appearing in the
        % reconstruction equation; with LEVEL = 4, levels 0..5 get
        % beta = 1, 1/sqrt(2), 1/2, 1/(2*sqrt(2)), 1/4, 1/4.

        for d=1:AHEAD
            tmp = eval(['p' num2str(lev) '.step' num2str(d)]);
            k = 0;
            while k < Np - (WIN+AHEAD-1)
                 k = k + 1;
                 tmpP(k,d) = tmp(k,1);
            end
            clear tmp
        end
        P = [P beta*tmpP];
        clear tmpP
   end
    clear p*

    % Target matrix: row k holds the AHEAD values that follow window k
    k = 0;
    while k < Np-(nin+AHEAD-1)
          k = k + 1;
          T(k,:) = TX(nin+k : nin+AHEAD+k-1);
    end
    clear TX X trx DATE Date date ser* SER*

    % Using pseudo-inverse to calculate the perceptron's weight   
    W = pinv(P'*P)*(P')*T;
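
    % The fitted combination is applied as P*W, giving the AHEAD-step
    % forecast windows of the target series. A ridge-regularised solve
    % (our variant, not in the original) would replace the line above:
    %
    %    lambda = 1e-3;                            % assumed strength
    %    W = (P'*P + lambda*eye(size(P,2))) \ (P'*T);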

    % Alternatively, another MLP can be trained to combine the wavelet
    % coefficient predictions:
    options(14) = 2000;
    fprintf('Training the last-stage MLP\n');
    [nrow,ncol]=size(P);
    numin = ncol;
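
    % (The listing breaks off here: the source page showed only the
    % first of two pages. The last-stage MLP training presumably
    % follows the same NETLAB pattern as above, along the lines of
    %
    %    nh  = round((numin + AHEAD)/2);           % assumed sizing
    %    net = mlp(numin, nh, AHEAD, func, alpha);
    %    net = netopt(net, options, P, T, 'scg');
    %
    % but the actual continuation is not shown.)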
