special3.m
Neural-network-based control toolbox
Language: MATLAB (M-file)
   end

   % Matrix containing partial derivatives of the output from each hidden unit
   % w.r.t the most recent control input:
   d10(H_hiddenf) = (1-y1f(H_hiddenf).*y1f(H_hiddenf)).*W1f(H_hiddenf,na+1);

   % Partial derivative of output w.r.t the most recent control input
   d20 = d21(1:hiddenf)*d10;
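   % Chain rule: d20 = dyhat/du, obtained by combining the output-layer partials in
   % d21 (assumed computed above for the forward model) with d10. The factor
   % (1-y1f.^2) is the tanh derivative, and column na+1 of W1f is the weight on the
   % most recent control input in the forward model's regressor.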

  
   %>>>>>>>>>>>>>>  COMPUTE DERIVATIVE OF CONTROL W.R.T. EACH WEIGHT  <<<<<<<<<<<<<<
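    % PSI will hold the direct derivative of the current control u(t) w.r.t. every
    % weight of the inverse network; index(j) is assumed to give the position in
    % theta of the first input-to-hidden weight of hidden unit j.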
    if i>=2, 
    % ==========   Elements corresponding to the linear output units   ============
    if L_outputi',

      % -- The part of PSI corresponding to hidden-to-output layer weights --
      index1 = 1;
      PSI(index1:index1+hiddeni) = y1i;
      % ---------------------------------------------------------------------
 
      % -- The part of PSI corresponding to input-to-hidden layer weights ---
      for j = L_hiddeni',
        PSI(index(j):index(j)+inputs) = W2i(j)*phii;
      end
      
      for j = H_hiddeni',
        PSI(index(j):index(j)+inputs) = W2i(j)*(1-y1i(j)*y1i(j))*phii;
      end 
      % ---------------------------------------------------------------------    

    % ============  Elements corresponding to the tanh output units   =============
    elseif H_outputi',
      % -- The part of PSI corresponding to hidden-to-output layer weights --
      index1 = 1;
      PSI(index1:index1+hiddeni) = y1i * (1 - u*u);
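      % (1-u*u) is the derivative of the tanh output unit, so these elements are
      % y1i*(du/dh2i), i.e. the derivative of the control w.r.t. the W2i weights.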
      % ---------------------------------------------------------------------
       
      % -- The part of PSI corresponding to input-to-hidden layer weights ---
      for j = L_hiddeni',
        PSI(index(j):index(j)+inputs) = W2i(j)*(1-u*u) * phii;
      end
      
      for j = H_hiddeni',
        PSI(index(j):index(j)+inputs) = W2i(j)*(1-y1i(j)*y1i(j))*(1-u*u) * phii;
      end
      % ---------------------------------------------------------------------
    end
 
 
    %>>>>>>>>>>>  COMPUTE DERIVATIVE OF PREDICTED OUTPUT W.R.T. WEIGHT  <<<<<<<<<<< 
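      % The direct part of PSI is augmented with the contributions that flow back
      % through past outputs (Ahat, via PSIold_red, assumed to approximate
      % dy/dtheta) and past controls (Bhat, via PSIold = du/dtheta). PSI_red =
      % PSI*d20 then gives dyhat/dtheta, since the prediction is taken to depend on
      % the weights only through the control input.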
      for j=1:na,
        PSI = PSI+Ahat(j)*PSIold_red(:,j);
      end
      for j=1:nb-1,
        PSI = PSI+Bhat(j)*PSIold(:,j);
      end
      PSI_red = PSI*d20;
    end

    %>>>>>>>>>>>>>>>>>>>>>>>>>>>    UPDATE THE WEIGHTS    <<<<<<<<<<<<<<<<<<<<<<<<<
    if iter >first,                          % wait a few samples before updating
    % ---------- Forgetting factor method ----------
    if mflag==1,
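      % Recursive least squares with exponential forgetting. Because P is updated
      % first, P*PSI_red in the parameter update equals the usual gain
      % K = P_old*PSI_red/(lambda + PSI_red'*P_old*PSI_red).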
      % -- Update P matrix --
      P = (P - P*PSI_red/(lambda + PSI_red'*P*PSI_red)*PSI_red'*P ) / lambda;

      % -- Update Parameters --
      theta = theta + P*PSI_red*ey;
  
      % ----------  Constant trace method   ---------- 
    elseif mflag == 2,
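      % Constant-trace variant: after the measurement update of Pbar, P is rescaled
      % and alpha_min is added on its diagonal (index3 is assumed to hold the
      % diagonal indices of P), which keeps the eigenvalues of P between roughly
      % alpha_min and alpha_max and so keeps the adaptation gain alive.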
      % -- Correction factor --
      K = P*PSI_red /(lambda + PSI_red'*P*PSI_red);
      
      % -- Measurement update of P matrix --
      Pbar = (P - P*PSI_red/(1 + PSI_red'*P*PSI_red)*PSI_red'*P )/lambda;

      % -- Update Parameters --
      theta = theta + K*ey;

      % -- Time update of P matrix --
      P         = ((alpha_max-alpha_min)/trace(Pbar))*Pbar;
      P(index3) = P(index3)+alpha_min;
      
    % ----------       EFRA method        ---------- 
    else 
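      % EFRA (exponential forgetting and resetting algorithm): besides the usual
      % gain correction, P gains a constant term betaI (assumed to be a precomputed
      % beta*eye) and loses a quadratic term delta*P*P, bounding P above and below.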
      % -- Correction factor --
      K = P*PSI_red * (alpha/(1 + PSI_red'*P*PSI_red));

      % -- Update Parameters --
      theta = theta + K*ey;
      
      % -- Update P --
      P = P/lambda - K*PSI_red'*P + betaI - delta*P*P;
    end 
  
    SSE = SSE + ey*ey;                    % Update performance index (SSE)
  
    % -- Put parameters back into weight matrices --
    W1i = reshape(theta(parameters2+1:parameters),inputs+1,hiddeni)';
    W2i = reshape(theta(1:parameters2),hiddeni+1,1)';
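    % theta stacks the weights with the hidden-to-output weights first: entries
    % 1..parameters2 rebuild W2i (one weight per hidden unit plus the bias), and
    % the remaining entries rebuild W1i one row (one hidden unit) at a time.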
  end


  %>>>>>>>>>>>>>>>>>>>>>>>    DETERMINE CONTROL SIGNAL     <<<<<<<<<<<<<<<<<<<<<<<<  
  % Control using the inverse model
  phii = [ref(i+1);y;y_old(1:na-1);u_old(1:nb-1);1];
  h1i = W1i*phii;  
  y1i(H_hiddeni) = pmntanh(h1i(H_hiddeni));
  y1i(L_hiddeni) = h1i(L_hiddeni);    
  h2i = W2i*y1i;
  u(H_outputi)   = pmntanh(h2i(H_outputi));
  u(L_outputi)   = h2i(L_outputi);
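  % One forward pass through the inverse model produces the control: the regressor
  % holds the one-step-ahead reference, the current and past outputs, the past
  % controls and a bias; tanh units are those indexed by H_hiddeni/H_outputi,
  % linear units those indexed by L_hiddeni/L_outputi.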
  
  
  %>>>>>>>>>>>>>>>>>>>>>>>        LINEARIZE NETWORK        <<<<<<<<<<<<<<<<<<<<<<<<

  % Matrix with partial derivative of each output with respect to each of the
  % outputs from the hidden neurons
  dy2dy1 = W2i(:,1:hiddeni);

  % Matrix with partial derivatives of the output from each hidden neurons with
  % respect to each input:
  dy1dx = W1i(:,2:inputs);
  for j = H_hiddeni',
    dy1dx(j,:) = W1i(j,2:inputs)*(1-y1i(j)*y1i(j));
  end

  % Matrix with partial derivative of each output with respect to each input
  dy2dx     = dy2dy1 * dy1dx;
  Ahat = dy2dx(1:na);
  Bhat = dy2dx(na+1:na+nb-1);
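  % Ahat and Bhat are the sensitivities of the control to the past outputs and the
  % past controls, respectively (the reference in column 1 and the bias column are
  % left out of dy1dx); they feed the PSI recursion above at the next sample.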


  %>>>>>>>>>>>>>>>>>>       COPY DATA INTO THE DATA VECTORS       <<<<<<<<<<<<<<<<<
  u_data(i)    = u;
  y_data(i)    = y;
  yhat_data(i) = yhat;


  %>>>>>>>>>>>>>>>>>>>>>>>>         TIME UPDATES          <<<<<<<<<<<<<<<<<<<<<<<<<
  y_old = shift(y_old,y);
  u_old = shift(u_old,u);
  PSIold = [PSI,PSIold(:,1:nb-2)];  % Past gradients
  PSIold_red = [PSI_red,PSIold_red(:,1:na-1)];  % Past gradients
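  % Shift registers: after these updates PSIold(:,1) and PSIold_red(:,1) hold the
  % newest du/dtheta and dyhat/dtheta, ready for the recursion at the next sample.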


  %>>>>>>>>>>>>>>>>>>>>      PRINT %-AGE OF EPOCH COMPLETED      <<<<<<<<<<<<<<<<<<
  progress(fighandle,floor(100*i/samples));
  

  %>>>>>>>>>>>>>>>>>>>>>>>>>>>        DRAW PLOTS       <<<<<<<<<<<<<<<<<<<<<<<<<<<<
  if i==samples,
    epochs = epochs+1;
    figure(gcf)
    
    % Plot A
    if(exist('plot_a')==1),
      if epochs==1,
        [a_plots,dummy]=size(plot_a);      % # of plots in plot A
        plmata = zeros(samples,a_plots);   % Collect vectors in plmat
      end
      for nn = 1:a_plots, 
        plmata(:,nn) = eval(plot_a(nn,:));   
      end
      subplot(2,1,1);
      plot([0:samples-1],plmata);          % Plot plmat
      xlabel('Samples');
      set(gca,'Xlim',[0 samples-1]);       % Set x-axis
      title(['Specialized Training  (SSE = ' num2str(SSE) ...
                                         ',    epoch = ' num2str(epochs) ')']);
      grid on
    end
  
    % Plot B
    if(exist('plot_b')==1),
      if epochs==1,
        [b_plots,dummy]=size(plot_b);      % # of plots in plot B
        plmatb = zeros(samples,b_plots);   % Collect the vectors in plmat
      end
      for nn = 1:b_plots, 
        plmatb(:,nn) = eval(plot_b(nn,:));   
      end
      subplot(2,1,2);
      plot([0:samples-1],plmatb);          % Plot plmat
      xlabel('Samples'); 
      set(gca,'Xlim',[0 samples-1]);       % Set x-axis
      grid on
    end
    figure(gcf); drawnow
    i   = 0;
    SSE = 0;
    if iter<maxiter, fighandle=progress; end
  end
end
%----------------------------------------------------------------------------------
%----------------           >>>   END OF MAIN LOOP   <<<          -----------------
%----------------------------------------------------------------------------------
subplot(111)
