ekfdemo1.m
% PURPOSE : To estimate the input-output mapping with inputs x
%           and outputs y generated by the following nonlinear,
%           nonstationary state space model:
%
%           x(t+1) = 0.5x(t) + [25x(t)]/[(1+x(t))^(2)]
%                    + 8cos(1.2t) + process noise
%           y(t)   = x(t)^(2) / 20 + measurement noise with
%                    time-varying covariance,
%
%           using a multi-layer perceptron (MLP) and both the EKF and
%           the EKF with evidence maximisation algorithm.
% AUTHOR  : Nando de Freitas - Thanks for the acknowledgement :-)
% DATE    : 08-09-98

clear;
echo off;

% INITIALISATION AND PARAMETERS:
% ==============================
N = 240;           % Number of time steps.
t = 1:1:N;         % Time.
x0 = 0.1;          % Initial input.
x = zeros(N,1);    % Input observation.
y = zeros(N,1);    % Output observation.
x(1,1) = x0;       % Initial input.
actualR = 3;       % Measurement noise variance.
actualQ = .01;     % Process noise variance.
s1 = 10;           % Neurons in the hidden layer.
s2 = 1;            % Neurons in the output layer - only one in this implementation.
initVar = 10;      % Variance of prior for weights.
KalmanR = 3;       % Kalman filter measurement noise covariance hyperparameter.
KalmanQ = 1e-5;    % Kalman filter process noise covariance hyperparameter.
KalmanP = 10;      % Kalman filter initial weights covariance hyperparameter.
window = 10;       % Length of the moving window used to estimate the time-varying covariance.

% GENERATE PROCESS AND MEASUREMENT NOISE:
% =======================================
v = sqrt(actualR)*sin(0.05*t)'.*randn(N,1);   % Time-varying measurement noise.
w = sqrt(actualQ)*randn(N,1);                 % Process noise.

figure(1)
clf;
subplot(221)
plot(v);
ylabel('Measurement Noise','fontsize',15);
xlabel('Time');
subplot(222)
plot(w);
ylabel('Process Noise','fontsize',15);
xlabel('Time');

% GENERATE INPUT-OUTPUT DATA:
% ===========================
y(1,1) = (x(1,1)^(2))./20 + v(1,1);
for t=2:N,
  x(t,1) = 0.5*x(t-1,1) + 25*x(t-1,1)/(1+x(t-1,1)^(2)) + 8*cos(1.2*(t-1)) + w(t,1);
  y(t,1) = (x(t,1).^(2))./20 + v(t,1);
end;

subplot(223)
plot(x)
ylabel('Input','fontsize',15);
xlabel('Time','fontsize',15);
subplot(224)
plot(y)
ylabel('Output','fontsize',15);
xlabel('Time','fontsize',15);
fprintf('Press a key to continue')
pause;
fprintf('\n')
fprintf('Training the MLP with EKF')
fprintf('\n')

% PERFORM EXTENDED KALMAN FILTERING TO TRAIN MLP:
% ===============================================
tInit = clock;
[ekfp,theta,thetaR,PR,innovations] = mlpekf(x',y',s1,s2,KalmanR,KalmanQ,KalmanP,initVar,N);
durationekf = etime(clock,tInit);
errorekf = norm(ekfp(N/3:N)'-y(N/3:N));

% PERFORM SEQUENTIAL EKF WITH Q UPDATE:
% =====================================
fprintf('Training the MLP with EKF and Q updating')
fprintf('\n')
tInit = clock;   % Restart the timer so the EKFMAX duration excludes the EKF run above.
[ekfp2,theta2,thetaR2,PR2,innovations2,qplot] = mlpekfQ(x',y',s1,s2,KalmanR,KalmanQ,KalmanP,initVar,window,N);
durationekfmax = etime(clock,tInit);
errorekfmax = norm(ekfp2(N/3:N)'-y(N/3:N));

fprintf('EKF error = %d',errorekf)
fprintf('\n')
fprintf('EKFMAX error = %d',errorekfmax)
fprintf('\n')
fprintf('EKF duration = %d seconds.\n',durationekf)
fprintf('EKFMAX duration = %d seconds.\n',durationekfmax)

% PLOT RESULTS:
% =============
figure(1)
clf;
subplot(221)
plot(1:length(x),y,'g',1:length(x),ekfp2,'r',1:length(x),ekfp,'b')
legend('True value','EKFMAX estimate','EKF estimate');
ylabel('One-step-ahead prediction','fontsize',15)
xlabel('Time','fontsize',15)
subplot(222)
plot(1:length(x),qplot)
ylabel('Q parameter','fontsize',15)
xlabel('Time','fontsize',15)
subplot(223)
plot(1:length(x),innovations2)
ylabel('Innovations variance for EKFMAX','fontsize',15)
xlabel('Time','fontsize',15)
subplot(224);
plot(1:length(x),innovations)
ylabel('Innovations variance for EKF','fontsize',15)
xlabel('Time','fontsize',15)
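
The demo calls two helper functions, mlpekf.m and mlpekfQ.m, which belong to the original distribution and are not reproduced on this page. As a rough orientation only, the sketch below shows what a single EKF training step for a 1-s1-1 MLP with tanh hidden units and a linear output could look like, with the network weights stacked into one state vector. The names ekfupdate and mlpforward, the weight packing, and the driver snippet are assumptions for illustration, not the original implementation.

% A minimal sketch (NOT the original mlpekf): one EKF step that treats the
% MLP weights as the state of a random-walk model,
%   theta(t+1) = theta(t) + process noise,   y(t) = g(x(t),theta(t)) + noise.
% Save as ekfupdate.m; the weight packing in mlpforward is an assumption.
function [yhat,theta,P] = ekfupdate(theta,P,xt,yt,R,Q,s1)
P = P + Q*eye(length(theta));         % Time update of the weight covariance.
[yhat,H] = mlpforward(theta,xt,s1);   % One-step-ahead prediction and Jacobian dg/dtheta.
S = H*P*H' + R;                       % Innovations variance.
K = P*H'/S;                           % Kalman gain.
theta = theta + K*(yt - yhat);        % Measurement update of the weights.
P = P - K*H*P;                        % Covariance update.
end

function [yhat,H] = mlpforward(theta,xt,s1)
% Unpack a 1-s1-1 MLP: hidden weights/biases, output weights, output bias.
W1 = theta(1:s1);  b1 = theta(s1+1:2*s1);
W2 = theta(2*s1+1:3*s1);  b2 = theta(3*s1+1);
h = tanh(W1*xt + b1);                 % Hidden activations (s1 x 1).
yhat = W2'*h + b2;                    % Linear output.
dh = 1 - h.^2;                        % Derivative of tanh.
H = [(W2.*dh*xt)', (W2.*dh)', h', 1]; % 1 x (3*s1+1) Jacobian of yhat w.r.t. theta.
end

A possible driver loop, reusing the hyperparameters defined in the demo (again only a sketch):

ntheta = 3*s1+1;
theta = sqrt(initVar)*randn(ntheta,1);    % Random weights drawn from the prior.
P = KalmanP*eye(ntheta);                  % Initial weight covariance.
for t=1:N, [pred(t),theta,P] = ekfupdate(theta,P,x(t),y(t),KalmanR,KalmanQ,s1); end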
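
mlpekfQ additionally adapts the process noise hyperparameter Q on-line using a moving window of length window and returns its trajectory in qplot (plotted above as 'Q parameter'). The exact evidence-maximisation update it uses is not shown on this page; the helper below, named adaptQ here as a hypothetical example, illustrates one standard innovation-based alternative in which the empirical innovation variance over the window is mapped through the Kalman gain.

% Generic innovation-based Q estimate (illustration only; not necessarily the
% rule implemented in mlpekfQ). innovWindow holds the last 'window'
% innovations e = y - yhat; K is the current Kalman gain (column vector).
function Qest = adaptQ(innovWindow,K)
Cv = mean(innovWindow(:).^2);     % Windowed innovation variance (scalar output case).
Qest = mean(diag(Cv*(K*K')));     % Average diagonal of K*Cv*K' as a single Q value.
end

Inside the training loop this could be called as Qest = adaptQ(e(t-window+1:t),K) once t >= window, with Qest replacing KalmanQ in the next time update of the weight covariance.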