
📄 demo.asv

📁 Neural network model based on the BP model
💻 ASV
%~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
%
%  Data fitting DEMO of neural networks with matrix inputs.
%
%  Author: Povilas Daniušis, paralax@hacker.lt
%  http://ai.hacker.lt - Lithuanian site about Artificial Intelligence.
%
%  TODO: weighted MSE minimization, maximum likelihood method, multiple
%  activation function support.
%  ----------------------------------------------------------------------

%clear all

alpha = 0.9;       % inertia (momentum)
eta = 0.005;       % initial learning rate
epsilon = 0.03;    % target MSE
epsilon1 = 0.001;  % minimal descent (stopping criterion) - effectively all iterations in this case
neurones = 5;      % hidden-layer size
n = 10;            % upper bound of the input interval
numEpochs = 1;
earlyStop = 5;

x = [0:0.01:n];
e = randn(1,100*n + 1)*0.1;   % additive Gaussian noise
y = sinc(n/2 - x) + e;        % noisy sinc target signal
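% Note: sinc here is the Signal Processing Toolbox function, sinc(t) = sin(pi*t)/(pi*t).
% If that toolbox is unavailable, a drop-in replacement (a sketch, not part of the
% original demo) would be:
%
%   t = n/2 - x;
%   y = ones(size(t));
%   y(t~=0) = sin(pi*t(t~=0)) ./ (pi*t(t~=0));
%   y = y + e;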



for i=1:100*n-6
    data.training(i).mat = [y(i+1), y(i+2); y(i+3), y(i+4); y(i+5), y(i+6)];  % matrix 
    data.vtraining(i,:) = [y(i+1), y(i+2), y(i+3), y(i+4), y(i+5), y(i+6)];   % vector
    data.target(i) = y(i+7);
end
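% For example, with i = 1 the input window is y(2)..y(7), stored both as a 3x2
% matrix (data.training(1).mat) and as a 6-element row vector (data.vtraining(1,:)),
% and the target is the next sample y(8): one-step-ahead prediction from the
% previous six noisy values.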

data.vtraining = data.vtraining';   % samples as columns, as expected by train/sim

net=newff(minmax(data.vtraining),[neurones,1],{'tansig','purelin'},'traingd');  % reference MLP trained by gradient descent
net.trainParam.show = NaN;      % suppress progress display
net.trainParam.epochs = 1000;
net.trainParam.goal = epsilon;
[net,tr,rez,errors]=train(net,data.vtraining,data.target);
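% Note: newff/minmax is the legacy Neural Network Toolbox interface. On newer
% toolbox versions an equivalent reference network could be built roughly as
% follows (a sketch under that assumption, not the original code):
%
%   net = feedforwardnet(neurones,'traingd');
%   net.trainParam.showWindow = false;
%   net.trainParam.epochs = 1000;
%   net.trainParam.goal = epsilon;
%   [net,tr] = train(net,data.vtraining,data.target);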


nn = NN_device(neurones,length(data.vtraining(:,1)));                                       % vector-input network
e = mNN_device(neurones,size(data.training(1).mat),alpha,eta,epsilon,epsilon1,earlyStop);   % matrix-input network
e_gd = gdtrain(e,data,numEpochs);   % matrix network trained by gradient descent
e_elm = ELM_train(e,data);          % matrix network trained as an extreme learning machine (ELM)
nn = velm_train(nn,data);           % vector network trained with the ELM scheme


% -----------------------------------------------------------------------
% Generate an independent test set with the same structure as the training data.
clear data;

x = [0:0.01:n];
er = randn(1,100*n + 1)*0.1;
y = sinc(n/2 - x) + er;



for i=1:100*n-6
    data.training(i).mat = [y(i+1), y(i+2); y(i+3), y(i+4); y(i+5), y(i+6)];  % matrix 
    data.vtraining(i,:) = [y(i+1), y(i+2), y(i+3), y(i+4), y(i+5), y(i+6)];   % vector
    data.target(i) = y(i+7);
end
data.vtraining = data.vtraining';


s = sim(net,data.vtraining);   % NN(gd)   - toolbox MLP, gradient descent
s1 = NN_sim(nn,data);          % NN(elm)  - vector network, ELM training
s_gd = mNN_sim(e_gd,data);     % MNN(gd)  - matrix network, gradient descent
s_elm = mNN_sim(e_elm,data);   % MNN(elm) - matrix network, ELM training



%figure;
%plot(data.target,'r-');
%hold on;
%plot(s,'b-');
%hold on;
%plot(s1,'b:');
%hold on;
%plot(s_gd,'b--');
%hold on;
%plot(s_elm,'b-');
%legend('Real','NNg','NNe','MNNg','MNNe');



sse1 = sum((data.target - s).^2);     % NN(gd)
sse2 = sum((data.target - s1).^2);    % NN(elm)
sse3 = sum((data.target - s_gd).^2);  % MNN(gd)
sse4 = sum((data.target - s_elm).^2); % MNN(elm) 

% normalise to mean squared error per sample
sse1 = sse1 / length(data.target);
sse2 = sse2 / length(data.target);
sse3 = sse3 / length(data.target);
sse4 = sse4 / length(data.target);
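% The header TODO mentions weighted MSE minimization. A minimal sketch of such a
% criterion on the test set (not part of the original demo; the per-sample weight
% vector w below is a hypothetical, uniform placeholder):
%
%   w    = ones(size(data.target));                      % per-sample weights
%   wmse = sum(w .* (data.target - s).^2) / sum(w);      % weighted MSE for NN(gd)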


%fprintf('NN(gd) = %f, NN(elm) = %f, MNN(gd) = %f, MNN(elm) = %f \n',sse1,sse2,sse3,sse4);
f = sse2 / sse4;   % error ratio NN(elm) / MNN(elm); f > 1 means the matrix-input network fits better
