bp_xor.m
clear; clc;
X = [0 0;
     0 1;
     1 0;
     1 1];   % input sample set; each row is one sample vector
T = [0;
     1;
     1;
     0];     % each row is the desired output vector for the matching sample
[Num_sample, Num_in] = size(X);  % Num_sample: number of samples; Num_in: elements per sample
Num_out = size(T, 2);            % Num_out: number of output neurons
Num_hid = 2;                     % number of hidden neurons
W12 = rand(Num_in, Num_hid);     % input-to-hidden weights
W23 = rand(Num_hid, Num_out);    % hidden-to-output weights
eta = 0.7;                       % learning rate
alpha = 1;                       % weight-retention factor (1 = plain gradient descent)
E = 0;                           % accumulated epoch error
epsilon = 0.166;                 % error threshold: 0.166 or 0.008
p = 1; n = 1;                    % p: sample index; n: iteration counter
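% With Num_in = 2, Num_hid = 2, and Num_out = 1 this builds a 2-2-1 network
% of logistic units; note that no bias (threshold) terms are used anywhere.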
while (p <= Num_sample)
    Y1 = X(p,:);   % training sample input, i.e. the output of layer 1
    t  = T(p,:);   % corresponding desired output of the final layer
    % Forward pass
    % Layer 2 (the first hidden layer)
    for i = 1:Num_hid   % over the neurons of layer 2
        V2(i) = Y1 * W12(:,i);
        Y2(i) = sigmoid(V2(i));   % output of each hidden neuron
        % deriv2(i) = Y2(i)*(1-Y2(i));
    end
    % Layer 3 (the output layer)
    for i = 1:Num_out
        V3(i) = Y2 * W23(:,i);
        Y3(i) = sigmoid(V3(i));   % output of each output neuron
        % deriv3(i) = Y3(i)*(1-Y3(i));
    end
    % Error computation
    e = t - Y3;
    E = E + sum(e.^2)/2;
    if (p == Num_sample)   % p == Num_sample marks the end of one training epoch
        % X = [X(2,:); X(4,:); X(1,:); X(3,:)];   % optional reshuffling of the samples
        % T = [T(2,:); T(4,:); T(1,:); T(3,:)];
        if (E > epsilon)   % check the termination condition
            p = 0;
            E = 0;
        end
    end
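    % Training scheme: E accumulates the half squared error over the epoch.
    % If the epoch total still exceeds epsilon, p is reset to 0 (the p = p + 1
    % below then restarts at sample 1) and E is cleared; otherwise p runs past
    % Num_sample and the while loop terminates.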
    % Error back-propagation
    % Layer 3 (output layer) weight adjustment
    for i = 1:Num_out   % i-th output neuron
        % delta3(i) = e(i)*deriv3(i);
        delta3(i) = e(i) * Y3(i) * (1 - Y3(i));   % local gradient; Y3*(1-Y3) is the logistic derivative
        for j = 1:Num_hid   % j-th hidden neuron
            W23(j,i) = alpha*W23(j,i) + eta*delta3(i)*Y2(j);
        end
    end
    % Layer 2 (hidden layer) weight adjustment
    for i = 1:Num_hid
        % delta2(i) = W23(i,:)*delta3'*deriv2(i);
        delta2(i) = W23(i,:) * delta3' * Y2(i) * (1 - Y2(i));   % note: uses the already-updated W23; textbook backprop uses the pre-update weights
        for j = 1:Num_in
            W12(j,i) = alpha*W12(j,i) + eta*delta2(i)*Y1(j);
        end
    end
    p = p + 1;   % go to the next sample
    n = n + 1;   % iteration counter
    if (n > 100000)
        warning('Maximum number of iterations reached');   % error() here would make the break unreachable
        break
    end
end
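The listing calls sigmoid, which is not a MATLAB built-in and is not defined above, so the script will not run without it. A minimal sketch, assuming the standard logistic function (consistent with the Y*(1-Y) derivative terms used in the weight updates); save it as sigmoid.m on the MATLAB path:

function y = sigmoid(v)
% Logistic activation, applied element-wise: y = 1/(1+exp(-v)).
% Its derivative is y.*(1-y), matching the delta terms in bp_xor.m.
y = 1 ./ (1 + exp(-v));
end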
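After the loop exits with E <= epsilon, the learned mapping can be verified directly. A minimal usage sketch, assuming the workspace variables left by the script above (the fprintf format is illustrative):

% Check the trained network against all four XOR patterns
for p = 1:Num_sample
    h = sigmoid(X(p,:) * W12);   % hidden-layer outputs (1 x Num_hid)
    o = sigmoid(h * W23);        % network output (1 x Num_out)
    fprintf('%d XOR %d -> %.3f (target %d)\n', X(p,1), X(p,2), o, T(p));
end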