testmlpunit.pas
while I<=NOut-1 do
begin
AllSame := AllSame and (Y1[I]=Y2[I]);
Inc(I);
end;
Err := Err or not AllSame;
//
// Same inputs on the original network lead to the same outputs
// on a copy created using MLPCopy
//
UnsetNetwork(Network2);
MLPCopy(Network, Network2);
I:=0;
while I<=NIn-1 do
begin
X1[I] := 2*RandomReal-1;
X2[I] := X1[I];
Inc(I);
end;
I:=0;
while I<=NOut-1 do
begin
Y1[I] := 2*RandomReal-1;
Y2[I] := 2*RandomReal-1;
Inc(I);
end;
MLPProcess(Network, X1, Y1);
MLPProcess(Network2, X2, Y2);
AllSame := True;
I:=0;
while I<=NOut-1 do
begin
AllSame := AllSame and (Y1[I]=Y2[I]);
Inc(I);
end;
Err := Err or not AllSame;
//
// Same inputs on the original network lead to the same outputs
// on a copy restored using MLPSerialize/MLPUnserialize
//
UnsetNetwork(Network2);
MLPSerialize(Network, RA, RLen);
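// unserialize from an independent copy of the serialized array,
// so the test does not depend on the original RA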
SetLength(RA2, RLen-1+1);
I:=0;
while I<=RLen-1 do
begin
RA2[I] := RA[I];
Inc(I);
end;
MLPUnserialize(RA2, Network2);
I:=0;
while I<=NIn-1 do
begin
X1[I] := 2*RandomReal-1;
X2[I] := X1[I];
Inc(I);
end;
I:=0;
while I<=NOut-1 do
begin
Y1[I] := 2*RandomReal-1;
Y2[I] := 2*RandomReal-1;
Inc(I);
end;
MLPProcess(Network, X1, Y1);
MLPProcess(Network2, X2, Y2);
AllSame := True;
I:=0;
while I<=NOut-1 do
begin
AllSame := AllSame and (Y1[I]=Y2[I]);
Inc(I);
end;
Err := Err or not AllSame;
//
// Different inputs lead to different outputs (non-zero network)
//
if not ZeroNet then
begin
I:=0;
while I<=NIn-1 do
begin
X1[I] := 2*RandomReal-1;
X2[I] := 2*RandomReal-1;
Inc(I);
end;
I:=0;
while I<=NOut-1 do
begin
Y1[I] := 2*RandomReal-1;
Y2[I] := Y1[I];
Inc(I);
end;
MLPProcess(Network, X1, Y1);
MLPProcess(Network, X2, Y2);
AllSame := True;
I:=0;
while I<=NOut-1 do
begin
AllSame := AllSame and (Y1[I]=Y2[I]);
Inc(I);
end;
Err := Err or AllSame;
end;
//
// Randomization changes the outputs (inputs unchanged, non-zero network)
//
if not ZeroNet then
begin
I:=0;
while I<=NIn-1 do
begin
X1[I] := 2*RandomReal-1;
X2[I] := 2*RandomReal-1;
Inc(I);
end;
I:=0;
while I<=NOut-1 do
begin
Y1[I] := 2*RandomReal-1;
Y2[I] := Y1[I];
Inc(I);
end;
MLPCopy(Network, Network2);
MLPRandomize(Network2);
MLPProcess(Network, X1, Y1);
MLPProcess(Network2, X1, Y2);
AllSame := True;
I:=0;
while I<=NOut-1 do
begin
AllSame := AllSame and (Y1[I]=Y2[I]);
Inc(I);
end;
Err := Err or AllSame;
end;
//
// Full randomization changes the outputs (inputs unchanged, non-zero network)
//
if not ZeroNet then
begin
I:=0;
while I<=NIn-1 do
begin
X1[I] := 2*RandomReal-1;
X2[I] := 2*RandomReal-1;
Inc(I);
end;
I:=0;
while I<=NOut-1 do
begin
Y1[I] := 2*RandomReal-1;
Y2[I] := Y1[I];
Inc(I);
end;
MLPCopy(Network, Network2);
MLPRandomizeFull(Network2);
MLPProcess(Network, X1, Y1);
MLPProcess(Network2, X1, Y2);
AllSame := True;
I:=0;
while I<=NOut-1 do
begin
AllSame := AllSame and (Y1[I]=Y2[I]);
Inc(I);
end;
Err := Err or AllSame;
end;
//
// Normalization properties
//
if NKind=1 then
begin
//
// Classifier network outputs are normalized: non-negative and sum to 1
//
I:=0;
while I<=NIn-1 do
begin
X1[I] := 2*RandomReal-1;
Inc(I);
end;
MLPProcess(Network, X1, Y1);
V := 0;
I:=0;
while I<=NOut-1 do
begin
V := V+Y1[I];
Err := Err or (Y1[I]<0);
Inc(I);
end;
Err := Err or (AbsReal(V-1)>1000*MachineEpsilon);
end;
if NKind=2 then
begin
//
// B-type network outputs are bounded by A1 from below (A2>=0)
// or from above (A2<0)
//
I:=0;
while I<=NIn-1 do
begin
X1[I] := 2*RandomReal-1;
Inc(I);
end;
MLPProcess(Network, X1, Y1);
I:=0;
while I<=NOut-1 do
begin
if A2>=0 then
begin
Err := Err or (Y1[I]<A1);
end
else
begin
Err := Err or (Y1[I]>A1);
end;
Inc(I);
end;
end;
if NKind=3 then
begin
//
// R-type network outputs are within [A1,A2] (or [A2,A1])
//
I:=0;
while I<=NIn-1 do
begin
X1[I] := 2*RandomReal-1;
Inc(I);
end;
MLPProcess(Network, X1, Y1);
I:=0;
while I<=NOut-1 do
begin
Err := Err or (Y1[I]<Min(A1, A2)) or (Y1[I]>Max(A1, A2));
Inc(I);
end;
end;
Inc(Pass);
end;
end;
(*************************************************************************
Gradient functions test
*************************************************************************)
procedure TestGradient(NKind : Integer;
NIn : Integer;
NHid1 : Integer;
NHid2 : Integer;
NOut : Integer;
PassCount : Integer;
var Err : Boolean);
var
Network : MultiLayerPerceptron;
Network2 : MultiLayerPerceptron;
N1 : Integer;
N2 : Integer;
WCount : Integer;
ZeroNet : Boolean;
H : Double;
ETol : Double;
A1 : Double;
A2 : Double;
Pass : Integer;
I : Integer;
J : Integer;
K : Integer;
AllSame : Boolean;
ILen : Integer;
RLen : Integer;
SSize : Integer;
XY : TReal2DArray;
Grad1 : TReal1DArray;
Grad2 : TReal1DArray;
X : TReal1DArray;
Y : TReal1DArray;
X1 : TReal1DArray;
X2 : TReal1DArray;
Y1 : TReal1DArray;
Y2 : TReal1DArray;
IA : TInteger1DArray;
RA : TReal1DArray;
V : Double;
E : Double;
E1 : Double;
E2 : Double;
V1 : Double;
V2 : Double;
V3 : Double;
V4 : Double;
WPrev : Double;
begin
Assert(PassCount>=2, 'PassCount<2!');
A1 := 0;
A2 := 0;
if NKind=2 then
begin
A1 := 1000*RandomReal-500;
A2 := 2*RandomReal-1;
end;
if NKind=3 then
begin
A1 := 1000*RandomReal-500;
A2 := A1+(2*RandomInteger(2)-1)*(0.1+0.9*RandomReal);
end;
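//
// A1/A2 are the output-layer parameters passed to CreateNetwork:
// bound/direction for B-type networks (NKind=2), interval ends
// for R-type networks (NKind=3)
//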
CreateNetwork(Network, NKind, A1, A2, NIn, NHid1, NHid2, NOut);
MLPProperties(Network, N1, N2, WCount);
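//
// H is the step for the finite-difference gradient checks below,
// ETol is the relative error tolerance
//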
H := 0.0001;
ETol := 0.01;
//
// Initialize
//
SetLength(X, NIn-1+1);
SetLength(X1, NIn-1+1);
SetLength(X2, NIn-1+1);
SetLength(Y, NOut-1+1);
SetLength(Y1, NOut-1+1);
SetLength(Y2, NOut-1+1);
SetLength(Grad1, WCount-1+1);
SetLength(Grad2, WCount-1+1);
//
// Process
//
Pass:=1;
while Pass<=PassCount do
begin
MLPRandomizeFull(Network);
//
// Test error/gradient calculation (least squares)
//
SetLength(XY, 0+1, NIn+NOut-1+1);
I:=0;
while I<=NIn-1 do
begin
X[I] := 4*RandomReal-2;
Inc(I);
end;
APVMove(@XY[0][0], 0, NIn-1, @X[0], 0, NIn-1);
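//
// Target setup: classifier networks store the class index in the
// last column of XY and use a one-hot vector Y; regression
// networks use random real-valued targets
//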
if MLPIsSoftmax(Network) then
begin
I:=0;
while I<=NOut-1 do
begin
Y[I] := 0;
Inc(I);
end;
XY[0,NIn] := RandomInteger(NOut);
Y[Round(XY[0,NIn])] := 1;
end
else
begin
I:=0;
while I<=NOut-1 do
begin
Y[I] := 4*RandomReal-2;
Inc(I);
end;
APVMove(@XY[0][0], NIn, NIn+NOut-1, @Y[0], 0, NOut-1);
end;
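//
// Check the analytic error/gradient from MLPGrad: E must equal the
// hand-computed V = 0.5*||MLPProcess(X)-Y||^2, MLPError on the
// one-row dataset must agree, and Grad2 must match the
// finite-difference approximation Grad1 built below
//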
MLPGrad(Network, X, Y, E, Grad2);
MLPProcess(Network, X, Y2);
APVSub(@Y2[0], 0, NOut-1, @Y[0], 0, NOut-1);
V := APVDotProduct(@Y2[0], 0, NOut-1, @Y2[0], 0, NOut-1);
V := V/2;
Err := Err or (AbsReal((V-E)/V)>ETol);
Err := Err or (AbsReal((MLPError(Network, XY, 1)-V)/V)>ETol);
I:=0;
while I<=WCount-1 do
begin
WPrev := Network.Weights[I];
Network.Weights[I] := WPrev-2*H;
MLPProcess(Network, X, Y1);
APVSub(@Y1[0], 0, NOut-1, @Y[0], 0, NOut-1);
V1 := APVDotProduct(@Y1[0], 0, NOut-1, @Y1[0], 0, NOut-1);
V1 := V1/2;
Network.Weights[I] := WPrev-H;
MLPProcess(Network, X, Y1);
APVSub(@Y1[0], 0, NOut-1, @Y[0], 0, NOut-1);
V2 := APVDotProduct(@Y1[0], 0, NOut-1, @Y1[0], 0, NOut-1);
V2 := V2/2;
Network.Weights[I] := WPrev+H;
MLPProcess(Network, X, Y1);
APVSub(@Y1[0], 0, NOut-1, @Y[0], 0, NOut-1);
V3 := APVDotProduct(@Y1[0], 0, NOut-1, @Y1[0], 0, NOut-1);
V3 := V3/2;
Network.Weights[I] := WPrev+2*H;
MLPProcess(Network, X, Y1);
APVSub(@Y1[0], 0, NOut-1, @Y[0], 0, NOut-1);
V4 := APVDotProduct(@Y1[0], 0, NOut-1, @Y1[0], 0, NOut-1);
V4 := V4/2;
Network.Weights[I] := WPrev;
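//
// Four-point central difference, accurate to O(H^4):
// dE/dW[I] ~ (E(W-2H)-8*E(W-H)+8*E(W+H)-E(W+2H))/(12*H)
//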
Grad1[I] := (V1-8*V2+8*V3-V4)/(12*H);
if AbsReal(Grad1[I])>1.0E-6 then
begin
Err := Err or (AbsReal((Grad2[I]-Grad1[I])/Grad1[I])>ETol);
end
else
begin
Err := Err or (AbsReal(Grad2[I]-Grad1[I])>ETol);
end;
Inc(I);
end;
//
// Test error/gradient calculation (natural error function).
// Tested on networks with non-randomized structure, because
// NKind is representative only in that case.
//
SetLength(XY, 0+1, NIn+NOut-1+1);
I:=0;
while I<=NIn-1 do
begin
X[I] := 4*RandomReal-2;
Inc(I);
end;
APVMove(@XY[0][0], 0, NIn-1, @X[0], 0, NIn-1);
if MLPIsSoftmax(Network) then
begin
I:=0;
while I<=NOut-1 do
begin
Y[I] := 0;
Inc(I);
end;
XY[0,NIn] := RandomInteger(NOut);
Y[Round(XY[0,NIn])] := 1;
end
else
begin
I:=0;
while I<=NOut-1 do
begin
Y[I] := 4*RandomReal-2;
Inc(I);
end;
APVMove(@XY[0][0], NIn, NIn+NOut-1, @Y[0], 0, NOut-1);
end;
MLPGradN(Network, X, Y, E, Grad2);
MLPProcess(Network, X, Y2);
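//
// Reference value of the natural error: half sum of squares for
// regression networks, cross-entropy Sum(Y[I]*Ln(Y[I]/Y2[I]))
// for classifiers (NKind=1)
//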
V := 0;
if NKind<>1 then
begin
I:=0;
while I<=NOut-1 do
begin
V := V+0.5*Sqr(Y2[I]-Y[I]);
Inc(I);
end;
end
else
begin
I:=0;
while I<=NOut-1 do
begin
if Y[I]<>0 then
begin
if Y2[I]=0 then
begin
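// Y2[I]=0 would make the ratio infinite; clamp it at MaxRealNumber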
V := V+Y[I]*Ln(MaxRealNumber);
end
else
begin
V := V+Y[I]*Ln(Y[I]/Y2[I]);
end;
end;
Inc(I);
end;
end;
Err := Err or (AbsReal((V-E)/V)>ETol);
Err := Err or (AbsReal((MLPErrorN(Network, XY, 1)-V)/V)>ETol);
I:=0;