{ testmlpunit.pas — MLP unit tests (gradient/Hessian verification). }
while I<=WCount-1 do
begin
WPrev := Network.Weights[I];
Network.Weights[I] := WPrev+H;
MLPProcess(Network, X, Y2);
Network.Weights[I] := WPrev-H;
MLPProcess(Network, X, Y1);
Network.Weights[I] := WPrev;
V := 0;
if NKind<>1 then
begin
J:=0;
while J<=NOut-1 do
begin
V := V+0.5*(Sqr(Y2[J]-Y[J])-Sqr(Y1[J]-Y[J]))/(2*H);
Inc(J);
end;
end
else
begin
J:=0;
while J<=NOut-1 do
begin
if Y[J]<>0 then
begin
if Y2[J]=0 then
begin
V := V+Y[J]*Ln(MaxRealNumber);
end
else
begin
V := V+Y[J]*Ln(Y[J]/Y2[J]);
end;
if Y1[J]=0 then
begin
V := V-Y[J]*Ln(MaxRealNumber);
end
else
begin
V := V-Y[J]*Ln(Y[J]/Y1[J]);
end;
end;
Inc(J);
end;
V := V/(2*H);
end;
Grad1[I] := V;
if AbsReal(Grad1[I])>1.0E-3 then
begin
Err := Err or (AbsReal((Grad2[I]-Grad1[I])/Grad1[I])>ETol);
end
else
begin
Err := Err or (AbsReal(Grad2[I]-Grad1[I])>ETol);
end;
Inc(I);
end;
//
// Test gradient calculation: batch (least squares)
//
SSize := 1+RandomInteger(10);
SetLength(XY, SSize-1+1, NIn+NOut-1+1);
I:=0;
while I<=WCount-1 do
begin
Grad1[I] := 0;
Inc(I);
end;
E1 := 0;
I:=0;
while I<=SSize-1 do
begin
J:=0;
while J<=NIn-1 do
begin
X1[J] := 4*RandomReal-2;
Inc(J);
end;
APVMove(@XY[I][0], 0, NIn-1, @X1[0], 0, NIn-1);
if MLPIsSoftmax(Network) then
begin
J:=0;
while J<=NOut-1 do
begin
Y1[J] := 0;
Inc(J);
end;
XY[I,NIn] := RandomInteger(NOut);
Y1[Round(XY[I,NIn])] := 1;
end
else
begin
J:=0;
while J<=NOut-1 do
begin
Y1[J] := 4*RandomReal-2;
Inc(J);
end;
APVMove(@XY[I][0], NIn, NIn+NOut-1, @Y1[0], 0, NOut-1);
end;
MLPGrad(Network, X1, Y1, V, Grad2);
E1 := E1+V;
APVAdd(@Grad1[0], 0, WCount-1, @Grad2[0], 0, WCount-1);
Inc(I);
end;
MLPGradBatch(Network, XY, SSize, E2, Grad2);
Err := Err or (AbsReal(E1-E2)/E1>0.01);
I:=0;
while I<=WCount-1 do
begin
if Grad1[I]<>0 then
begin
Err := Err or (AbsReal((Grad2[I]-Grad1[I])/Grad1[I])>ETol);
end
else
begin
Err := Err or (Grad2[I]<>Grad1[I]);
end;
Inc(I);
end;
//
// Test gradient calculation: batch (natural error func)
//
SSize := 1+RandomInteger(10);
SetLength(XY, SSize-1+1, NIn+NOut-1+1);
I:=0;
while I<=WCount-1 do
begin
Grad1[I] := 0;
Inc(I);
end;
E1 := 0;
I:=0;
while I<=SSize-1 do
begin
J:=0;
while J<=NIn-1 do
begin
X1[J] := 4*RandomReal-2;
Inc(J);
end;
APVMove(@XY[I][0], 0, NIn-1, @X1[0], 0, NIn-1);
if MLPIsSoftmax(Network) then
begin
J:=0;
while J<=NOut-1 do
begin
Y1[J] := 0;
Inc(J);
end;
XY[I,NIn] := RandomInteger(NOut);
Y1[Round(XY[I,NIn])] := 1;
end
else
begin
J:=0;
while J<=NOut-1 do
begin
Y1[J] := 4*RandomReal-2;
Inc(J);
end;
APVMove(@XY[I][0], NIn, NIn+NOut-1, @Y1[0], 0, NOut-1);
end;
MLPGradN(Network, X1, Y1, V, Grad2);
E1 := E1+V;
APVAdd(@Grad1[0], 0, WCount-1, @Grad2[0], 0, WCount-1);
Inc(I);
end;
MLPGradNBatch(Network, XY, SSize, E2, Grad2);
Err := Err or (AbsReal(E1-E2)/E1>ETol);
I:=0;
while I<=WCount-1 do
begin
if Grad1[I]<>0 then
begin
Err := Err or (AbsReal((Grad2[I]-Grad1[I])/Grad1[I])>ETol);
end
else
begin
Err := Err or (Grad2[I]<>Grad1[I]);
end;
Inc(I);
end;
Inc(Pass);
end;
end;
(*************************************************************************
Hessian functions test.

Compares the analytic error/gradient/Hessian returned by
MLPHessianBatch (HKind=0) and MLPHessianNBatch (HKind=1) against
reference values obtained by summing MLPGrad/MLPGradN over a random
sample and by 4th-order central differences of the gradient.
Any mismatch beyond the relative tolerance ETol sets Err.
*************************************************************************)
procedure TestHessian(NKind : Integer;
     NIn : Integer;
     NHid1 : Integer;
     NHid2 : Integer;
     NOut : Integer;
     PassCount : Integer;
     var Err : Boolean);
var
    Network : MultiLayerPerceptron;
    HKind : Integer;        { 0 = least-squares Hessian, 1 = natural error Hessian }
    N1 : Integer;
    N2 : Integer;
    WCount : Integer;       { number of network weights }
    H : Double;             { finite-difference step }
    ETol : Double;          { relative error tolerance }
    Pass : Integer;
    I : Integer;
    J : Integer;
    SSize : Integer;        { random sample size, 1..10 }
    A1 : Double;
    A2 : Double;
    XY : TReal2DArray;      { dataset: NIn inputs, then targets (or class index) }
    H1 : TReal2DArray;      { reference Hessian (numerical differentiation) }
    H2 : TReal2DArray;      { Hessian returned by MLPHessian(N)Batch }
    Grad1 : TReal1DArray;   { reference gradient (sum over sample) }
    Grad2 : TReal1DArray;
    Grad3 : TReal1DArray;
    X1 : TReal1DArray;
    Y1 : TReal1DArray;
    V : Double;
    E1 : Double;            { reference total error }
    E2 : Double;            { error returned by MLPHessian(N)Batch }
    WPrev : Double;
begin
    Assert(PassCount>=2, 'PassCount<2!');
    //
    // Output-layer parameters A1/A2 are only used by network kinds 2
    // and 3; for NKind=3 the two endpoints are kept distinct so the
    // output range is non-degenerate.
    //
    A1 := 0;
    A2 := 0;
    if NKind=2 then
    begin
        A1 := 1000*RandomReal-500;
        A2 := 2*RandomReal-1;
    end;
    if NKind=3 then
    begin
        A1 := 1000*RandomReal-500;
        A2 := A1+(2*RandomInteger(2)-1)*(0.1+0.9*RandomReal);
    end;
    CreateNetwork(Network, NKind, A1, A2, NIn, NHid1, NHid2, NOut);
    MLPProperties(Network, N1, N2, WCount);
    H := 0.0001;
    ETol := 0.01;

    //
    // Initialize working arrays
    //
    SetLength(X1, NIn-1+1);
    SetLength(Y1, NOut-1+1);
    SetLength(Grad1, WCount-1+1);
    SetLength(Grad2, WCount-1+1);
    SetLength(Grad3, WCount-1+1);
    SetLength(H1, WCount-1+1, WCount-1+1);
    SetLength(H2, WCount-1+1, WCount-1+1);

    //
    // Process
    //
    Pass:=1;
    while Pass<=PassCount do
    begin
        MLPRandomizeFull(Network);

        //
        // Test Hessian calculation:
        // * E1 contains total error (calculated using MLPGrad/MLPGradN)
        // * Grad1 contains total gradient (calculated using MLPGrad/MLPGradN)
        // * H1 contains Hessian calculated using differences of gradients
        // * E2, Grad2 and H2 contain corresponding values calculated using
        //   MLPHessianBatch/MLPHessianNBatch
        //
        HKind:=0;
        while HKind<=1 do
        begin
            //
            // Generate a small random dataset and clear the accumulators
            //
            SSize := 1+RandomInteger(10);
            SetLength(XY, SSize-1+1, NIn+NOut-1+1);
            I:=0;
            while I<=WCount-1 do
            begin
                Grad1[I] := 0;
                Inc(I);
            end;
            I:=0;
            while I<=WCount-1 do
            begin
                J:=0;
                while J<=WCount-1 do
                begin
                    H1[I,J] := 0;
                    Inc(J);
                end;
                Inc(I);
            end;
            E1 := 0;
            I:=0;
            while I<=SSize-1 do
            begin
                //
                // X, Y: random inputs in [-2,2]. For softmax networks the
                // target is a class index stored in column NIn, with Y1
                // holding the corresponding one-hot vector; otherwise the
                // targets themselves are random values in [-2,2].
                //
                J:=0;
                while J<=NIn-1 do
                begin
                    X1[J] := 4*RandomReal-2;
                    Inc(J);
                end;
                APVMove(@XY[I][0], 0, NIn-1, @X1[0], 0, NIn-1);
                if MLPIsSoftmax(Network) then
                begin
                    J:=0;
                    while J<=NOut-1 do
                    begin
                        Y1[J] := 0;
                        Inc(J);
                    end;
                    XY[I,NIn] := RandomInteger(NOut);
                    Y1[Round(XY[I,NIn])] := 1;
                end
                else
                begin
                    J:=0;
                    while J<=NOut-1 do
                    begin
                        Y1[J] := 4*RandomReal-2;
                        Inc(J);
                    end;
                    APVMove(@XY[I][0], NIn, NIn+NOut-1, @Y1[0], 0, NOut-1);
                end;

                //
                // E1, Grad1: accumulate reference error and gradient
                //
                if HKind=0 then
                begin
                    MLPGrad(Network, X1, Y1, V, Grad2);
                end
                else
                begin
                    MLPGradN(Network, X1, Y1, V, Grad2);
                end;
                E1 := E1+V;
                APVAdd(@Grad1[0], 0, WCount-1, @Grad2[0], 0, WCount-1);

                //
                // H1: row J accumulates the derivative of the gradient with
                // respect to weight J via the 4th-order central difference
                //   [g(w-2H) - 8*g(w-H) + 8*g(w+H) - g(w+2H)] / (12*H)
                // built up in Grad2 by the scaled APVSub/APVAdd calls below.
                //
                J:=0;
                while J<=WCount-1 do
                begin
                    WPrev := Network.Weights[J];
                    Network.Weights[J] := WPrev-2*H;
                    if HKind=0 then
                    begin
                        MLPGrad(Network, X1, Y1, V, Grad2);
                    end
                    else
                    begin
                        MLPGradN(Network, X1, Y1, V, Grad2);
                    end;
                    Network.Weights[J] := WPrev-H;
                    if HKind=0 then
                    begin
                        MLPGrad(Network, X1, Y1, V, Grad3);
                    end
                    else
                    begin
                        MLPGradN(Network, X1, Y1, V, Grad3);
                    end;
                    APVSub(@Grad2[0], 0, WCount-1, @Grad3[0], 0, WCount-1, 8);
                    Network.Weights[J] := WPrev+H;
                    if HKind=0 then
                    begin
                        MLPGrad(Network, X1, Y1, V, Grad3);
                    end
                    else
                    begin
                        MLPGradN(Network, X1, Y1, V, Grad3);
                    end;
                    APVAdd(@Grad2[0], 0, WCount-1, @Grad3[0], 0, WCount-1, 8);
                    Network.Weights[J] := WPrev+2*H;
                    if HKind=0 then
                    begin
                        MLPGrad(Network, X1, Y1, V, Grad3);
                    end
                    else
                    begin
                        MLPGradN(Network, X1, Y1, V, Grad3);
                    end;
                    APVSub(@Grad2[0], 0, WCount-1, @Grad3[0], 0, WCount-1);
                    V := 1/(12*H);
                    APVAdd(@H1[J][0], 0, WCount-1, @Grad2[0], 0, WCount-1, V);

                    // Restore the perturbed weight before moving on
                    Network.Weights[J] := WPrev;
                    Inc(J);
                end;
                Inc(I);
            end;

            //
            // E2, Grad2, H2: values under test
            //
            if HKind=0 then
            begin
                MLPHessianBatch(Network, XY, SSize, E2, Grad2, H2);
            end
            else
            begin
                MLPHessianNBatch(Network, XY, SSize, E2, Grad2, H2);
            end;

            //
            // Compare: relative error when the reference value is
            // significant, absolute error otherwise. The E1=0 branch
            // guards against division by zero (EZeroDivide).
            //
            if E1<>0 then
            begin
                Err := Err or (AbsReal(E1-E2)/E1>ETol);
            end
            else
            begin
                Err := Err or (AbsReal(E1-E2)>ETol);
            end;
            I:=0;
            while I<=WCount-1 do
            begin
                if AbsReal(Grad1[I])>1.0E-6 then
                begin
                    Err := Err or (AbsReal((Grad2[I]-Grad1[I])/Grad1[I])>ETol);
                end
                else
                begin
                    Err := Err or (AbsReal(Grad2[I]-Grad1[I])>ETol);
                end;
                Inc(I);
            end;
            I:=0;
            while I<=WCount-1 do
            begin
                J:=0;
                while J<=WCount-1 do
                begin
                    if AbsReal(H1[I,J])>1.0E-6 then
                    begin
                        Err := Err or (AbsReal((H1[I,J]-H2[I,J])/H1[I,J])>ETol);
                    end
                    else
                    begin
                        Err := Err or (AbsReal(H2[I,J]-H1[I,J])>ETol);
                    end;
                    Inc(J);
                end;
                Inc(I);
            end;
            Inc(HKind);
        end;
        Inc(Pass);
    end;
end;
(*************************************************************************
Silent unit test.

Wrapper that runs the full MLP test suite via TestMLP with its
argument set to True — by the naming of this wrapper, presumably the
"silent" flag that suppresses progress/report output (TestMLP is
defined elsewhere in this unit; confirm there). Returns TestMLP's
result: True when all tests pass.
*************************************************************************)
function testmlpunit_test_silent():Boolean;
begin
Result := TestMLP(True);
end;
(*************************************************************************
Unit test.

Wrapper that runs the full MLP test suite via TestMLP with its
argument set to False — by contrast with the *_test_silent wrapper,
presumably the non-silent mode that reports results (TestMLP is
defined elsewhere in this unit; confirm there). Returns TestMLP's
result: True when all tests pass.
*************************************************************************)
function testmlpunit_test():Boolean;
begin
Result := TestMLP(False);
end;
end.
{ End of unit. (Web-scraper UI residue removed; text after 'end.' is
  ignored by the Pascal compiler.) }