testmlpunit.pas
unit testmlpunit;
interface
uses Math, Ap, Sysutils, mlpbase, trinverse, lbfgs, cholesky, spdsolve, mlptrain;
function TestMLP(Silent : Boolean):Boolean;
function testmlpunit_test_silent():Boolean;
function testmlpunit_test():Boolean;
implementation
procedure CreateNetwork(var Network : MultiLayerPerceptron;
NKind : Integer;
A1 : Double;
A2 : Double;
NIn : Integer;
NHid1 : Integer;
NHid2 : Integer;
NOut : Integer);forward;
procedure UnsetNetwork(var Network : MultiLayerPerceptron);forward;
procedure TestInformational(NKind : Integer;
NIn : Integer;
NHid1 : Integer;
NHid2 : Integer;
NOut : Integer;
PassCount : Integer;
var Err : Boolean);forward;
procedure TestProcessing(NKind : Integer;
NIn : Integer;
NHid1 : Integer;
NHid2 : Integer;
NOut : Integer;
PassCount : Integer;
var Err : Boolean);forward;
procedure TestGradient(NKind : Integer;
NIn : Integer;
NHid1 : Integer;
NHid2 : Integer;
NOut : Integer;
PassCount : Integer;
var Err : Boolean);forward;
procedure TestHessian(NKind : Integer;
NIn : Integer;
NHid1 : Integer;
NHid2 : Integer;
NOut : Integer;
PassCount : Integer;
var Err : Boolean);forward;
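(*************************************************************************
Driver: sweeps network geometries (inputs, outputs, hidden layer sizes)
and network kinds, running the informational, processing, gradient and
Hessian sub-tests, then performs two training smoke tests: XOR learning
with MLPTrainLM and k-fold cross-validation with MLPKFoldCVLM.
*************************************************************************)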
function TestMLP(Silent : Boolean):Boolean;
var
WasErrors : Boolean;
PassCount : Integer;
MaxN : Integer;
MaxHid : Integer;
Info : Integer;
NF : Integer;
NHid : Integer;
NL : Integer;
NHid1 : Integer;
NHid2 : Integer;
NKind : Integer;
I : Integer;
J : Integer;
Network : MultiLayerPerceptron;
Network2 : MultiLayerPerceptron;
Rep : MLPReport;
CVRep : MLPCVReport;
NCount : Integer;
XY : TReal2DArray;
ValXY : TReal2DArray;
SSize : Integer;
ValSize : Integer;
AllSame : Boolean;
InfErrors : Boolean;
ProcErrors : Boolean;
GradErrors : Boolean;
HessErrors : Boolean;
TrnErrors : Boolean;
begin
WasErrors := False;
InfErrors := False;
ProcErrors := False;
GradErrors := False;
HessErrors := False;
TrnErrors := False;
PassCount := 10;
MaxN := 4;
MaxHid := 4;
//
// General multilayer network tests
//
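//
// NF and NL sweep the number of inputs and outputs, NHid1/NHid2 the
// hidden layer sizes.  NKind selects the constructor family used by
// CreateNetwork below: 0 - MLPCreate* (linear output),
// 1 - MLPCreateC* (classifier), 2 - MLPCreateB* and 3 - MLPCreateR*
// (the variants that take the extra A1/A2 output parameters).
//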
NF:=1;
while NF<=MaxN do
begin
NL:=1;
while NL<=MaxN do
begin
NHid1:=0;
while NHid1<=MaxHid do
begin
NHid2:=0;
while NHid2<=0 do
begin
NKind:=0;
while NKind<=3 do
begin
//
// Skip meaningless parameter combinations:
// * classifier networks (NKind=1) need at least two output classes
// * a second hidden layer is only allowed when the first one is present
//
if (NKind=1) and (NL<2) then
begin
Inc(NKind);
Continue;
end;
if (NHid1=0) and (NHid2<>0) then
begin
Inc(NKind);
Continue;
end;
//
// Tests
//
TestInformational(NKind, NF, NHid1, NHid2, NL, PassCount, InfErrors);
TestProcessing(NKind, NF, NHid1, NHid2, NL, PassCount, ProcErrors);
TestGradient(NKind, NF, NHid1, NHid2, NL, PassCount, GradErrors);
TestHessian(NKind, NF, NHid1, NHid2, NL, PassCount, HessErrors);
Inc(NKind);
end;
Inc(NHid2);
end;
Inc(NHid1);
end;
Inc(NL);
end;
Inc(NF);
end;
//
// Test network training on simple XOR problem
//
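//
// The four samples encode the XOR truth table in a +/-1 encoding
// (columns 0..1 are inputs, column 2 is the target).  A 2-2-1 network
// is trained with the Levenberg-Marquardt trainer (MLPTrainLM) and the
// fit is accepted only if the RMS error on the training set stays
// below 0.1.
//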
SetLength(XY, 3+1, 2+1);
XY[0,0] := -1;
XY[0,1] := -1;
XY[0,2] := -1;
XY[1,0] := +1;
XY[1,1] := -1;
XY[1,2] := +1;
XY[2,0] := -1;
XY[2,1] := +1;
XY[2,2] := +1;
XY[3,0] := +1;
XY[3,1] := +1;
XY[3,2] := -1;
MLPCreate1(2, 2, 1, Network);
MLPTrainLM(Network, XY, 4, 0.001, 10, Info, Rep);
TrnErrors := TrnErrors or (MLPRMSError(Network, XY, 4)>0.1);
//
// Test CV on random noisy problem
//
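//
// 100 samples with a random input and a class label drawn independently
// of it; this only checks that k-fold cross-validation with the LM
// trainer (MLPKFoldCVLM) runs to completion - the reported CV errors
// are not validated here.
//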
NCount := 100;
SetLength(XY, NCount-1+1, 1+1);
I:=0;
while I<=NCount-1 do
begin
XY[I,0] := 2*RandomReal-1;
XY[I,1] := RandomInteger(4);
Inc(I);
end;
MLPCreateC0(1, 4, Network);
MLPKFoldCVLM(Network, XY, NCount, 0.001, 5, 10, Info, Rep, CVRep);
//
// Final report
//
WasErrors := InfErrors or ProcErrors or GradErrors or HessErrors or TrnErrors;
if not Silent then
begin
Write(Format('MLP TEST'#13#10'',[]));
Write(Format('INFORMATIONAL FUNCTIONS: ',[]));
if not InfErrors then
begin
Write(Format('OK'#13#10'',[]));
end
else
begin
Write(Format('FAILED'#13#10'',[]));
end;
Write(Format('BASIC PROCESSING: ',[]));
if not ProcErrors then
begin
Write(Format('OK'#13#10'',[]));
end
else
begin
Write(Format('FAILED'#13#10'',[]));
end;
Write(Format('GRADIENT CALCULATION: ',[]));
if not GradErrors then
begin
Write(Format('OK'#13#10'',[]));
end
else
begin
Write(Format('FAILED'#13#10'',[]));
end;
Write(Format('HESSIAN CALCULATION: ',[]));
if not HessErrors then
begin
Write(Format('OK'#13#10'',[]));
end
else
begin
Write(Format('FAILED'#13#10'',[]));
end;
Write(Format('TRAINING: ',[]));
if not TrnErrors then
begin
Write(Format('OK'#13#10'',[]));
end
else
begin
Write(Format('FAILED'#13#10'',[]));
end;
if WasErrors then
begin
Write(Format('TEST SUMMARY: FAILED'#13#10'',[]));
end
else
begin
Write(Format('TEST SUMMARY: PASSED'#13#10'',[]));
end;
Write(Format(''#13#10''#13#10'',[]));
end;
Result := not WasErrors;
end;
(*************************************************************************
Network creation
*************************************************************************)
procedure CreateNetwork(var Network : MultiLayerPerceptron;
NKind : Integer;
A1 : Double;
A2 : Double;
NIn : Integer;
NHid1 : Integer;
NHid2 : Integer;
NOut : Integer);
begin
Assert((NIn>0) and (NHid1>=0) and (NHid2>=0) and (NOut>0), 'CreateNetwork error');
Assert((NHid1<>0) or (NHid2=0), 'CreateNetwork error');
Assert((NKind<>1) or (NOut>=2), 'CreateNetwork error');
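//
// The three branches below differ only in depth (no hidden layers,
// one hidden layer, two hidden layers); within each branch NKind picks
// the constructor: 0 - MLPCreate*, 1 - MLPCreateC*, 2 - MLPCreateB*,
// 3 - MLPCreateR*, with A1/A2 forwarded to the B and R variants.
//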
if NHid1=0 then
begin
//
// No hidden layers
//
if NKind=0 then
begin
MLPCreate0(NIn, NOut, Network);
end
else
begin
if NKind=1 then
begin
MLPCreateC0(NIn, NOut, Network);
end
else
begin
if NKind=2 then
begin
MLPCreateB0(NIn, NOut, A1, A2, Network);
end
else
begin
if NKind=3 then
begin
MLPCreateR0(NIn, NOut, A1, A2, Network);
end;
end;
end;
end;
Exit;
end;
if NHid2=0 then
begin
//
// One hidden layer
//
if NKind=0 then
begin
MLPCreate1(NIn, NHid1, NOut, Network);
end
else
begin
if NKind=1 then
begin
MLPCreateC1(NIn, NHid1, NOut, Network);
end
else
begin
if NKind=2 then
begin
MLPCreateB1(NIn, NHid1, NOut, A1, A2, Network);
end
else
begin
if NKind=3 then
begin
MLPCreateR1(NIn, NHid1, NOut, A1, A2, Network);
end;
end;
end;
end;
Exit;
end;
//
// Two hidden layers
//
if NKind=0 then
begin
MLPCreate2(NIn, NHid1, NHid2, NOut, Network);
end
else
begin
if NKind=1 then
begin
MLPCreateC2(NIn, NHid1, NHid2, NOut, Network);
end
else
begin
if NKind=2 then
begin
MLPCreateB2(NIn, NHid1, NHid2, NOut, A1, A2, Network);
end
else
begin
if NKind=3 then
begin
MLPCreateR2(NIn, NHid1, NHid2, NOut, A1, A2, Network);
end;
end;
end;
end;
end;
(*************************************************************************
Unsets network (initializes it to the smallest possible network)
*************************************************************************)
procedure UnsetNetwork(var Network : MultiLayerPerceptron);
begin
MLPCreate0(1, 1, Network);
end;
(*************************************************************************
Informational functions test
*************************************************************************)
procedure TestInformational(NKind : Integer;
NIn : Integer;
NHid1 : Integer;
NHid2 : Integer;
NOut : Integer;
PassCount : Integer;
var Err : Boolean);
var
Network : MultiLayerPerceptron;
N1 : Integer;
N2 : Integer;
WCount : Integer;
begin
CreateNetwork(Network, NKind, 0.0, 0.0, NIn, NHid1, NHid2, NOut);
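//
// MLPProperties reports the input count, output count and total number
// of weights; these must match the requested geometry and the weight
// count must be positive.
//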
MLPProperties(Network, N1, N2, WCount);
Err := Err or (N1<>NIn) or (N2<>NOut) or (WCount<=0);
end;
(*************************************************************************
Processing functions test
*************************************************************************)
procedure TestProcessing(NKind : Integer;
NIn : Integer;
NHid1 : Integer;
NHid2 : Integer;
NOut : Integer;
PassCount : Integer;
var Err : Boolean);
var
Network : MultiLayerPerceptron;
Network2 : MultiLayerPerceptron;
N1 : Integer;
N2 : Integer;
WCount : Integer;
ZeroNet : Boolean;
A1 : Double;
A2 : Double;
Pass : Integer;
I : Integer;
AllSame : Boolean;
RLen : Integer;
X1 : TReal1DArray;
X2 : TReal1DArray;
Y1 : TReal1DArray;
Y2 : TReal1DArray;
RA : TReal1DArray;
RA2 : TReal1DArray;
V : Double;
begin
Assert(PassCount>=2, 'PassCount<2!');
//
// Prepare network
//
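//
// NKind=2 and NKind=3 networks need the extra A1/A2 parameters:
// a random pair for the B-type network, and for the R-type network a
// non-degenerate interval with |A2-A1| in [0.1, 1.0).
//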
A1 := 0;
A2 := 0;
if NKind=2 then
begin
A1 := 1000*RandomReal-500;
A2 := 2*RandomReal-1;
end;
if NKind=3 then
begin
A1 := 1000*RandomReal-500;
A2 := A1+(2*RandomInteger(2)-1)*(0.1+0.9*RandomReal);
end;
CreateNetwork(Network, NKind, A1, A2, NIn, NHid1, NHid2, NOut);
MLPProperties(Network, N1, N2, WCount);
//
// Initialize arrays
//
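// (SetLength(..., N-1+1) allocates N elements with valid indices 0..N-1.)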
SetLength(X1, NIn-1+1);
SetLength(X2, NIn-1+1);
SetLength(Y1, NOut-1+1);
SetLength(Y2, NOut-1+1);
//
// Main cycle
//
Pass:=1;
while Pass<=PassCount do
begin
//
// The last pass is made on a zero network
//
MLPRandomizeFull(Network);
ZeroNet := False;
if Pass=PassCount then
begin
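// APVMul with multiplier 0 zeroes all WCount weights in place, so this
// pass checks processing on a network whose weights are all exactly 0.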
APVMul(@Network.Weights[0], 0, WCount-1, 0);
ZeroNet := True;
end;
//
// Same inputs must lead to same outputs
//
I:=0;
while I<=NIn-1 do
begin
X1[I] := 2*RandomReal-1;
X2[I] := X1[I];
Inc(I);
end;
I:=0;
while I<=NOut-1 do
begin
Y1[I] := 2*RandomReal-1;
Y2[I] := 2*RandomReal-1;
Inc(I);
end;
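// Y1 and Y2 start from different random values and must both be
// overwritten by MLPProcess with identical outputs for the identical
// inputs X1 = X2.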
MLPProcess(Network, X1, Y1);
MLPProcess(Network, X2, Y2);
AllSame := True;
I:=0;