// ann.pas
//Copyright Ramesh Sabeti - sabeti@reazon.com
unit ANN;
interface
uses Math, Dialogs, SysUtils, classes;
const
NUM_FEATURES = 2;
NUM_PIXELS = 100;
type
TCharMatrix = array [0..NUM_PIXELS - 1] of Integer;
TNeuralNet = class;
// Hopfield auto-associative memory over NUM_PIXELS units with states +1/-1.
THopfield = class
private
// Symmetric Hebbian weights; the diagonal (self-feedback) is never written.
WeightMatrix: array [0..NUM_PIXELS - 1, 0..NUM_PIXELS - 1] of Extended;
// Current unit states. NOTE(review): NewState is declared but never used
// by the visible implementation.
State,
NewState: TCharMatrix;
// Empty stub; convergence happens inline in Retrieve.
procedure Converge;
public
constructor Create;
// Adds one training pattern to the weight matrix (Hebbian outer product).
procedure Learn(Sample: TCharMatrix);
// Relaxes Probe toward a stored pattern and returns the settled state.
function Retrieve(Probe: TCharMatrix): TCharMatrix;// var Output: array of Extended);
// Counts units whose recomputed sign disagrees with the current state.
function Hamming: Integer;
// Thresholds New to +1/-1, keeping Prev when New is exactly zero.
function Normalize(New: Extended; Prev: Integer): Integer;
// Zeroes the weight matrix.
procedure Clear;
end;
TNodeType = (ndIn, ndHidden, ndOut);
// One neuron. Fields are unit-visible; fTag identifies the node as "layer,index".
TNode = class
ANN: TNeuralNet;          // owning network (source of the learning rate)
NType: TNodeType;
Net, NOut,                // weighted input sum and sigmoid output
Delta: Extended;          // backpropagated error term
EdgesIn, EdgesOut: TList; // incoming / outgoing TEdge references
public
fTag: String;
constructor Create(NeuralNet: TNeuralNet; NodeType: TNodeType; Tag: string);
// 'override' is required so Free (virtual dispatch) actually runs this
// destructor; without it the declaration hid TObject.Destroy and the
// edge-freeing cleanup below never executed.
destructor Destroy; override;
procedure Update;
function GetNet: Extended;
function GetNOut: Extended;
end;
TNodeVector = class(TList);
// Weighted directed connection between two nodes.
TEdge = class
// NOTE(review): LearningFactor is declared but unused; the actual rate
// comes from ToNode.ANN.fLearningFactor in UpdateWeight.
LearningFactor,
// Weight before the latest update; BackProp reads OldWeight when
// propagating deltas to the upstream layer.
OldWeight,
Weight : Extended;
FromNode, ToNode: TNode;
public
fTag: String;
constructor Create(NodeFrom, NodeTo: TNode; Tag: String);
// Applies the delta-rule weight change using ToNode's Delta.
procedure UpdateWeight;
end;
// Base class for neural networks; holds the learning rate shared by all edges.
TNeuralNet = class
private
fLearningFactor : Extended;
public
constructor Create(LearningFactor: Extended);
// 'override' is required so Free (virtual dispatch) runs this destructor
// chain instead of stopping at TObject.Destroy.
destructor Destroy; override;
// Creates a directed edge NodeA -> NodeB, registered on both endpoints.
procedure Connect(NodeA, NodeB: TNode);
end;
// Multi-layer feed-forward network trained by backpropagation.
// Layers holds one TNodeVector per layer, input layer first.
TFeedForward = class(TNeuralNet)
Layers: TList;
public
constructor Create(NumNodes: array of Integer; LearningFactor: Extended);
// 'override' so Free reaches the layer/node cleanup (see TNeuralNet.Destroy).
destructor Destroy; override;
// Presents one input vector and propagates activations forward.
procedure Feed(Sample: array of Extended);
// Feeds Sample, returns the pre-update error, then backpropagates.
function Train( Sample: array of Extended;
DesiredOut: array of Extended): Extended;
procedure BackProp( Sample: array of Extended;
DesiredOut: array of Extended);
// Error measure of the output layer against DesiredOut.
function Error(DesiredOut: array of Extended): Extended;
end;
TMatrix = array [0..NUM_FEATURES - 1, 0..NUM_FEATURES - 1] of Extended;
TFeatureVector = array [0 .. NUM_FEATURES - 1] of Extended;
TFeature = record
Vector: TFeatureVector;
OrigClass, CompClass: Integer;
end;
TAnalysis = record
Total,
Classified,
MisClassified: Integer;
PercentClassified,
PercentMisClassified: Extended;
end;
procedure InitClass ( var Samples: array of TFeature;
classNum: Integer;
Means, StdDevs: array of Extended);
procedure ResultAnalysis( Samples: array of TFeature;
var Results: array of TAnalysis;
TrainingSetCounts: array of Integer);
implementation
// Builds a node for NeuralNet of the given type; Tag is a "layer,index"
// label used for edge naming. Edge lists start empty.
constructor TNode.Create(NeuralNet: TNeuralNet; NodeType: TNodeType; Tag: string);
begin
  inherited Create;
  ANN      := NeuralNet;
  NType    := NodeType;
  fTag     := Tag;
  Delta    := 0;
  EdgesIn  := TList.Create;
  EdgesOut := TList.Create;
end;
// Stores the learning rate later read by TEdge.UpdateWeight.
constructor TNeuralNet.Create(LearningFactor: Extended);
begin
Inherited Create;
fLearningFactor := LearningFactor;
end;
{ Builds the layered topology: NumNodes[i] nodes in layer i (first layer
  is ndIn, last is ndOut, the rest ndHidden), then fully connects every
  layer to the next with randomly weighted edges. Node tags are "i,j". }
constructor TFeedForward.Create( NumNodes: array of Integer;
                                 LearningFactor: Extended);
var
  Layer, Node, Target: Integer;
  Kind: TNodeType;
  Vector: TNodeVector;
begin
  inherited Create(LearningFactor);
  Layers := TList.Create;
  // Build one node vector per layer.
  for Layer := Low(NumNodes) to High(NumNodes) do begin
    Vector := TNodeVector.Create;
    Layers.Add(Vector);
    // The node type is the same for every node in a layer.
    if Layer = Low(NumNodes) then
      Kind := ndIn
    else if Layer = High(NumNodes) then
      Kind := ndOut
    else
      Kind := ndHidden;
    for Node := 1 to NumNodes[Layer] do
      Vector.Add(TNode.Create(Self, Kind,
                 IntToStr(Layer) + ',' + IntToStr(Node)));
  end;
  // Fully connect each layer to its successor; TEdge.Create draws the
  // random initial weights, hence the Randomize here.
  Randomize;
  for Layer := Low(NumNodes) to High(NumNodes) - 1 do
    for Node := 0 to NumNodes[Layer] - 1 do
      for Target := 0 to NumNodes[Layer + 1] - 1 do
        Connect(TNode(TNodeVector(Layers.Items[Layer]).Items[Node]),
                TNode(TNodeVector(Layers.Items[Layer + 1]).Items[Target]));
end;
// Creates one directed edge A->B, tagged "Atag->Btag", and registers it
// on A's outgoing and B's incoming edge lists.
procedure TNeuralNet.Connect( NodeA, NodeB: TNode);
var
  Link: TEdge;
begin
  Link := TEdge.Create(NodeA, NodeB, NodeA.fTag + '->' + NodeB.fTag);
  NodeA.EdgesOut.Add(Link);
  NodeB.EdgesIn.Add(Link);
end;
// Edge from NodeFrom to NodeTo with a random initial weight drawn
// uniformly from {0.00, 0.01, ..., 0.99}.
constructor TEdge.Create(NodeFrom, NodeTo: TNode; Tag: string);
begin
  inherited Create;
  fTag     := Tag;
  FromNode := NodeFrom;
  ToNode   := NodeTo;
  Weight   := Random(100) / 100;
end;
{ Fills Samples with synthetic members of class classNum: each feature j
  is drawn from a Gaussian with Means[j] / StdDevs[j]. CompClass is reset
  to -1, marking the sample as not yet classified. }
procedure InitClass ( var Samples: array of TFeature;
                      classNum: Integer;
                      Means, StdDevs: array of Extended);
var
  SampleIdx, FeatIdx: Integer;
begin
  for SampleIdx := Low(Samples) to High(Samples) do
    with Samples[SampleIdx] do begin
      for FeatIdx := Low(Vector) to High(Vector) do
        Vector[FeatIdx] := RandG(Means[FeatIdx], StdDevs[FeatIdx]);
      OrigClass := classNum;
      CompClass := -1;
    end;
end;
{ Tallies per-class classification results into Results.
  Classified/MisClassified are counted by the sample's true class;
  Total counts how many samples the classifier assigned to each class.
  Fixes: a sample whose CompClass is out of range (e.g. the -1
  "unclassified" marker set by InitClass) no longer indexes Results out
  of bounds, and a zero TrainingSetCounts entry no longer divides by
  zero (its percentages are reported as 0). }
procedure ResultAnalysis( Samples: array of TFeature;
var Results: array of TAnalysis;
TrainingSetCounts: array of Integer);
var
  i: Integer;
begin
  // Initialize the results array.
  for i := Low(Results) to High(Results) do begin
    Results[i].Classified := 0;
    Results[i].MisClassified := 0;
    Results[i].Total := 0;
  end;
  // Count classified and misclassified samples per true class.
  for i := Low(Samples) to High(Samples) do begin
    if Samples[i].OrigClass = Samples[i].CompClass then
      Results[Samples[i].OrigClass].Classified :=
        Results[Samples[i].OrigClass].Classified + 1
    else
      Results[Samples[i].OrigClass].MisClassified :=
        Results[Samples[i].OrigClass].MisClassified + 1;
    // Guard: CompClass is -1 for samples never classified.
    if (Samples[i].CompClass >= Low(Results)) and
       (Samples[i].CompClass <= High(Results)) then
      Results[Samples[i].CompClass].Total :=
        Results[Samples[i].CompClass].Total + 1;
  end;
  // Percentages relative to the training-set size of each class.
  for i := Low(Results) to High(Results) do begin
    if TrainingSetCounts[i] > 0 then begin
      Results[i].PercentClassified := Results[i].Classified /
                                      TrainingSetCounts[i] * 100;
      Results[i].PercentMisClassified := Results[i].MisClassified /
                                         TrainingSetCounts[i] * 100;
    end
    else begin
      Results[i].PercentClassified := 0;
      Results[i].PercentMisClassified := 0;
    end;
  end;
end;
// No resources of its own; exists so descendants can chain via inherited.
// NOTE(review): the declaration must carry 'override' for Free to reach
// this destructor at all.
destructor TNeuralNet.Destroy;
begin
inherited Destroy;
end;
{ Frees the whole topology. The original cast each Layers item to TNode,
  but Layers stores TNodeVector lists — the nodes inside each layer and
  the Layers list itself were never freed. Free every node, then its
  vector, then Layers. }
destructor TFeedForward.Destroy;
var
  i, j: Integer;
  Layer: TNodeVector;
begin
  for i := 0 to Layers.Count - 1 do begin
    Layer := TNodeVector(Layers.Items[i]);
    for j := 0 to Layer.Count - 1 do
      TNode(Layer.Items[j]).Free;
    Layer.Free;
  end;
  Layers.Free;
  inherited Destroy;
end;
{ Frees this node's edges and edge lists. Every edge appears in one
  node's EdgesOut and another's EdgesIn; the original freed it from both
  lists, double-freeing each edge when a whole net is torn down. Each
  edge is owned by its FromNode, so only EdgesOut entries are freed here.
  The TList containers themselves (previously leaked) are freed too. }
destructor TNode.Destroy;
var
  i: Integer;
begin
  for i := 0 to EdgesOut.Count - 1 do
    TEdge(EdgesOut.Items[i]).Free;
  EdgesIn.Free;
  EdgesOut.Free;
  inherited Destroy;
end;
{ Recomputes this node's activation. Hidden/output nodes accumulate the
  weighted outputs of their predecessors into Net; input nodes keep the
  Net value assigned by Feed. Either way NOut becomes the logistic
  sigmoid of Net (note: the sigmoid is applied to input nodes as well). }
procedure TNode.Update;
var
  k: Integer;
  Edge: TEdge;
begin
  if NType <> ndIn then begin
    Net := 0;
    for k := 0 to EdgesIn.Count - 1 do begin
      Edge := TEdge(EdgesIn.Items[k]);
      Net := Net + Edge.FromNode.NOut * Edge.Weight;
    end;
  end;
  NOut := 1 / (1 + Exp(-Net));
end;
// Accessor: most recent weighted input sum (set by Update or Feed).
function TNode.GetNet: Extended;
begin
Result := Net;
end;
// Accessor: most recent sigmoid output (set by Update).
function TNode.GetNOut: Extended;
begin
Result := NOut;
end;
//This function trains the net on one sample, and returns the error measure
//computed BEFORE the weight update: the order Feed -> Error -> BackProp
//is deliberate, so callers see the pre-training error for this sample.
function TFeedForward.Train( Sample: array of Extended;
DesiredOut: array of Extended): Extended;
begin
Feed(Sample);
Result := Error(DesiredOut);
BackProp(Sample, DesiredOut);
end;
{ Mean squared error of the output layer against DesiredOut.
  Fix: the sum of squares was divided by Count - 1, which divides by
  zero for a single-output network; divide by Count (standard MSE). }
function TFeedForward.Error(DesiredOut: array of Extended): Extended;
var
  OutLayer: TNodeVector;
  i: Integer;
begin
  OutLayer := TNodeVector(Layers.Items[Layers.Count - 1]);
  Result := 0;
  for i := 0 to OutLayer.Count - 1 do
    Result := Result + Sqr(DesiredOut[i] - TNode(OutLayer.Items[i]).NOut);
  Result := Result / OutLayer.Count;
end;
{ One backpropagation pass; assumes Feed has just run.
  Layers are walked from output to input. Within a layer, each node's
  Delta is computed first, then its incoming weights are updated.
  Upstream layers read each edge's OldWeight (saved by UpdateWeight),
  so updating a layer's EdgesIn before moving upstream is safe.
  NOTE(review): Sample is unused here (inputs were loaded by Feed), and
  the loop also visits the input layer, where the hidden-node branch
  computes an unused Delta and EdgesIn is empty — harmless but wasteful. }
procedure TFeedForward.Backprop( Sample: array of Extended;
DesiredOut: array of Extended);
var
i, j, k, l: Integer;
CurLayer: TNodeVector;
CurNode, NextNode : TNode;
CurEdge, EdgeOut: TEdge;
Accum: Extended;
begin
//For all hidden and output layers
for j := Layers.Count - 1 downto 0 do begin
CurLayer := TNodeVector(Layers.Items[j]);
//for each node in the layer
for i := 0 to CurLayer.Count - 1 do begin
CurNode := TNode(CurLayer.Items[i]);
if CurNode.NType = ndOut then begin //if output node
//delta = (target - out) * out * (1 - out); the out*(1-out) factor is
//the derivative of the logistic sigmoid used in TNode.Update
CurNode.Delta := (DesiredOut[i] - CurNode.NOut) *
CurNode.NOut * (1 - CurNode.NOut);
end
else begin //if hidden node
//delta = out * (1 - out) * sum over outgoing edges of oldweight * next delta
Accum := 0;
for l := 0 to CurNode.EdgesOut.Count - 1 do begin
EdgeOut := CurNode.EdgesOut.Items[l];
NextNode := TNode(EdgeOut.ToNode);
Accum := Accum + (EdgeOut.OldWeight * NextNode.Delta);
end;
CurNode.Delta := CurNode.NOut * (1 - CurNode.NOut) * Accum;
end;
//apply the delta rule to this node's incoming weights
for k := 0 to CurNode.EdgesIn.Count - 1 do begin
CurEdge := TEdge(CurNode.EdgesIn.Items[k]);
CurEdge.UpdateWeight;
end;
end;
end;
end;
// Delta rule: w += eta * delta(to) * out(from). The pre-update weight is
// preserved in OldWeight for BackProp's upstream delta computation.
procedure TEdge.UpdateWeight;
var
  Adjustment: Extended;
begin
  OldWeight := Weight;
  Adjustment := ToNode.ANN.fLearningFactor * ToNode.Delta * FromNode.NOut;
  Weight := OldWeight + Adjustment;
end;
{ Presents one input vector and propagates activations forward.
  Sample is assumed to match the input layer's node count — TODO confirm
  against callers. Update applies the sigmoid to the input values too. }
procedure TFeedForward.Feed( Sample: array of Extended );
var
  LayerIdx, NodeIdx: Integer;
  Layer: TNodeVector;
  Node: TNode;
begin
  // Load the sample into the input layer.
  Layer := TNodeVector(Layers.Items[0]);
  for NodeIdx := Low(Sample) to High(Sample) do begin
    Node := TNode(Layer.Items[NodeIdx]);
    Node.Net := Sample[NodeIdx];
    Node.Update;
  end;
  // Propagate layer by layer toward the output.
  for LayerIdx := 1 to Layers.Count - 1 do begin
    Layer := TNodeVector(Layers.Items[LayerIdx]);
    for NodeIdx := 0 to Layer.Count - 1 do
      TNode(Layer.Items[NodeIdx]).Update;
  end;
end;
// Intentionally empty: the convergence loop lives inline in Retrieve.
// NOTE(review): stub — confirm whether it was meant to hold that loop.
procedure THopfield.Converge;
begin
end;
// Starts with an all-zero weight matrix (no stored patterns).
// Unused loop locals i, j removed.
constructor THopfield.Create;
begin
  inherited Create;
  Clear;
end;
{ Hebbian learning: adds Sample's outer product to the weight matrix,
  skipping the diagonal so no unit feeds back onto itself. Sample values
  are expected to be +1/-1. Unused local k removed. }
procedure THopfield.Learn(Sample: TCharMatrix);
var
  i, j: Integer;
begin
  for i := Low(Sample) to High(Sample) do
    for j := Low(Sample) to High(Sample) do
      if i <> j then
        WeightMatrix[i, j] := WeightMatrix[i, j] + Sample[i] * Sample[j];
end;
{ Relaxes Probe toward a stored pattern by sequential (serial) updates,
  stopping after 5000 iterations or once fewer than 10 units disagree
  with their recomputed sign (Hamming < 10).
  Fixes: the update index was read before ever being assigned (classic
  uninitialized-local bug) — it now starts so that node 0 is updated
  first; unused locals x, NodeSet and Converged removed; the Integer
  result of Normalize is no longer funneled through an Extended + Trunc. }
function THopfield.Retrieve( Probe: TCharMatrix): TCharMatrix;
var
  i, j, iter, Updated: Integer;
  Sum: Extended;
begin
  //Initialize
  State := Probe;
  Randomize; // kept from the original; no Random call depends on it here
  j := -1;   // (j + 1) mod NUM_PIXELS makes the first update visit node 0
  iter := 0;
  //Converge
  while iter < 5000 do begin
    Inc(iter);
    j := (j + 1) mod NUM_PIXELS; //serial update
    Sum := 0;
    for i := Low(State) to High(State) do
      Sum := Sum + (WeightMatrix[j, i] * State[i]);
    Updated := Normalize(Sum, State[j]);
    if Updated <> State[j] then begin
      State[j] := Updated;
      if Hamming < 10 then //has converged
        Break;
    end;
  end;
  //Output
  Result := State;
end;
{ Counts units whose recomputed activation sign disagrees with the
  current state — a stability (distance-to-fixed-point) measure.
  Fix: the inner sum multiplied WeightMatrix[i, j] by State[i] instead
  of State[j]; the loop runs over j, so the row must be dotted with the
  full state vector. The per-unit Y array is replaced by one scalar. }
function THopfield.Hamming: Integer;
var
  i, j: Integer;
  Sum: Extended;
begin
  Result := 0;
  for i := Low(State) to High(State) do begin
    Sum := 0;
    for j := Low(State) to High(State) do
      Sum := Sum + WeightMatrix[i, j] * State[j];
    if Normalize(Sum, State[i]) <> State[i] then
      Inc(Result); //increment distance
  end;
end;
// Sign threshold with memory: +1 for positive input, -1 for negative,
// and the previous state when the input is exactly zero.
function THopfield.Normalize(New: Extended; Prev: Integer): Integer;
begin
  Result := Prev; // default: zero input leaves the unit unchanged
  if New > 0 then
    Result := 1
  else if New < 0 then
    Result := -1;
end;
// Zeroes every connection weight, erasing all stored patterns.
procedure THopfield.Clear;
var
  Row, Col: Integer;
begin
  for Row := Low(WeightMatrix) to High(WeightMatrix) do
    for Col := Low(WeightMatrix[Row]) to High(WeightMatrix[Row]) do
      WeightMatrix[Row, Col] := 0;
end;
end.
{ (web-viewer hotkey chrome removed; text after the final 'end.' is ignored by the compiler anyway) }