📄 pr.pas

📁 ANN And Hopfield Neural Network
💻 PAS
    StringGrid1.Cells[i + 1, 3] := IntToStr(Classifications[i].Misclassified);
    StringGrid1.Cells[i + 1, 4] := FloatToStr(Classifications[i].PercentMisclassified) + '%';
    StringGrid1.Cells[i + 1, 5] := IntToStr(Classifications[i].Total);
  end;
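  //Overall accuracy: the data set appears to hold 825 samples (cf. Random(825) in
  //Train), so dividing the summed Classified counts by 8.25 turns them into a percentage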
  StringGrid1.Cells[5, 2] := FloatToStr(
                             (Classifications[0].Classified +
                              Classifications[1].Classified +
                              Classifications[2].Classified +
                              Classifications[3].Classified) /
                              8.25) + '%';

  if CurClassifier < 10 then begin
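    //Each classifier writes its results into its own chart series, indexed by
    //CurClassifier (the chart is assumed to provide at least ten pre-defined
    //series, hence the guard above)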
    with Chart1.Series[CurClassifier] do begin
      Clear;
      for j := 0 to NumClasses - 1 do begin
        AddY( Classifications[j].PercentClassified,
              'Class ' + IntToStr(j + 1),
              clTeeColor);
      end;

      //Show the overall classification
      AddY((Classifications[0].Classified +
            Classifications[1].Classified +
            Classifications[2].Classified +
            Classifications[3].Classified) /
            8.25,
            'Overall',
            clTeeColor);

    end;
  end;

  //Save results
  AssignFile(F, IntToStr(CurClassifier) + '-res.txt');
  Rewrite(F);
  Writeln(F, 'Centroids');
  for i := Low(Centroids) to NumCentroids - 1 do
    Writeln(F, FloatToStr(Centroids[i].Vector[0]) + ', ' +
               FloatToStr(Centroids[i].Vector[1]) + '          ' +
               IntToStr(Classifications[i].Total) + ' members'
            );

  Writeln(F, 'Analysis');
  for i := 0 to 3 do begin
    Writeln(F, Format('Class %1d: %8d  %8.2f  %8d  %8.2f  %8d',
       [i + 1,
        Classifications[i].Classified,
        Classifications[i].percentClassified,
        Classifications[i].Misclassified,
        Classifications[i].PercentMisclassified,
        Classifications[i].Total]));
  end;
  Writeln(F, FloatToStr(
        (Classifications[0].Classified +
         Classifications[1].Classified +
         Classifications[2].Classified +
         Classifications[3].Classified) /
         8.25) + '%');

  CloseFile(F);

end;

procedure TForm1.Button12Click(Sender: TObject);
begin
  Chart1.CopyToClipboardBitmap;
end;

procedure TForm1.Button13Click(Sender: TObject);
var
  P: TPrinter;
begin

(*  P.BeginDoc;
  PrintSet(P, TrainingSet, True);
  P.EndDoc;
*)
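  //Note: if the block above is re-enabled, P must first be assigned, e.g.
  //P := Printer (from the Printers unit); it is never initialised in this handler.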

//  RichEdit1.Print('');

end;

procedure TForm1.PrintSet( var P: TPrinter;
                           Samples: array of TFeature;
                           ShowErrors: Boolean);
var
  i: Integer;
begin
  for i := Low(Samples) to High(Samples) do
    PrintSample(P, Samples[i], ShowErrors);

end;

procedure TForm1.PrintSample(var P: TPrinter; Sample: TFeature; ShowError: Boolean);
var
  X, Y: Integer;
begin
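  //Each sample is drawn as a small dot in its computed-class colour (or its
  //original-class colour if it has not been classified); misclassified samples
  //additionally get a larger ring in their original-class colour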
  X := XOrig + Trunc(Sample.Vector[0] * XScale);
  Y := YOrig  + Trunc(Sample.Vector[1] * YScale);

  if ShowError then
    //if misclassified, mark it...
    if (Sample.OrigClass <> Sample.CompClass) and
       (Sample.CompClass >= 0 ) then begin
      P.Canvas.Pen.Color := Colors[Sample.OrigClass];
      P.Canvas.Ellipse(X-1, Y-1, X + 3, Y + 3);
    end;

  if Sample.CompClass >= 0 then
    P.Canvas.Pen.Color := Colors[Sample.CompClass]
  else
    P.Canvas.Pen.Color := Colors[Sample.OrigClass];

  P.Canvas.Ellipse(X, Y, X + 2, Y + 2);

end;

procedure TForm1.Button2Click(Sender: TObject);
begin
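  //Create a feed-forward net with 2 inputs, two hidden layers of 10 nodes each
  //and 4 outputs (one per class); the meaning of the second Create argument is
  //defined by TFeedForward outside this unit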
  FeedForward := TFeedForward.Create([2,10,10,4], 1);
  DisplayNetwork;
  BuildDataSets;
  Randomize;

(*  RichEdit1.Print('');
  FeedForward.Train([0.3, 0.7], [0.1, 0.1, 0.1, 0.9]);
  DisplayNetwork;
  RichEdit1.Print('');
*)
  //train
(*  for i := 1 to StrToInt(Edit1.Text) do begin
    Train;
  end;
  recall;
*)


end;

procedure TForm1.DisplayNet;
var
  X, Y,
  LayerNum, NodeNum, NextNodeNum: Integer;
begin
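  //Draw every node as a small red circle; layers are spaced 80 px apart
  //horizontally and nodes 30 px apart vertically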
  for LayerNum := 0 to FeedForward.Layers.Count - 1 do begin
    X := 80 + LayerNum * 80;
    //iterate through the layer
    for NodeNum := 0 to TList(FeedForward.Layers.Items[LayerNum]).Count - 1 do begin
      Y := 80 + NodeNum * 30;
      //iterate through the next layer
//      for NextNodeNum := 0 to NumNodes[LayerNum + 1] - 1 do begin

        //display each node

        Self.Canvas.Pen.Color := clRed;
        Self.Canvas.Ellipse(X, Y, X + 5, Y + 5);

//      end;
    end;
  end;

end;

procedure TForm1.FormDestroy(Sender: TObject);
begin
  FeedForward.Free;

end;

procedure TForm1.DisplayNetwork;
var
  i, j, k: Integer;
  CurLayer: TNodeVector;
  CurNode: TNode;
  CurEdge: TEdge;
begin
//  RichEdit1.Clear;
  with FeedForward do begin
    for j := 0 to Layers.Count - 1 do begin
      CurLayer := TNodeVector(Layers.Items[j]);
      for i := 0 to CurLayer.Count - 1 do begin
        CurNode := TNode(CurLayer.Items[i]);
//        RichEdit1.Lines.Add( CurNode.fTag + ': ' +
//                             FloatToStr(CurNode.Delta) + ' ' +
//                             FloatToStr(CurNode.Net) + ' ' +
//                             FloatToStr(CurNode.NOut));
        if j < Layers.Count - 1 then  //if not the output layer
          for k := 0 to CurNode.EdgesOut.Count - 1 do begin
            CurEdge := TEdge(CurNode.EdgesOut.Items[k]);
//            RichEdit1.Lines.Add( CurEdge.fTag + ': ' +
//                                 FloatToStr(CurEdge.Weight) + ' ' +
//                                 CurEdge.ToNode.fTag);

          end

      end;
    end;
  end;
end;

procedure TForm1.Train;
var
  j: Integer;
  C1, C2, C3, C4: Extended;
  Error: Extended;
begin
//    Randomize;
    j := Random(825);
    C1 := 0.001;
    C2 := 0.001;
    C3 := 0.001;
    C4 := 0.001;
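    //The target vector is effectively one-hot: 0.999 for the sample's class and
    //0.001 for the rest, keeping targets strictly inside (0,1) so the (presumably
    //sigmoid) output units can actually reach them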
    case TrainingSet[j].OrigClass of
      0: C1 := 0.999;
      1: C2 := 0.999;
      2: C3 := 0.999;
      3: C4 := 0.999;
    end;

    //Normalize data.  X values are between 5..70 and y values 5..170
//    FeedForward.Train([(TrainingSet[j].Vector[0] - 5) / 70,
//                       (TrainingSet[j].Vector[1] - 5) / 170],
 //                      [C1, C2, C3, C4]);

//    Error := FeedForward.Train([(TrainingSet[j].Vector[0]/100),
//                       (TrainingSet[j].Vector[1]/100)],
//                       [C1, C2, C3, C4]);
    Error := FeedForward.Train([(TrainingSet[j].Vector[0]),
                       (TrainingSet[j].Vector[1])],
                       [C1, C2, C3, C4]);

    Edit3.Text := IntToStr(TrainingSet[j].OrigClass);
    Edit4.Text := FloatToStr(Error);

end;

function TForm1.TrainEven(ErrorTolerance: Extended): Boolean;
var
  j: Integer;
  Error: array [0..3] of Extended;
begin
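    //Train on one randomly chosen sample from each of the four classes per call,
    //so the classes are presented evenly; Random(...COUNT div 2) suggests only the
    //first half of each class is used for training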

    j := Random(CLASS1COUNT div 2);
    Error[0] :=
    FeedForward.Train([(Class1[j].Vector[0]),
                       (Class1[j].Vector[1])],
                       [0.999, 0.001, 0.001, 0.001]);

    j := Random(CLASS2COUNT div 2);
    Error[1] :=
    FeedForward.Train([(Class2[j].Vector[0]),
                       (Class2[j].Vector[1])],
                       [0.001, 0.999, 0.001, 0.001]);

    j := Random(CLASS3COUNT div 2);
    Error[2] :=
    FeedForward.Train([(Class3[j].Vector[0]),
                       (Class3[j].Vector[1])],
                       [0.001, 0.001, 0.999, 0.001]);

    j := Random(CLASS4COUNT div 2);
    Error[3] :=
    FeedForward.Train([(Class4[j].Vector[0]),
                       (Class4[j].Vector[1])],
                       [0.001, 0.001, 0.001, 0.999]);

    Result := (Error[0] < ErrorTolerance) and
              (Error[1] < ErrorTolerance) and
              (Error[2] < ErrorTolerance) and
              (Error[3] < ErrorTolerance);

end;

procedure TForm1.Recall;
var
  i, j: Integer;
  OutLayer: TNodeVector;
  MaxVal: Extended;
  MaxClass: Integer;
begin
//Edit3.Text := IntToStr(UnknownSet[i].OrigClass);
  for i := Low(UnknownSet) to High(UnknownSet) do begin
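    //Note: inputs are scaled by 1/100 here, whereas Train currently feeds the raw
    //coordinates; recall and training should use the same scaling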
    FeedForward.Feed([UnknownSet[i].Vector[0]/100, UnknownSet[i].Vector[1]/100]);
    OutLayer := TNodeVector(FeedForward.Layers[FeedForward.Layers.Count - 1]);

    //Find the output node with the maximum activation
    MaxVal := -99999999;
    MaxClass := 0;
    for j := 0 to OutLayer.Count - 1 do begin
      if MaxVal < TNode(OutLayer.Items[j]).NOut then begin
        MaxVal := TNode(OutLayer.Items[j]).NOut;
        MaxClass := j;
      end;
    end;
    UnknownSet[i].CompClass := MaxClass;
  end;

  DisplayNetwork;
  DisplayResults(UnknownSet);
end;

procedure TForm1.Button3Click(Sender: TObject);
begin
  TrainEven(StrToFloat(Edit5.Text));
  DisplayNetwork;
end;

procedure TForm1.Button4Click(Sender: TObject);
begin
  Recall;
end;

procedure TForm1.Button5Click(Sender: TObject);
var
  i: Integer;
begin
  for i := 1 to StrToInt(Edit1.Text) do begin
    Edit6.Text := IntToStr(i);
    //if it has converged
    if TrainEven(StrToFloat(Edit5.Text)) then
      break;
  end;
  DisplayNetwork;
end;

end.
