⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 bp算法.txt

📁 数学建模用到的常见算法 神经网络
💻 TXT
📖 第 1 页 / 共 2 页
字号:
0.5
-5.469
3
4
-5
6
-7
8
-0.9
0.25
0.75
-1
-1.24


////////////////////使用例子2 - BP.in/////////////////////

BPTest3.dll
4 2 3 3 2
0.00000001 0.3 20 0 1000000 0.3
0.05 0.02
1 0
0.09 0.11
1 0
0.12 0.20
1 0
0.15 0.22
1 0
0.20 0.25
1 0
0.75 0.75
0 1
0.80 0.83
0 1
0.82 0.80
0 1
0.90 0.89
0 1
0.95 0.89
0 1
0.09 0.04
1 0
0.10 0.10
1 0
0.14 0.21
1 0
0.18 0.24
1 0
0.22 0.28
1 0
0.77 0.78
0 1
0.79 0.81
0 1
0.84 0.82
0 1
0.94 0.93
0 1
0.98 0.99
0 1

///////////////BPTest3.dll 源程序////////////////////

library BPTest3;

{ Activation functions (and their derivatives) exported to the BP
  trainer.  The input layer uses the identity; the hidden and output
  layers use the logistic sigmoid 1/(1+e^-x). }

function layer1(input: Extended): Extended; stdcall;
begin
  Result := input;  // identity activation for the input layer
end;

function middle_layer(input: Extended): Extended; stdcall;
begin
  Result := 1 / (1 + exp(-input));  // logistic sigmoid
end;

function last_layer(input: Extended): Extended; stdcall;
begin
  Result := 1 / (1 + exp(-input));  // logistic sigmoid
end;

function d_layer1(input: Extended): Extended; stdcall;
begin
  Result := 1;  // derivative of the identity
end;

function d_middle_layer(input: Extended): Extended; stdcall;
var e, denom: Extended;
begin
  // sigmoid'(x) = e^-x / (1 + e^-x)^2, same operation order as before
  e := exp(-input);
  denom := 1 + e;
  Result := e / (denom * denom);
end;

function d_last_layer(input: Extended): Extended; stdcall;
var e, denom: Extended;
begin
  e := exp(-input);
  denom := 1 + e;
  Result := e / (denom * denom);
end;

exports
  layer1,
  middle_layer,
  last_layer,
  d_layer1,
  d_middle_layer,
  d_last_layer;

begin
end.


最近才发现这个程序有点问题,改正和改进了,如下:

{
  BP 算法 源程序
  ** 1. 用BP算法对神经网络进行训练
  ** 2. 对额外的神经网络输入计算输出值
  2005, by stlxv
  stlxv@21cn.com

  输入文件:BP.in
  第一行:DLL文件名
  第二行:layer_count, n(1), n(2), ... , n(layer_count) ;n(i) <--第i层神经元个数
  第三行:允许误差ε 学习率η 学习实例个数N 附加测试输入个数M 迭代次数(若0则不计迭代次数) 最低学习率min_yita
    // 若迭代次数为零,则最低学习率可以不用写
    // 若迭代次数不为零,则学习率为最高学习率,且必须输入最低学习率
  接下来是学习实例,每个学习实例2行:
    第一行:输入的n(1)个数
    第二行:期望的输出
  再接下来是M个测试输入
    测试输入
  N<=40,M<=100
  n(i)<=MAX_note  // 通过修改常量MAX_note来修改
  3<=layer_count<=MAX_layer_count  // 通过修改常量MAX_layer_count来修改
  0<η<1,推荐0.3<=η<=0.9
  输入:[0,1]

  注意:误差E[j,k]=1/2*(y*[j,k]-y[j,k])^2<=ε

  输出文件:BP.out
  第一行:总迭代次数
  接下来N行:对应每个学习实例的输出和测试输入的输出
  接下来为所有权值
}

uses
  SysUtils,
  Windows,
  ClassFunctionsUnit in 'ClassFunctionsUnit.pas';

{ Global Variables }

const MAX_note=20;        // maximum number of neurons in one layer
      MAX_layer_count=10; // maximum number of layers

var
  yita,epsilon,min_yita,delta_yita:Extended;
  layer_count,t,k,instance_count,total_time,ExtraCount:integer;
    // t - iteration counter, k - index of the current learning instance,
    // total_time - iteration limit (0 = no limit)
  func:TFunctions;
  n:array[1..MAX_layer_count] of integer; // neurons per layer
  instances:array[1..140] of record // N<=40 learning instances + M<=100 test inputs
    X,Y:array[1..MAX_note] of Extended;
    end;

  W:array[1..MAX_layer_count-1] of array[1..MAX_note,1..MAX_note] of Extended;
  IO_map:array[1..MAX_layer_count,1..MAX_note] of record
    I,O:Extended;  // fixed: "I,Extended;" had lost the O field name; the rest of the program reads .I and .O
    end;
  E:array[1..MAX_note] of Extended;  // per-output-neuron error of the net
  delta:array[1..MAX_layer_count,1..MAX_note] of Extended;

{ Global Procedures }

  { LoadFromFile: read the whole problem description from BP.in
    (layout documented in the program header comment) and create the
    wrapper around the activation-function DLL. }
  procedure LoadFromFile;
  var F:TextFile;
      FileName:String;
      i,j:integer;
  begin
  AssignFile(F,'BP.in');
  Reset(F);
  // line 1: name of the DLL exporting the activation functions
  Readln(F,FileName);

  func:=TFunctions.Create(FileName);

  // line 2: layer count, then the neuron count of each layer
  Read(F,layer_count);

  for i:=1 to layer_count do
    read(f,n[i]);
  Readln(F);

  // line 3: epsilon, eta, N, M, iteration limit; the minimum learning
  // rate is only present when the iteration limit is non-zero
  Read(F,epsilon);
  read(F,yita);
  Read(F,instance_count);
  Read(F,ExtraCount);
  Read(F,total_time);
  if total_time>0 then Read(F,min_yita);
  Readln(F);

  // N learning instances: one line of inputs, one line of targets
  for i:=1 to instance_count do
    begin
    for j:=1 to n[1] do
      read(f,instances[i].X[j]);
    Readln(F);
    for j:=1 to n[layer_count] do
      read(F,Instances[i].Y[j]);
    Readln(F);
    end;
  // M extra test inputs, stored right after the learning instances
  if ExtraCount>0 then
    for i:=instance_count+1 to ExtraCount+instance_count do
      begin
      for j:=1 to n[1] do
        read(f,instances[i].X[j]);
      Readln(F);
      end;

  CloseFile(F);
  end;

  procedure MapIO;forward;

  { WriteResultToFile: write BP.out - the total iteration count, then
    one "inputs --> outputs" line per learning instance and per test
    input, then every weight.  Drives the global k through all stored
    instances and calls MapIO to evaluate the trained net for each. }
  procedure WriteResultToFile;
  var F:TextFile;
      i,j,r:integer;
  begin
  AssignFile(F,'BP.out');
  Rewrite(F);
  // t was incremented once past the last completed iteration
  Writeln(F,t-1);
  //for k:=1 to instance_count do
  k:=1;
  while k<=instance_count+ExtraCount do
    begin
    MapIO;
    // comma-separated inputs of instance k
    if n[1]>1 then
      for i:=1 to n[1]-1 do
        write(F,Format('%8.8f,',[Instances[k].X[i]]));
    write(F,Format('%8.8f',[Instances[k].X[n[1]]]));
    write(F,'   -->   ');
    // comma-separated outputs of the last layer
    if n[layer_count]>1 then
      for i:=1 to n[layer_count]-1 do
        write(F,Format('%8.8f,',[IO_map[layer_count,i].O]));
    writeln(F,Format('%8.8f',[IO_map[layer_count,n[layer_count]].O]));
    k:=k+1;
    end;
  // dump every weight as w(layer,from->layer+1,to)
  for i:=1 to layer_count-1 do
    for j:=1 to n[i] do
      for r:=1 to n[i+1] do
        Writeln(F,Format('w(%d,%d->%d,%d): %8.8f',[i,j,i+1,r,W[i][j,r]]));
  CloseFile(F);
  end;

  { Init: give every weight a small random start value and reset the
    iteration counter, instance index and learning-rate decrement. }
  procedure Init;
  var layer,src,dst:integer;
  begin
  // random initial weights in [0, 0.099]
  for layer:=1 to layer_count-1 do
    for src:=1 to n[layer] do
      for dst:=1 to n[layer+1] do
        W[layer][src,dst]:=Random(100)/1000;
  // start at iteration 1 with the first learning instance
  t:=1;
  k:=1;
  // per-iteration learning-rate step (only when an iteration limit is set)
  if total_time>0 then
    delta_yita:=(yita-min_yita)/total_time
  else
    delta_yita:=0;
  end;

  { computeIn: net input of neuron q in the given layer.  For the input
    layer this is the q-th component of the current instance (global k);
    otherwise it is the weighted sum of the previous layer's outputs. }
  function computeIn(layer,q:integer):Extended;
  var src:integer;
      acc:Extended;
  begin
  if layer=1 then
    Result:=Instances[k].X[q]
  else
    begin
    acc:=0;
    for src:=1 to n[layer-1] do
      acc:=acc+IO_map[layer-1,src].O*W[layer-1][src,q];
    Result:=acc;
    end;
  end;

  { MapIO 搞定所有神经元的输入输出值 }
  procedure MapIO;
  var i,j:integer;
  begin
  for i:=1 to layer_count do
    for j:=1 to n[i] do
      begin
      IO_map[i,j].I:=computeIn(i,j);
      if i=1 then
        IO_map[i,j].=func.layer1(IO_map[i,j].I)
      else begin
        if i=layer_count then
          IO_map[i,j].=func.last_layer(IO_map[i,j].I)
        else IO_map[i,j].=func.middle_layer(IO_map[i,j].I);
        end;
      end;
  end;

  { computeE 计算神经网络的输出误差,在
    调用该过程前要先MapIO }
  { computeE: per-output-neuron squared error E[j]=(y*-y)^2/2 for the
    current instance k; MapIO must have been called first. }
  procedure computeE;
  var j:integer;
      diff:Extended;
  begin
  for j:=1 to n[layer_count] do
    begin
    diff:=Instances[k].Y[j]-IO_map[layer_count,j].O;
    E[j]:=sqr(diff)/2;
    end;
  end;

  procedure computeDelta;
  { Backpropagate the error terms (deltas) for the current instance k,
    output layer first.  Requires MapIO to have been called.
      delta(output) = (o - y) * f'(net)
      delta(hidden) = (sum over next layer of delta*w) * f'(net) }
  var i,j,r:integer;
      x:Extended;
  begin
  // output-layer deltas
  for i:=1 to n[layer_count] do
    delta[layer_count,i]:=(IO_map[layer_count,i].O-Instances[k].Y[i])*
      func.d_last_layer(IO_map[layer_count,i].I);
  // propagate backwards through the remaining layers
  for i:=layer_count-1 downto 1 do
    for j:=1 to n[i] do
      begin
      x:=0;
      // weighted sum of the next layer's deltas
      for r:=1 to n[i+1] do
        x:=x+delta[i+1,r]*W[i][j,r];
      if i>1 then
        // middle layer
        x:=x*func.d_middle_layer(IO_map[i,j].I)
        else // first layer
        x:=x*func.d_layer1(IO_map[i,j].I);
      delta[i,j]:=x;
      end;
  end;

  { makeWChange: gradient-descent weight update,
    w += -eta * delta(next layer) * output(previous layer). }
  procedure makeWChange;
  var step:Extended;
      layer,src,dst:integer;
  begin
  for layer:=1 to layer_count-1 do
    for src:=1 to n[layer] do
      for dst:=1 to n[layer+1] do
        begin
        step:=-yita*delta[layer+1,dst]*IO_map[layer,src].O;
        W[layer][src,dst]:=W[layer][src,dst]+step;
        end;
  end;

  { isInRange: True when every output neuron's error is within epsilon
    (computeE must have been called for the current instance). }
  function isInRange:Boolean;
  var j:integer;
  begin
  for j:=1 to n[layer_count] do
    if E[j]>epsilon then
      begin
      Result:=False;
      Exit;
      end;
  Result:=True;
  end;

  function isAllInRange:Boolean;
    { True when the output error of EVERY learning instance is within
      epsilon.  The currently selected instance (global k) is checked
      first, then all the others; k is restored before returning.
      Fix: the original used the GLOBAL k as a for-loop control
      variable, which Delphi rejects (E2070: the control variable must
      be a simple local variable); a local index now drives the loop
      and k is assigned from it before evaluating each instance. }
  var old_k,idx:integer;
  begin
  // check the instance currently being trained first
  MapIO;
  computeE;
  if not isInRange then
    begin
    Result:=False;
    Exit;
    end;

  old_k:=k;
  Result:=True;

  for idx:=1 to instance_count do
    if idx<>old_k then
      begin
      k:=idx;
      MapIO;
      computeE;
      if not isInRange then
        begin
        Result:=False;
        break;
        end;
      end;

  k:=old_k;  // restore so training continues with the same instance
  end;

  { MainCompute: train until every instance's error is within epsilon,
    or until the iteration limit total_time is reached (when >0).
    Fix: the original advanced k with ((k+1) mod instance_count)+1,
    which steps by two instances per iteration and can never select
    instance 1 again; the standard round-robin (k mod instance_count)+1
    visits every learning instance in turn. }
  procedure MainCompute;
  begin
  while true do
    begin
    if (total_time>0) and (t>total_time) then break;
    if isAllInRange then Break;  // MapIO/computeE run inside isAllInRange
    computeDelta;
    makeWChange;
    t:=t+1;
    k:=(k mod instance_count)+1;  // round-robin over learning instances
    yita:=yita-delta_yita;        // anneal the learning rate
    end;
  end;

begin
Randomize;     // seed the RNG used for the initial weights
LoadFromFile;  // read BP.in and load the activation-function DLL

Init;          // random weights, reset counters
MainCompute;   // train until converged or iteration limit reached

WriteResultToFile;  // write BP.out: iterations, outputs, weights

func.Free;
end.


⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -