
stdnet.c

GENETIC ALGORITHM FOR NEURAL NETWORKS ON TRANSPUTERS (Source code and Reference)

Language: C
/* Implementation of standard 2 layer networks */

#include "defs.h"
#include "ind.h"
#include "stdnet.h"
#include "genback.h"

char IndStdOptStr[40]="B:W:E:b:\0";
char NetStdOptStr[40]="\0";

int   Nbits    =8;      /* bits per weight gene                     */
float Width    =10.0;   /* weights are decoded into [-Width,Width]  */
float Estimate =0;      /* error estimation (subsampling) factor    */
int   Nback    =0;      /* backprop. iterations per evaluation      */

char *IndStdUsage=
  "-B <no. of bits for weights>      8\n"
  "-W <interval for weights [-W,W]>: 10\n"
  "-E <error estimation factor>:     0.0\n"
  "-b <no. of backprop. iterations>: 0\n\0";
char *NetStdUsage="\0";

char* IndOptStr()
{
  strcat(IndStdOptStr,StdOptStr());
  return IndStdOptStr;
}

char* NetOptStr()
{
  strcat(NetStdOptStr,StdOptStr());
  return NetStdOptStr;
}

char* IndUsage()
{
  char *s;
  s=(char*)malloc(512);
  strcpy(s,StdUsage());
  strcat(s,IndStdUsage);
  return s;
}

char* NetUsage()
{
  char *s;
  s=(char*)malloc(512);
  strcpy(s,StdUsage());
  strcat(s,NetStdUsage);
  return s;
}

int handleIndOpt(char opt,char* arg)
{
  switch(opt)
  {
    case 'B': return (Nbits=getint(arg,1,24))<0;
    case 'W': return getfloat(&Width,arg,0.01,100);
    case 'E': return getfloat(&Estimate,arg,0.1,100);
    case 'b': return (Nback=getint(arg,0,100000))<0;
  };
  return handleStdOpt(opt,arg);
}

int handleNetOpt(char opt,char* arg)
{
  return handleStdOpt(opt,arg);
}

float *grey2float= (float*)0;   /* Gray code -> weight value          */
float *grey2lrate= (float*)0;   /* Gray code -> learning rate         */
float *grey2imp=   (float*)0;   /* Gray code -> momentum term         */
word  *int2grey=   (word*)0;    /* binary value -> Gray code          */
float *Hidden=(float*)0;
float **TrainIn=  (float**)0;   /* Training Set Input */
float **TrainOut= (float**)0;   /* Training Set Output */
int OffW1,OffW2,IncW1,IncW2,OffLR,OffIM;

/* Build the table a[b]=g mapping a plain binary value b to its Gray
   code g (the inverse of the decoding loop used in greyfloat()). */
void intgrey(word *a,int n)
{
  int i;
  int sum,bit,par;
  for(i=0;i<n;i++)
  {
    bit=n>>1; sum=0; par=0;
    while(bit)
    {
      if(bit&i) par=!par;
      if(par) sum|=bit;
      bit>>=1;
    };
    a[sum]=i;
  };
}

/* Build the table a[g] = value of Gray code g, scaled linearly
   from the integer range [0,n-1] into the float interval [l,h]. */
void greyfloat(float *a,int n,float l,float h)
{
  int i;
  int sum,bit,par;
  for(i=0;i<n;i++)
  {
    bit=n>>1; sum=0; par=0;
    while(bit)
    {
      if(bit&i) par=!par;
      if(par) sum|=bit;
      bit>>=1;
    };
    a[i]=(float)sum*(h-l)/(n-1)+l;
  };
}

/* Allocate the training set (one constant 1.0 bias input appended to
   each input pattern) and shuffle the patterns randomly. */
int getTrainSet()
{
  int i,j,k;
  float *p,*q;
  if(TrainIn) return 0;
  if(!(TrainIn=(float**)malloc(Ntrain*PNTLEN))) return 1;
  if(!(TrainOut=(float**)malloc(Ntrain*PNTLEN))) return 1;
  if(!(p=(float*)malloc((Nin+1)*Ntrain*FLOATLEN))) return 1;
  if(!(q=(float*)malloc(Nout*Ntrain*FLOATLEN))) return 1;
  for(i=0;i<Ntrain;i++)
  {
    TrainIn[i]=p; p+=(Nin+1);
    TrainOut[i]=q; q+=Nout;
    TrainIn[i][Nin]=1.0;        /* constant bias input */
  };
  initTrain();
  for(i=0;i<4*Ntrain;i++)       /* random shuffle of the patterns */
  {
    j=getrand()%Ntrain; k=getrand()%Ntrain;
    p=TrainIn[k]; q=TrainOut[k];
    TrainIn[k]=TrainIn[j]; TrainOut[k]=TrainOut[j];
    TrainIn[j]=p; TrainOut[j]=q;
  };
  return 0;
}

/* Compute the bit offsets of the weight genes inside the chromosome
   and build the Gray-decoding tables. */
int initDecoding()
{
  int i;
  OffW1=0;
  IncW1=(Nin+1)*Nbits;
  OffW2=IncW1*Nhid;
  IncW2=(Nhid+1)*Nbits;
  OffLR=CrBits-Nlrate-Nimp;
  OffIM=CrBits-Nimp;
  FGmax=(1<<Nbits)-1;
  FGmult=0.5*(float)FGmax/Width;
  FGadd=0.5*(float)FGmax+0.5;
  if(!(grey2float=(float*)malloc((1<<Nbits)*FLOATLEN))) return 1;
  greyfloat(grey2float,1<<Nbits,-Width,Width);
  if(Nback)
  {
    if(int2grey) return 0;
    if(!(int2grey=(word*)malloc((1<<Nbits)*WORDLEN))) return 1;
    intgrey(int2grey,1<<Nbits);
    if(!(grey2lrate=(float*)malloc((1<<Nlrate)*FLOATLEN))) return 1;
    greyfloat(grey2lrate,1<<Nlrate,log(Llrate),log(Hlrate));
    for(i=0;i<(1<<Nlrate);i++) grey2lrate[i]=exp(grey2lrate[i]);
    if(!(grey2imp=(float*)malloc((1<<Nimp)*FLOATLEN))) return 1;
    greyfloat(grey2imp,1<<Nimp,Limp,Himp);
  };
  return 0;
}

/* Initialise the GA individual representation: chromosome size,
   decoding tables, training set and parameter report string. */
int initInd()
{
  char s[80]="\0";
  char t[80]="\0";
  initStd();
  if(!(Ntrain && Nin && Nhid && Nout)) return 1;
  CrBits=(Nin+1)*Nbits*Nhid+(Nhid+1)*Nbits*Nout;
  if(Nback) CrBits+= Nlrate+Nimp;
  CrBytes=(CrBits-1)/8+1;
  CrWords=(CrBytes-1)/WORDLEN+1;
  if(initDecoding()) return 1;
  if(getTrainSet()) return 1;
  if(!(Hidden=(float*)malloc(Nhid*FLOATLEN))) return 1;
  NoTrain=Ntrain;
  OffsTrain=0;
  if(Nback)
    sprintf(t,
     "            %d backpropagation steps per generation\n",Nback);
  if(Estimate!=0.0)
  {
    NoTrain=(Estimate*((Nin+1)*Nhid+(Nhid+1)*Nout))/(Nin+Nout)+1;
    NoTrain=min(NoTrain,Ntrain);
    sprintf(s,
      "Estimation: Initial factor %4.1f, %d patterns (%d %%) used\n",
      Estimate,NoTrain,(NoTrain*100)/Ntrain);
  };
  sprintf(IndParamStr,
    "Network:    %s (%d patterns)\n"
    "            Topology %d-%d-%d, %d Neurons, %d bits (Weights %d)\n"
    "            Weights in [%6.2f,%6.2f]\n%s%s",
    StdName,Ntrain,Nin,Nhid,Nout,Nhid+Nout,CrBits,Nbits,
    -Width,Width,t,s);
  return 0;
}

/* Fitness function: mean squared error of the decoded network over
   NoTrain training patterns (one forward pass per pattern). */
errtyp calcerr(ind x)
{
  int p,pp,i,j,k;
  float e,s,d;
  if(Nback)
    backsteps(x,Nback);
  p=OffsTrain; e=0.0;
  for(pp=0;pp<NoTrain;pp++)
  {
    for(j=0;j<Nhid;j++)         /* hidden layer */
    {
      s=0.0;
      for(i=0;i<Nin;i++)
        s+=weight1(x,i,j)*TrainIn[p][i];
      Hidden[j]=sigma(s+bias1(x,j));
    };
    for(k=0;k<Nout;k++)         /* output layer */
    {
      s=0.0;
      for(j=0;j<Nhid;j++) s+=weight2(x,j,k)*Hidden[j];
      d=sigma(s+bias2(x,k))-TrainOut[p][k];
      e+=d*d;
    };
    p++; if(p>=Ntrain) p=0;
  };
  GenCalcs+=NoTrain;
  return (0.5*e)/((float)NoTrain);
}

/* Print the decoded weights of an individual, one
   (weights...,b:bias) tuple per neuron. */
void printind(ind x)
{
  int i,j,k,c;
  c=0;
  for(j=0;j<Nhid;j++)
  {
    if(c>70-6*Nin) { printf("\n"); c=0; };
    printf("(");
    for(i=0;i<Nin;i++) printf("%5.2f,",weight1(x,i,j));
    printf("b:%5.2f)",bias1(x,j));
    c+=9+6*Nin;
  };
  printf("\n"); c=0;
  for(k=0;k<Nout;k++)
  {
    if(c>70-6*Nhid) { printf("\n"); c=0; };
    printf("(");
    for(j=0;j<Nhid;j++) printf("%5.2f,",weight2(x,j,k));
    printf("b:%5.2f)",bias2(x,k));
    c+=9+6*Nhid;
  };
  printf("\n");
}

/* Backpropagation */

/* Initialise the plain network module (no GA decoding). */
int initNet()
{
  initStd();
  if(getTrainSet()) return 1;
  Nback=0;
  sprintf(NetParamStr,
    "Network:    %s\n"
    "            Topology %d-%d-%d, %d Neurons, %d patterns\n",
    StdName,Nin,Nhid,Nout,Nhid+Nout,Ntrain);
  return 0;
}
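For a sense of the sizes initInd() computes, consider a hypothetical 4-4-1 topology (Nin, Nhid and Nout actually come from the problem setup, not this file). With the default Nbits=8, the chromosome holds (4+1)*8*4 + (4+1)*8*1 = 200 bits: each hidden and output neuron stores its incoming weights plus a bias as 8-bit Gray-coded genes. With -E 2.0, error estimation evaluates only NoTrain = 2.0*((4+1)*4 + (4+1)*1)/(4+1) + 1 = 11 training patterns per fitness call (capped at Ntrain), cycling the window through the shuffled training set.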
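The least obvious part of the decoder is the Gray coding: greyfloat() treats each Nbits-wide gene as a Gray code, converts it to a plain binary integer, and scales it linearly into [-Width,Width]. Consecutive weight levels then differ in exactly one chromosome bit, so a single-bit mutation can nudge a weight by one quantisation step instead of jumping across the interval. The following standalone sketch (hypothetical, not part of the original sources; the defaults Nbits=8 and Width=10 are taken from the code above) reproduces the decoding loop for a few sample genes:

/* demo_grey.c -- hypothetical standalone demo of the Gray decoding.
   Compile with: cc demo_grey.c -o demo_grey */
#include <stdio.h>

/* Same Gray-to-binary loop as greyfloat(): scanning from the most
   significant bit down, a set Gray bit flips the running parity, and
   the parity gives the corresponding bit of the plain binary sum. */
static float grey_decode(int g, int n, float l, float h)
{
  int bit = n >> 1, sum = 0, par = 0;
  while (bit)
  {
    if (bit & g) par = !par;
    if (par) sum |= bit;
    bit >>= 1;
  }
  return (float)sum * (h - l) / (n - 1) + l;
}

int main(void)
{
  int Nbits = 8;        /* default gene width, as in stdnet.c        */
  float Width = 10.0f;  /* default weight interval [-10,10]          */
  int n = 1 << Nbits;
  int g;
  /* Decode a few sample genes; the step size is 2*Width/(n-1). */
  for (g = 0; g < 4; g++)
    printf("gene %3d -> weight %8.4f\n", g, grey_decode(g, n, -Width, Width));
  return 0;
}

In the real code this loop runs only once per value: initDecoding() precomputes all 1<<Nbits decodings into grey2float (and, when backpropagation is enabled, grey2lrate and grey2imp), so the weight1()/weight2() accessors presumably decode a gene with a table lookup rather than rerunning the bit loop inside the fitness function.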
