back.c
/* Implementation of backpropagation algorithm */

#include <stdio.h>
#include <stdlib.h>
#include <math.h>

#include "defs.h"
#include "back.h"
#include "ind.h"

/* Parameter Handling */
#define DEFLEARNRATE   1.0
#define DEFIMPULSE     0.0
#define DEFINITWEIGHT  1.0
#define DEFONLINE      1

char *BackOptStr()
{
  return "o:w:k:a:";
}

char *BackUsage()
{
  return "Backpropagation Parameters:\n"
         "-o <use online learning?>: 1\n"
         "-w <w for initial weights [-w,w]>: 1.0\n"
         "-k <learn rate>: 1.0\n"
         "-a <impulse factor>: 0.0\n";
}

/* set default values */
int   Online     = DEFONLINE;     /* use online (vs. batch) learning */
float LearnRate  = DEFLEARNRATE;  /* learn rate */
float Impulse    = DEFIMPULSE;    /* impulse (momentum) constant */
float InitWeight = DEFINITWEIGHT; /* initial weight range */

/* others */
float *Dout;                      /* delta update for output layer */
const char OnlineStr[2][10] = { "Batch", "Online" };

int handleBackOpt(char opt, char *arg)
{
  switch (opt) {
    case 'o': return (Online = getint(arg, 0, 1)) < 0;
    case 'w': return getfloat(&InitWeight, arg, 0.0, 1000.0);
    case 'k': return getfloat(&LearnRate,  arg, 0.0, 1000.0);
    case 'a': return getfloat(&Impulse,    arg, 0.0, 1000.0);
    default:  return 1;
  }
}

/* Allocate the weight and delta matrices and initialize the net. */
int initBack()
{
  int i, j, k;
  float *p, *q, *pp, *qq;

  BackCalcs = 0;
  /* hidden activations, indexed 0..Nhid (Hid[Nhid] is the bias unit) */
  if (!(Hid  = (float*) malloc((Nhid+1) * FLOATLEN))) return 1;
  if (!(Out  = (float*) malloc(Nout     * FLOATLEN))) return 1;
  if (!(Dout = (float*) malloc(Nout     * FLOATLEN))) return 1;
  if (!(Wih  = (float**)malloc((Nin+1)  * PNTLEN)))   return 1;
  if (!(Who  = (float**)malloc((Nhid+1) * PNTLEN)))   return 1;
  if (!(Dih  = (float**)malloc((Nin+1)  * PNTLEN)))   return 1;
  if (!(Dho  = (float**)malloc((Nhid+1) * PNTLEN)))   return 1;
  if (!(p  = (float*)malloc((Nin+1)  * Nhid * FLOATLEN))) return 1;
  if (!(q  = (float*)malloc((Nhid+1) * Nout * FLOATLEN))) return 1;
  if (!(pp = (float*)malloc((Nin+1)  * Nhid * FLOATLEN))) return 1;
  if (!(qq = (float*)malloc((Nhid+1) * Nout * FLOATLEN))) return 1;

  /* carve the row pointers of Wih/Dih out of the flat blocks p/pp */
  for (i = 0; i <= Nin; i++) {
    Wih[i] = p;  p  += Nhid;
    Dih[i] = pp; pp += Nhid;
    for (j = 0; j < Nhid; j++) Dih[i][j] = 0.0;
  }
  BiasHid = Wih[Nin];

  for (j = 0; j <= Nhid; j++) {
    Who[j] = q;  q  += Nout;
    Dho[j] = qq; qq += Nout;
    for (k = 0; k < Nout; k++) Dho[j][k] = 0.0;
  }
  randomNet(InitWeight);
  BiasOut = Who[Nhid];
  Hid[Nhid] = 1.0;          /* constant bias activation */

  /* second set of delta buffers, needed for the impulse (momentum) term */
  if (Nback || Impulse != 0.0) {
    if (!(DDih = (float**)malloc((Nin+1)  * PNTLEN))) return 1;
    if (!(DDho = (float**)malloc((Nhid+1) * PNTLEN))) return 1;
    if (!(p = (float*)malloc((Nin+1)  * Nhid * FLOATLEN))) return 1;
    if (!(q = (float*)malloc((Nhid+1) * Nout * FLOATLEN))) return 1;
    for (i = 0; i <= Nin;  i++) { DDih[i] = p; p += Nhid; }
    for (j = 0; j <= Nhid; j++) { DDho[j] = q; q += Nout; }
  }

  if (Nback)
    sprintf(BackParamStr,
            "Backprop.: Method: %s, InitWeight = [%7.3f,%7.3f]\n",
            OnlineStr[Online], -InitWeight, +InitWeight);
  else
    sprintf(BackParamStr,
            "Backprop.: Method: %s, LearnRate = %7.4f, Impulse = %7.4f\n"
            "           InitWeight = [%7.3f,%7.3f]\n",
            OnlineStr[Online], LearnRate, Impulse,
            -InitWeight, +InitWeight);
  return 0;
}

/* uniform random weight in [-w,w] */
#define randweight() (((float)getrand() / (float)MAXRANDOM) * 2.0 * w - w)

void randomNet(float w)
{
  int i, j, k;
  for (i = 0; i <= Nin; i++)
    for (j = 0; j < Nhid; j++)
      Wih[i][j] = randweight();
  for (j = 0; j <= Nhid; j++)
    for (k = 0; k < Nout; k++)
      Who[j][k] = randweight();
}
#undef randweight

#define sigma(x) (1.0 / (1.0 + exp(-(x))))

/*
 * Forward pass for one pattern: compute Hid[] and Out[], store the
 * output deltas (t-o)*o*(1-o) in Dout[] and return the squared error.
 * in[Nin] is expected to hold the constant bias input.
 */
float evaluate(float *in, float *out)
{
  int i, j, k;
  float s, e, d;

  for (j = 0; j < Nhid; j++) {
    s = 0.0;
    for (i = 0; i <= Nin; i++) s += in[i] * Wih[i][j];
    Hid[j] = sigma(s);
  }
  for (k = 0; k < Nout; k++) {
    s = 0.0;
    for (j = 0; j <= Nhid; j++) s += Hid[j] * Who[j][k];
    Out[k] = sigma(s);
  }
  e = 0.0;
  for (k = 0; k < Nout; k++) {
    d = out[k] - Out[k];
    Dout[k] = d * Out[k] * (1 - Out[k]);
    e += d * d;
  }
  return 0.5 * e;
}
#undef sigma

#define swap(x,y) { pt = x; x = y; y = pt; }

/*
 * One pass over the training set.  Online: weights are updated after
 * every pattern.  Batch: only the deltas Dih/Dho are accumulated here;
 * updateNet() applies them afterwards.  DDih/DDho hold the previous
 * deltas for the impulse (momentum) term.
 */
float calcNetErr()
{
  int i, j, k, p;
  float **pt;
  float *in, *out;
  float s;

  NetErr = 0.0;
  BackCalcs += Ntrain;

  if (Online) {
    for (p = 0; p < Ntrain; p++) {
      if (Impulse != 0) { swap(Dih, DDih); swap(Dho, DDho); }
      in  = TrainIn[p];
      out = TrainOut[p];
      NetErr += evaluate(in, out);
      /* hidden-layer deltas: backpropagate Dout through Who */
      for (j = 0; j < Nhid; j++) {
        s = 0.0;
        for (k = 0; k < Nout; k++) s += Who[j][k] * Dout[k];
        s *= Hid[j] * (1 - Hid[j]);
        if (Impulse != 0.0)
          for (i = 0; i <= Nin; i++)
            Wih[i][j] += Dih[i][j] =
              LearnRate * s * in[i] + Impulse * DDih[i][j];
        else
          for (i = 0; i <= Nin; i++)
            Wih[i][j] += LearnRate * s * in[i];
      }
      for (j = 0; j <= Nhid; j++) {
        if (Impulse != 0.0) {
          for (k = 0; k < Nout; k++)
            Who[j][k] += Dho[j][k] =
              LearnRate * Dout[k] * Hid[j] + Impulse * DDho[j][k];
        } else {
          for (k = 0; k < Nout; k++)
            Who[j][k] += LearnRate * Dout[k] * Hid[j];
        }
      }
    }
    NetErr /= Ntrain;
    return NetErr;
  } else {
    if (Impulse != 0) { swap(Dih, DDih); swap(Dho, DDho); }
    for (i = 0; i <= Nin; i++)
      for (j = 0; j < Nhid; j++) Dih[i][j] = 0.0;
    for (j = 0; j <= Nhid; j++)
      for (k = 0; k < Nout; k++) Dho[j][k] = 0.0;
    for (p = 0; p < Ntrain; p++) {
      in  = TrainIn[p];
      out = TrainOut[p];
      NetErr += evaluate(in, out);
      for (j = 0; j < Nhid; j++) {
        s = 0.0;
        for (k = 0; k < Nout; k++) s += Who[j][k] * Dout[k];
        s *= Hid[j] * (1 - Hid[j]);
        for (i = 0; i <= Nin; i++) Dih[i][j] += s * in[i];
      }
      for (j = 0; j <= Nhid; j++)
        for (k = 0; k < Nout; k++) Dho[j][k] += Dout[k] * Hid[j];
    }
    NetErr /= Ntrain;
    return NetErr;
  }
}
#undef swap

/* Batch learning: apply the deltas accumulated by calcNetErr(). */
void updateNet()
{
  int i, j, k;
  float l;

  if (!Online) {
    l = LearnRate / Ntrain;
    if (Impulse != 0.0) {
      /* Dih = l*Dih + Impulse*DDih, then Wih += Dih (likewise for Dho) */
      for (i = 0; i <= Nin; i++)
        for (j = 0; j < Nhid; j++) {
          Dih[i][j] *= l;
          Wih[i][j] += Dih[i][j] += Impulse * DDih[i][j];
        }
      for (j = 0; j <= Nhid; j++)
        for (k = 0; k < Nout; k++) {
          Dho[j][k] *= l;
          Who[j][k] += Dho[j][k] += Impulse * DDho[j][k];
        }
    } else {
      for (i = 0; i <= Nin; i++)
        for (j = 0; j < Nhid; j++)
          Wih[i][j] += Dih[i][j] *= l;
      for (j = 0; j <= Nhid; j++)
        for (k = 0; k < Nout; k++)
          Who[j][k] += Dho[j][k] *= l;
    }
  }
}

/* Print the weight matrices, one (weights..., b:bias) tuple per unit. */
void printnet()
{
  int i, j, k, c;

  c = 0;
  for (j = 0; j < Nhid; j++) {
    if (c > 70 - 6*Nin) { printf("\n"); c = 0; }
    printf("(");
    for (i = 0; i < Nin; i++) printf("%5.2f,", Wih[i][j]);
    printf("b:%5.2f)", Wih[Nin][j]);
    c += 9 + 6*Nin;
  }
  printf("\n");
  c = 0;
  for (k = 0; k < Nout; k++) {
    if (c > 70 - 6*Nhid) { printf("\n"); c = 0; }
    printf("(");
    for (j = 0; j < Nhid; j++) printf("%5.2f,", Who[j][k]);
    printf("b:%5.2f)", Who[Nhid][k]);
    c += 9 + 6*Nhid;
  }
  printf("\n");
}
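
The listing above does not compile on its own: defs.h and ind.h, the globals (Nin, Nhid, Nout, Nback, TrainIn, TrainOut, BackCalcs, NetErr, the weight pointers) and the option parser live elsewhere in the framework and are not shown. What follows is a minimal standalone sketch of the same online update with the impulse term, not the author's harness: the network sizes, learning constants, and the XOR training set are made up for illustration, and a single delta buffer per weight replaces the swapped Dih/DDih pairs (equivalent, since each previous delta is read before it is overwritten).

/* sketch.c -- standalone online backprop with momentum, trained on XOR.
 * Build: cc sketch.c -lm */
#include <stdio.h>
#include <stdlib.h>
#include <math.h>

#define NIN    2   /* illustrative sizes, stand-ins for Nin/Nhid/Nout */
#define NHID   3
#define NOUT   1
#define NTRAIN 4

static double sigma(double x) { return 1.0 / (1.0 + exp(-x)); }

int main(void)
{
    /* XOR patterns; last input component is the constant bias 1,
     * mirroring in[Nin] in evaluate() above */
    double in[NTRAIN][NIN+1] = {{0,0,1},{0,1,1},{1,0,1},{1,1,1}};
    double target[NTRAIN][NOUT] = {{0},{1},{1},{0}};

    double wih[NIN+1][NHID], who[NHID+1][NOUT];
    double dih[NIN+1][NHID] = {{0}}, dho[NHID+1][NOUT] = {{0}};
    double hid[NHID+1], out[NOUT], dout[NOUT];
    double eta = 1.0, alpha = 0.5;  /* learn rate and impulse, cf. -k, -a */
    int i, j, k, p, epoch;

    srand(1);
    for (i = 0; i <= NIN; i++)      /* weights in [-1,1], cf. randomNet() */
        for (j = 0; j < NHID; j++)
            wih[i][j] = 2.0 * rand() / RAND_MAX - 1.0;
    for (j = 0; j <= NHID; j++)
        for (k = 0; k < NOUT; k++)
            who[j][k] = 2.0 * rand() / RAND_MAX - 1.0;
    hid[NHID] = 1.0;                /* hidden-layer bias unit */

    for (epoch = 0; epoch < 5000; epoch++) {
        double err = 0.0;
        for (p = 0; p < NTRAIN; p++) {
            /* forward pass, as in evaluate() */
            for (j = 0; j < NHID; j++) {
                double s = 0.0;
                for (i = 0; i <= NIN; i++) s += in[p][i] * wih[i][j];
                hid[j] = sigma(s);
            }
            for (k = 0; k < NOUT; k++) {
                double s = 0.0;
                for (j = 0; j <= NHID; j++) s += hid[j] * who[j][k];
                out[k] = sigma(s);
            }
            /* output deltas: (t - o) * o * (1 - o) */
            for (k = 0; k < NOUT; k++) {
                double d = target[p][k] - out[k];
                dout[k] = d * out[k] * (1.0 - out[k]);
                err += 0.5 * d * d;
            }
            /* online update with momentum, as in the Online branch of
             * calcNetErr(): new delta = gradient step + alpha * old delta */
            for (j = 0; j < NHID; j++) {
                double s = 0.0;
                for (k = 0; k < NOUT; k++) s += who[j][k] * dout[k];
                s *= hid[j] * (1.0 - hid[j]);
                for (i = 0; i <= NIN; i++)
                    wih[i][j] += dih[i][j] =
                        eta * s * in[p][i] + alpha * dih[i][j];
            }
            for (j = 0; j <= NHID; j++)
                for (k = 0; k < NOUT; k++)
                    who[j][k] += dho[j][k] =
                        eta * dout[k] * hid[j] + alpha * dho[j][k];
        }
        if (epoch % 1000 == 0)
            printf("epoch %4d  mean error %f\n", epoch, err / NTRAIN);
    }
    return 0;
}

The impulse factor alpha here plays exactly the role of the DDih/DDho buffers in calcNetErr(): each new weight change is the plain gradient step plus alpha times the previous change, which smooths the per-pattern updates. With these (illustrative) settings the mean error typically falls well below the initial value within a few thousand epochs, though convergence on XOR depends on the random seed.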