/* bp1_7.c -- BP neural network (1-5-2-2-1) approximating y = sin(3x)cos(x)+1, Turbo C / BGI graphics. */
#include<math.h>
#include<stdlib.h>
#include<time.h>
#include<stdio.h>
#include<conio.h>
#include<string.h>
#include<graphics.h>
#define closegr closegraph
#define PI 3.14159265
#define seed 999
#define study_Times 2000000
#define point_study_times 10000
#define percision 0.005
#define N 14 /* 在0至2*PI中等间距选取N个样本点.*/
#define sigmoid_s 2.5
#define display_screen 1000
#define save_file 10000
float w0[5][2],w1[2][6],w2[2][3],w3[3]; /* Layer weights; the last column/element of each row is the threshold (it multiplies a -1 bias input). */
float delta_w0[5][2],delta_w1[2][6],delta_w2[2][3],delta_w3[3]; /* Previous weight increments (momentum terms). */
float x0[2],x1[6],x2[3],x3[3],x4; /* Neuron outputs per layer; x0[1], x1[5], x2[2], x3[2] hold the -1 bias (set in main). */
float max=1.880085,min=0.0; /* Range used by (anti-)normalizing; presumably the extrema of the target function -- TODO confirm the constant. */
float d1[5],d2[2],d3[2],d; /* Back-propagated deltas (local gradients) of each neuron layer. */
float eta=0.25,alpha=0.85; /* Learning rate and momentum coefficient. */
float expect_Value=0.0; /* Normalized target output for the current training sample. */
long counter=0,counter1=0; /* Training-pass counter and per-sample inner-iteration counter. */
/*
void find_Max_Min(){
float angle;
max=min=angle=0;
while (angle<2*PI){
if (max<(angle*sin(angle)+2)) max=angle*sin(angle)+2;
if (min>(angle*sin(angle)+2)) min=angle*sin(angle)+2;
angle+=0.01;
}
}
*/
/*
 * Draw one random weight in (0, 1] using the Borland random() macro,
 * matching the original (random(seed)+1)/(seed+1) scheme.
 */
static float random_Weight(void)
{
    float t;
    t = random(seed);
    return (t + 1) / (seed + 1);
}

/*
 * Seed the RNG from the clock and fill every weight/threshold matrix
 * of the 1-5-2-2-1 network with random values in (0, 1].
 */
void initial_Matrix()
{
    int r, c;
    time_t now;

    srand((unsigned)time(&now));
    /* Layer 0: 5 neurons, each with 1 input weight + 1 threshold. */
    for (r = 0; r < 5; r++)
        for (c = 0; c < 2; c++)
            w0[r][c] = random_Weight();
    /* Layer 1: 2 neurons, 5 input weights + 1 threshold each. */
    for (r = 0; r < 2; r++)
        for (c = 0; c < 6; c++)
            w1[r][c] = random_Weight();
    /* Layer 2: 2 neurons, 2 input weights + 1 threshold each. */
    for (r = 0; r < 2; r++)
        for (c = 0; c < 3; c++)
            w2[r][c] = random_Weight();
    /* Output neuron: 2 input weights + 1 threshold. */
    for (r = 0; r < 3; r++)
        w3[r] = random_Weight();
}
/*
* nomalizing Function
*/
/*
 * Normalize an input angle: map [0, 2*PI] down to [0, 1].
 */
float nomalizing(float x){
    float ratio;
    ratio = x / (2 * PI);
    return ratio;
}
/*
* Anti_nomalizing Function
*/
/*
 * De-normalize a network output: map [0, 1] back onto [min, max].
 */
float anti_nomalizing(float y){
    float span;
    span = max - min;
    return (y * span + min);
}
/*
 * Normalize a target value into [0, 1] relative to the function range [min, max].
 */
float expect_Function(float ex){
    float span;
    span = max - min;
    return ((ex - min) / span);
}
/*
* Create excite function Sigmoid(float x).
*/
/*
 * Logistic activation with steepness sigmoid_s: 1 / (1 + e^(-s*x)).
 */
float sigmoid(float x){
    double e;
    e = exp(-sigmoid_s * x);
    return (1 / (1 + e));
}
/*
 * One forward pass through the 1-5-2-2-1 network. x0 must already hold the
 * normalized input (x0[1] is the -1 bias); fills x1..x3 and returns the
 * activation of the single output neuron.
 */
float forward_Propagation(){
    int n, k;
    float acc;

    /* Layer 1: five neurons, each fed by x0[0] and the bias x0[1]. */
    for (n = 0; n < 5; n++) {
        acc = 0.0;
        for (k = 0; k < 2; k++)
            acc += w0[n][k] * x0[k];
        x1[n] = sigmoid(acc);
    }
    /* Layer 2: two neurons over x1[0..4] plus the bias x1[5]. */
    for (n = 0; n < 2; n++) {
        acc = 0.0;
        for (k = 0; k < 6; k++)
            acc += w1[n][k] * x1[k];
        x2[n] = sigmoid(acc);
    }
    /* Layer 3: two neurons over x2[0..1] plus the bias x2[2]. */
    for (n = 0; n < 2; n++) {
        acc = 0.0;
        for (k = 0; k < 3; k++)
            acc += w2[n][k] * x2[k];
        x3[n] = sigmoid(acc);
    }
    /* Output neuron over x3[0..1] plus the bias x3[2]. */
    acc = 0.0;
    for (n = 0; n < 3; n++)
        acc += w3[n] * x3[n];
    return (sigmoid(acc));
}
/*
* Get the difference of every neuron.
*/
/*
 * Back-propagate the output error to obtain the delta (local gradient) of
 * every neuron, using the sigmoid derivative s*y*(1-y).
 */
void difference(){
int i,j;
float sum1;
/* Delta of the output neuron: derivative times (output - target). */
d=sigmoid_s*x4*(1-x4)*(x4-expect_Value);
/* Deltas of the 3rd layer (2 neurons; x3[2] is the -1 bias). */
for (i=0;i<2;i++){
d3[i]=sigmoid_s*x3[i]*(1-x3[i])*w3[i]*d;
}
/* Deltas of the 2nd layer: the 3rd-layer deltas are summed through the
   transposed weight index w2[j][i] -- the weight from 2nd-layer neuron i
   into 3rd-layer neuron j. */
for (i=0;i<2;i++){
sum1=0.0;
for (j=0;j<2;j++){
sum1+=w2[j][i]*d3[j];
}
d2[i]=sigmoid_s*x2[i]*(1-x2[i])*sum1;
}
/* Deltas of the 1st layer, likewise summed through w1 transposed. */
for (i=0;i<5;i++){
sum1=0.0;
for (j=0;j<2;j++){
sum1+=w1[j][i]*d2[j];
}
d1[i]=sigmoid_s*x1[i]*(1-x1[i])*sum1;
}
}
/*
* Initialize the right weights and thresholders.
*/
/*
 * Zero every previous-increment (momentum) matrix.
 */
void initial_Delta_W(){
    int r, c;

    for (r = 0; r < 5; r++)
        for (c = 0; c < 2; c++)
            delta_w0[r][c] = 0;
    for (r = 0; r < 2; r++)
        for (c = 0; c < 6; c++)
            delta_w1[r][c] = 0;
    for (r = 0; r < 2; r++)
        for (c = 0; c < 3; c++)
            delta_w2[r][c] = 0;
    for (r = 0; r < 3; r++)
        delta_w3[r] = 0;
}
/*
* Back propagation to modify the right weights and thresholders.
*/
/*
 * One back-propagation step: compute all deltas, then update every weight
 * by gradient descent plus a momentum term.
 *
 * NOTE(review): initial_Delta_W() zeroes the delta_w matrices on EVERY call,
 * so the alpha*delta_w momentum term is always multiplied into zero; if
 * momentum is intended, the zeroing should happen once at start-up -- confirm.
 */
void back_Propagation(){
    int i, j;

    difference();
    initial_Delta_W();
    /* Output layer: 2 weights + 1 threshold (x3[2] is the -1 bias). */
    for (i = 0; i < 3; i++) {
        delta_w3[i] = -eta * d * x3[i] + alpha * delta_w3[i];
        w3[i] += delta_w3[i];
    }
    /* 3rd layer: w2 is [2 neurons][3 inputs]. FIX: the original loop bounds
       were transposed (i<3, j<2), reading d3[2] and writing w2[2][j] out of
       bounds and never updating the threshold column x2[2]. */
    for (i = 0; i < 2; i++) {
        for (j = 0; j < 3; j++) {
            delta_w2[i][j] = -eta * d3[i] * x2[j] + alpha * delta_w2[i][j];
            w2[i][j] += delta_w2[i][j];
        }
    }
    /* 2nd layer: w1 is [2 neurons][6 inputs]. */
    for (i = 0; i < 2; i++) {
        for (j = 0; j < 6; j++) {
            delta_w1[i][j] = -eta * d2[i] * x1[j] + alpha * delta_w1[i][j];
            w1[i][j] += delta_w1[i][j];
        }
    }
    /* 1st layer: w0 is [5 neurons][2 inputs]. FIX: the momentum line used
       d2[i] (size 2 -- out-of-bounds for i>=2) instead of d1[i], diverging
       from the weight update one line above. */
    for (i = 0; i < 5; i++) {
        for (j = 0; j < 2; j++) {
            delta_w0[i][j] = -eta * d1[i] * x0[j] + alpha * delta_w0[i][j];
            w0[i][j] += delta_w0[i][j];
        }
    }
}
/*
* Union the input and output.
*/
/*
 * Full input -> output pass: normalize the angle, run the forward pass
 * (cached in x4), and de-normalize the result.
 */
float neural_Net(float inputx){
    float raw;
    x0[0] = nomalizing(inputx);
    raw = forward_Propagation();
    x4 = raw;
    return (anti_nomalizing(raw));
}
/*
* Establish neural network through study continually.
*/
/*
 * Train the network on N sample points of y = sin(3x)cos(x)+1 over [0, 2*PI],
 * tightening the training precision from 0.1 down to `percision` in steps of
 * 0.005, and log progress plus the weight matrices to d:\weight.txt.
 */
void establish_BPNN(){
    float sample[N], true_[N];
    float percision1, temp;
    FILE *fp;
    int i, k, l, m;
    int flag = 0, flag1[N];
    time_t t;

    srand((unsigned)time(&t));
    /* Pick one random abscissa inside each of the N equal sub-intervals of [0, 2*PI]. */
    for (i = 0; i < N; i++) {
        temp = random(seed);
        /* NOTE(review): divisor is (seed) here but (seed+1) in initial_Matrix -- confirm which is intended. */
        sample[i] = (temp + 1) / (seed) * (2 * PI / N) + 2 * PI * i / N;
    }
    /* FIX: the path was written "d:\weight.txt"; '\w' is not a valid escape
       sequence, so the backslash was silently dropped. */
    if ((fp = fopen("d:\\weight.txt", "w")) == NULL) {
        printf("Can't create the file 'weight.txt'!");
        exit(0);
    }
    fprintf(fp, "This BP neural network is to imitate the function:y=sin(3x)cosx+1 .\n");
    fprintf(fp, "The structure of BP neural network is '1-5-2-2-1'.\n");
    /* Target values plus a per-sample "within precision" flag. */
    for (i = 0; i < N; i++) {
        true_[i] = sin(3 * (sample[i])) * cos(sample[i]) + 1;
        flag1[i] = 0;
        printf("sample %d x:%f y:%f\n", i + 1, sample[i], true_[i]);
        fprintf(fp, "sample %d x:%f y:%f\n", i + 1, sample[i], true_[i]);
    }
    printf("\n");
    printf("The right weights is initilizing.........\n");
    printf(".........................................\n");
    printf(".........................................\n");
    printf("\n");
    initial_Matrix();
    /* Log the starting weights to file and screen. */
    fprintf(fp, "The initialized right weights.\n");
    save_weight(fp);
    printf("The initialized right weights.\n");
    display_weight();
    percision1 = 0.1; /* Precision is annealed from 0.1 down to `percision`. */
    while (percision1 > percision) {
        flag = 0;
        while (flag == 0 && counter < study_Times) {
            for (l = 0; l < N; l++) {
                expect_Value = expect_Function(true_[l]);
                counter1 = 0;
                /* Re-train this point until it fits, bounded by the per-point
                   cap and a schedule that grows with the pass counter. */
                while (fabs(neural_Net(sample[l]) - true_[l]) > percision1 && counter1 < point_study_times && counter1 < counter / 100 + 1) {
                    counter1++;
                    back_Propagation();
                }
            }
            counter++; /* One full pass over the samples. */
            if (counter % save_file == 0) {
                fprintf(fp, "Already study %ld times\n", counter);
                for (k = 0; k < N; k++) {
                    fprintf(fp, "%d x: %f y: %f BPoutput: %f\n", k + 1, sample[k], true_[k], neural_Net(sample[k]));
                }
                save_weight(fp);
            }
            /* Converged at this precision only when every sample fits. */
            for (m = 0; m < N; m++) {
                flag1[m] = fabs(neural_Net(sample[m]) - true_[m]) <= percision1;
            }
            flag = 1;
            for (m = 0; m < N; m++) {
                if (flag1[m]) {
                    flag = flag && flag1[m];
                } else {
                    flag = 0;
                }
            }
        }
        percision1 -= 0.005;
    }
    /* Final report to screen and file. */
    printf("The BP neural network has established.\n");
    printf("Total study %ld times\n", counter);
    for (k = 0; k < N; k++) {
        printf("%d x: %f y: %f BPoutput: %f\n", k + 1, sample[k], true_[k], neural_Net(sample[k]));
    }
    display_weight();
    fprintf(fp, "The BP neural network has established.\n");
    fprintf(fp, "Total study %ld times\n", counter);
    for (k = 0; k < N; k++) {
        fprintf(fp, "%d x: %f y: %f BPoutput: %f\n", k + 1, sample[k], true_[k], neural_Net(sample[k]));
    }
    save_weight(fp);
    fclose(fp);
}
/* Print the right weight on the screen.*/
display_weight(){
int i,j;
printf("\n");
printf("The 1st layer right weights.\n");
for(i=0;i<5;i++){
printf("%f ",w0[i][0]);
}
printf("\n");
printf("The 2nd layer right weights.\n");
for(i=0;i<2;i++){
for(j=0;j<5;j++){
printf("%f ",w1[i][j]);
}
}
printf("\n");
printf("The 3rd layer right weights.\n");
for(i=0;i<2;i++){
for(j=0;j<2;j++){
printf("%f ",w2[i][j]);
}
}
printf("\n");
printf("The last layer right weights.\n");
for(i=0;i<2;i++){
printf("%f "w3[i]);
}
}
/* Save the right weight to the file 'weight.txt.*/
/*
 * Append the complete weight/threshold set to the already-open file f.
 * The last column of each weight matrix is the threshold (it multiplies
 * the -1 bias input).
 */
save_weight(FILE *f){
    int r, c;

    fprintf(f, "\n");
    fprintf(f, "The 1st layer right weights.\n");
    for (r = 0; r < 5; r++)
        fprintf(f, "W%d1=%f theta=%f\n", r + 1, w0[r][0], w0[r][1]);

    fprintf(f, "\n");
    fprintf(f, "The 2nd layer right weights.\n");
    for (r = 0; r < 2; r++) {
        for (c = 0; c < 5; c++)
            fprintf(f, "W%d%d=%f ", r + 1, c + 1, w1[r][c]);
        fprintf(f, "theta=%f\n", w1[r][5]);
    }

    fprintf(f, "\n");
    fprintf(f, "The 3rd layer right weights.\n");
    for (r = 0; r < 2; r++) {
        for (c = 0; c < 2; c++)
            fprintf(f, "W%d%d=%f ", r + 1, c + 1, w2[r][c]);
        fprintf(f, "theta=%f\n", w2[r][2]);
    }

    fprintf(f, "\n");
    fprintf(f, "The last layer right weights.\n");
    for (r = 0; r < 2; r++)
        fprintf(f, "W1%d=%f ", r + 1, w3[r]);
    fprintf(f, "theta=%f\n", w3[2]);
    fprintf(f, "\n");
}
/* Graph initilizing. */
/* Initialize Borland BGI graphics, auto-detecting the adapter. */
void initgr(void){
int gd=DETECT,gm=0; /* Auto detect VGA */
registerbgidriver(EGAVGA_driver);/* Register BGI driver */
/* NOTE(review): the driver path "e\\tc3\\tc\\" appears to be missing the
   drive colon (likely meant "e:\\tc3\\tc\\") -- confirm against the actual
   Turbo C install path. */
initgraph(&gd,&gm,"e\\tc3\\tc\\");
}
/*
 * Entry point: install the -1 bias inputs, train the network, then draw the
 * target curve (green) and the BPNN imitation (red) with BGI graphics.
 */
main(){
    int i;
    float Nx, Ny;
    float x, y;
    float dangle;
    char *s;
    char num_buf[16]; /* FIX: sprintf previously wrote through the uninitialized pointer s (undefined behavior). */
    char a[] = {" (1/2)PI PI (3/2)PI (2)PI "};

    /* Bias inputs of every layer. */
    x0[1] = -1;
    x1[5] = -1;
    x2[2] = -1;
    x3[2] = -1;
    printf("This BP neural network is to imitate the function:y=sin(3x)cosx+1 .\n");
    printf("The structure of BP neural network is '1-5-2-2-1'.\n");
    establish_BPNN();
    printf("\n");
    printf("Put anykey to see curves of BPNN imitation and function y=sin(3x)cosx+1.\n");
    getch();
    initgr(); /* BGI initialize. */
    cleardevice();
    setbkcolor(15);
    setcolor(1);
    line(100, 270, 490, 270); /* X axis with arrow head. */
    line(480, 267, 490, 270);
    line(480, 273, 490, 270);
    line(100, 20, 100, 460);  /* Y axis with arrow head. */
    line(100, 20, 97, 30);
    line(100, 20, 103, 30);
    /* Scale numbers on the Y axis (60 pixels per unit, origin at y=270). */
    settextstyle(0, 0, 1);
    for (i = 4; i >= -3; i--) {
        sprintf(num_buf, "%d", i);
        outtextxy(80, 268 - 60 * i, num_buf);
    }
    /* Scale labels and tick marks on the X axis. */
    outtextxy(100, 275, a);
    for (i = 100; i <= 460; i += 90) line(i, 270 - 2, i, 270 + 2);
    /* Tick marks on the Y axis. */
    for (i = 30; i < 460; i += 60) line(100 - 2, i, 100 + 2, i);
    /* Legend. */
    setcolor(GREEN);
    line(430, 20, 450, 20);
    setcolor(RED);
    line(430, 40, 450, 40);
    setcolor(BLUE);
    s = "y=sin(3x)cosx+1";
    outtextxy(460, 15, s);
    s = "BPNN imitation";
    outtextxy(460, 36, s);
    settextstyle(0, 0, 2);
    s = "x";
    outtextxy(500, 265, s);
    s = "y";
    outtextxy(80, 5, s);
    /* Plot y=sin(3x)cos(x)+1 (green) against the network output (red). */
    for (dangle = 0; dangle <= 2 * PI;) {
        x = 100.0 + dangle * 180 / PI;
        y = 270 - (sin(3 * dangle) * cos(dangle) + 1) * 60;
        putpixel(x, y, GREEN);
        Nx = x;
        Ny = 270 - (neural_Net(dangle)) * 60;
        putpixel(Nx, Ny, RED);
        dangle += 0.001;
    }
    getch(); /* Pause until a key is pressed. */
    closegraph(); /* Restore text mode. */
}
/* End of bp1_7.c (trailing web-page UI text removed during extraction cleanup). */