⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 bppaper.c

📁 神经网络BP算法源代码
💻 C
📖 第 1 页 / 共 2 页
字号:
			/* Forward pass: compute each layer's net inputs and outputs. */
			for(k=0; k<num_layers; k++) {
				for(h=0; h<arch[k]; h++) {
					if(k != 0) 
						lastlayer = arch[k-1];
					/* if it's the input layer then there is always one input */
					else 
						lastlayer = 1;
					/* compute this unit's activation */
					out = runUnit(unitinputs[k][h], weights[k][h],lastlayer+bias[k], squasher[k][h]);
					/* record the output of this unit */
					outputs[k][h] = out;
					/* put the results into unitinputs matrix for next layer */
					/* (slot h of every next-layer unit is fed by unit h here) */
					if(k+1 != num_layers) {
						/* except for the last layer */
						for(x=0; x<arch[k+1]; x++)
							unitinputs[k+1][x][h] = out;
					}
				}
			}
			
			/* Pattern error: 0.5 * SUM (target - output)^2 over output units.
			 * NOTE(review): thisError is accumulated with += below; its
			 * declaration/reset is outside this chunk -- confirm it is zeroed
			 * before each pattern. */
			/* calculate error */
			for(k=0; k<arch[num_layers-1]; k++)
				thisError += pow((((float)targets[j][k])-outputs[num_layers-1][k]),2);
			thisError *= 0.5;
			if(i == iterations - 1) 
				printf("Error #%d = %f\n", j, thisError);
			/* add this into the TSS for this iteration */
			TSS += thisError;
			
			/* CHECKPOINTING: every 'checkpoint' iterations (and on the last
			 * one) dump the current weights once, then append one
			 * input/target/output triple per output unit for each pattern. */
			/******************************************************************/
			/* CHECKPOINTING: implement that here                             */
			/******************************************************************/
			if(checkpoint > 0 && i != 0 && (i % checkpoint == 0 || i == iterations-1)) {
				if(checkpointOn == 0) {
					file = fopen("checkpoint", "w");
					if(!file) {
						perror("fopen");
						exit(1);
					}
					for(x=0; x<num_layers; x++) {
						for(k=0; k<arch[x]; k++) {
							if(x != 0) 
								lastlayer = arch[x-1];
							else 
								lastlayer = 0;
							for(h=0; h<(lastlayer+bias[x]); h++) {
								fprintf(file,"%f\t", weights[x][k][h]);
							}
							if(lastlayer + bias[x] > 0)
								fprintf(file,"\n");
						}
					}
					checkpointOn = 1;
				}
				/* NOTE(review): this loop is bounded by the OUTPUT layer size
				 * but also indexes inputs[j][k]; if num_inputs differs from
				 * num_outputs this reads past the input pattern -- confirm.
				 * Also, targets is printed with %d at the bottom of this loop
				 * body but with %3.5f here; if targets is int (it is cast with
				 * (float) elsewhere), %3.5f here is undefined behavior. */
				for(k=0; k<arch[num_layers-1]; k++) {
					fprintf(file,"%3.5f", inputs[j][k]);
					fprintf(file,"\t%3.5f", targets[j][k]);
					fprintf(file,"\t%3.5f\n", outputs[num_layers-1][k]);
				}
				/* NOTE(review): closing on the PATTERN index j reaching
				 * iterations-1 looks wrong -- should this test be
				 * i == iterations-1? */
				if(j == iterations-1) {
					fclose(file);
					checkpointOn = 0;
				}
			}
			/******************************************************************/
			/* CHECKPOINTING: end                                             */
			/******************************************************************/
			if(i == iterations-1) {
				for(k=0; k<arch[num_layers-1]; k++) {
					printf("\t%d", targets[j][k]);
					printf("\t%3.5f\n", outputs[num_layers-1][k]);
				}
			}
			
			/* Backward pass: error backpropagation (generalized delta rule). */
			/******************************************************************/
			/* GDR: backprop starts here                                      */
			/******************************************************************/
			for(k=num_layers-1; k>=1; k--) {
				for(h=0; h<arch[k]; h++) {
					delta = 0.0;
					/* if last layer, then delta calculation is different */
					if(k == num_layers - 1) {
						if(BPALG == 0) {
						/* standard GDR: delta = (t - o) * f'(out) */
						delta = (((float)targets[j][h]) - outputs[k][h]) * NthDerivative(squasher[k][h],1,outputs[k][h]);
						}
						else if(BPALG == 1) {
							/* alternative error function:
							 * delta = 4*(t-o)^3 * exp((t-o)^2) * f'(out) */
							delta = 4.0*pow(((float)targets[j][h])-outputs[k][h],3.0)
							*exp((((float)targets[j][h])-outputs[k][h])*
							(((float)targets[j][h])-outputs[k][h]))*NthDerivative(squasher[k][h],1,outputs[k][h]);
						}
					} else {
						sum = 0.0;
						/* hidden layer: SUM(delta_n * Wnj) over the next layer */
						for(p=0; p<arch[k+1]; p++)
							sum += deltas[k+1][p] * weights[k+1][p][h];
						delta = NthDerivative(squasher[k][h],1,outputs[k][h]) * sum;
					}
					deltas[k][h] = delta;
					/* now calculate deltaweights */
					for(p=0; p<arch[k-1]+bias[k]; p++) {
						/* equation 6.32 */
						/* we're doing epoch only, if doing 'by pattern'
						 * then change this code below */
						/* NOTE(review): "x += a + x" doubles the previously
						 * accumulated delta-weight for every pattern
						 * (x becomes 2x + a). Suspect this was meant to be
						 * plain accumulation (x += a) or a momentum term
						 * (x = a + momentum*x) -- confirm. */
						deltaweights[k][h][p] += rho * delta *unitinputs[k][h][p] + deltaweights[k][h][p];
						//printf("dweight[%d][%d][%d] = %f\n", k, h, p, deltaweights[k][h][p]);
					}
				}
			}
			/******************************************************************/
			/* GDR: backprop end                                              */
			/******************************************************************/
		}
		/* apply the delta weights now (epoch/batch update) */
		for(k=1; k<num_layers; k++) {
			for(h=0; h<arch[k]; h++) {
				for(p=0; p<arch[k-1]+bias[k]; p++)
					weights[k][h][p] += deltaweights[k][h][p];
			}
		}
		/* NOTE(review): deltaweights is not re-zeroed between epochs within
		 * this chunk even though zeroDeltaWeights() exists -- confirm it is
		 * called once per epoch somewhere above. */
		//printf("weights[0][0][0] = %f, weights[1][0][0] = %f\n",
		// weights[0][0][0], weights[1][0][0]);
		if(i % 1000 == 0)
			printf("%d\t%f\n", i, TSS*2);
	}
	/************************************************************************/
	/* END: main loop */
	/************************************************************************/
	/* we're not going to free all the memory here b/c looping and freeing all
	 * the memory we allocated would take longer than letting the OS free the
	 * entire section of memory when the program ends. If this is going to be
	 * used for something else (i.e: the program doesn't end here) then all this
	 * memory must be freed. */
	return 1;
}

/*
 * readParamFile -- parse a network-parameter file of the form:
 *   num_inputs: N / num_outputs: N / training_set_size: N / num_layers: N
 *   layers: a b c ...        (one size per layer)
 *   layer_biases: 0 1 ...    (one boolean per layer)
 * Allocates *arch and *bias (caller owns/frees them). On any failure
 * (unopenable file, malformed field, non-positive num_layers, OOM) it
 * reports the problem and exits, matching the file's error-handling style.
 */
void readParamFile(char *filename, int *num_inputs, int *num_outputs,int *training_set_size, int *num_layers, int **arch, int **bias){
	FILE *infile;
	int i;
	printf("Reading parameters file... ... \n");
	infile = fopen(filename, "r");
	if(!infile) {
		printf("Reading parameters file failed \n");
		perror("fopen");
		exit(1);
	}
	/* fscanf returns the number of successful conversions; each header
	 * line must convert exactly one integer. */
	if(fscanf(infile, "num_inputs: %d\n", num_inputs) != 1
	|| fscanf(infile, "num_outputs: %d\n", num_outputs) != 1
	|| fscanf(infile, "training_set_size: %d\n", training_set_size) != 1
	|| fscanf(infile, "num_layers: %d\n", num_layers) != 1
	|| *num_layers <= 0) {
		fprintf(stderr, "readParamFile: malformed header in %s\n", filename);
		exit(1);
	}
	/* malloc room for the per-layer sizes and bias flags */
	*arch = malloc((size_t)*num_layers * sizeof **arch);
	*bias = malloc((size_t)*num_layers * sizeof **bias);
	if(!*arch || !*bias) {
		perror("malloc");
		exit(1);
	}
	/* literal-only formats convert nothing, so their return is not checked */
	fscanf(infile, "layers: ");
	for(i=0; i<*num_layers; i++) {
		if(fscanf(infile, "%d\t", &((*arch)[i])) != 1) {
			fprintf(stderr, "readParamFile: bad layer size in %s\n", filename);
			exit(1);
		}
	}
	fscanf(infile, "\n");
	/* read in booleans for whether each layer has a bias input */
	fscanf(infile, "layer_biases: ");
	for(i=0; i<*num_layers; i++) {
		if(fscanf(infile, "%d\t", &((*bias)[i])) != 1) {
			fprintf(stderr, "readParamFile: bad bias flag in %s\n", filename);
			exit(1);
		}
	}
	fclose(infile);
	printf("Reading parameters file end\n");
}

/* Echo the parsed network parameters to stdout, mirroring the layout of
 * the parameter file that readParamFile consumes. */
static void print_int_row(const char *label, const int *vals, int count){
	int idx;
	printf("%s", label);
	for(idx = 0; idx < count; idx++)
		printf("%d ", vals[idx]);
	printf("\n");
}

void printParams(int num_inputs, int num_outputs, int training_set_size,int num_layers, int *arch, int *bias){
	printf("num_inputs: %d\n", num_inputs);
	printf("num_outputs: %d\n", num_outputs);
	printf("training_set_size: %d\n", training_set_size);
	printf("num_layers: %d\n", num_layers);
	print_int_row("layers: ", arch, num_layers);
	print_int_row("layer_biases: ", bias, num_layers);
}

/* Initialize every unit's input slots (fan-in slots plus any bias slot)
 * to 1.0. Layer 0 units receive a single external input; a unit in layer
 * i > 0 receives one input from each of the arch[i-1] units below it,
 * plus bias[i] bias inputs. */
void setupInputs(float ***unitinputs, int num_layers, int *arch, int *bias){
	int layer, unit, slot;
	printf("Initializing unitinputs... ... \n");
	for(layer = 0; layer < num_layers; layer++) {
		int fanin = (layer == 0) ? 1 : arch[layer - 1];
		int slots = fanin + bias[layer];
		for(unit = 0; unit < arch[layer]; unit++) {
			for(slot = 0; slot < slots; slot++)
				unitinputs[layer][unit][slot] = 1.0;
		}
	}
	printf("Initializing unitinputs end \n");
}

/* Reset every accumulated delta-weight to 0.0 so a fresh epoch can
 * accumulate its own updates. Slot counts per unit match setupInputs:
 * layer 0 has a single fan-in, layer i > 0 has arch[i-1], plus bias[i]. */
void zeroDeltaWeights(float ***deltaweights, int num_layers, int *arch,int *bias){
	int layer, unit, slot;
	printf("Initializing deltaweights... ... \n");
	for(layer = 0; layer < num_layers; layer++) {
		int fanin = (layer == 0) ? 1 : arch[layer - 1];
		int slots = fanin + bias[layer];
		for(unit = 0; unit < arch[layer]; unit++) {
			for(slot = 0; slot < slots; slot++)
				deltaweights[layer][unit][slot] = 0.0;
		}
	}
	printf("Initializing deltaweights end \n");
}

/*
 * runUnit -- compute one unit's activation.
 * Forms the weighted sum (net) of the first numInputs inputs and passes
 * it through the squashing function selected by 'squasher' (deriv 0 of
 * NthDerivative is the function value itself).
 * Fix: removed the unused local 'out'.
 */
float runUnit(float *inputs, float *weights, int numInputs, int squasher){
	int i;
	float net = 0.0;
	for(i=0; i<numInputs; i++)
		net += (inputs[i] * weights[i]);
	return NthDerivative(squasher, 0, net);
}

/* Dump a dim1 x dim2 matrix to stdout, one "mat[i][j] = v" line per
 * element, in row-major order. */
void print2D(float **mat, int dim1, int dim2) {
	int row, col;
	for(row = 0; row < dim1; row++) {
		for(col = 0; col < dim2; col++) {
			printf("\tmat[%d][%d] = %f\n", row, col, mat[row][col]);
		}
	}
}

/* Pretty-print every weight in the network, grouped by layer and unit.
 * A unit in layer i > 0 has arch[i-1]+bias[i] weights; layer-0 units
 * have only bias[0] weights (no feeding layer). */
void printWeights(float ***weights, int numLayers, int *arch, int *bias){
	int layer, unit, w;
	for(layer = 0; layer < numLayers; layer++) {
		int fanin = (layer == 0) ? 0 : arch[layer - 1];
		printf("Layer[%d]\n", layer);
		for(unit = 0; unit < arch[layer]; unit++) {
			printf("\tU[%d]\n", unit);
			for(w = 0; w < fanin + bias[layer]; w++)
				printf(" w[%d][%d][%d] = %f", layer, unit, w, weights[layer][unit][w]);
			printf("\n");
		}
	}
}

/*
 * NthDerivative -- evaluate a squashing function (deriv == 0) or its
 * first derivative (deriv == 1) at 'value'.
 * For SIGMOID deriv 1, 'value' is expected to be the unit's OUTPUT,
 * since s'(net) is computed as out*(1-out) (callers in the backprop
 * loop pass outputs[k][h]).
 * NOTE(review): that derivative omits the SigmoidAlpha factor
 * (s' = alpha*out*(1-out)); exact only when SigmoidAlpha == 1.0 -- confirm.
 * Any unsupported squasher/derivative combination aborts the program.
 */
float NthDerivative(int squasher, int deriv, float value){
	if(squasher == SIGMOID) {
		if(deriv == 0)
			return (1.0/(1.0+exp(-(SigmoidAlpha*value))));
		if(deriv == 1)
			return value*(1.0-value);
	} else if(squasher == LINEAR) {
		if(deriv == 0)
			return value;
	} else {
		printf("INVALID SQUASHER\n");
		exit(1);
	}
	printf("DERIV. NOT IMPLEMENTED.\n");
	exit(1);
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -