// bpn.cpp -- LAYER and BPN (back-propagation network) class implementation; declarations in bpn.h.
#include "bpn.h"

#include <math.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "smp_func.h"

using namespace std;

LAYER::LAYER(int num_units, int num_punits){
	units = 0;
	p_units = 0;
	Resize(num_units, num_punits);
}

LAYER::~LAYER(){
} 

bool LAYER::Resize(int num_units, int num_punits){
	if(num_units < 1 || num_punits < 0) return false;
	units = num_units;
	p_units = num_punits;

	Output.resize(units+1);
	Output[0] = 1;	//BIAS
	Error.resize(units+1);

	if(num_punits > 0){	// not the first layer
		Weight.resize(units+1);
		dWeight.resize(units+1);
		last_dWeight.resize(units+1);

		int i;
		for(i=1; i<=units; i++){
			Weight[i].resize(p_units+1);
			dWeight[i].resize(p_units+1);
			last_dWeight[i].resize(p_units+1);

			int j;
			for(j=0; j<p_units+1; j++){
				dWeight[i][j] = 0;
				last_dWeight[i][j] = 0;
			}
		}
	}

	Eta = 1;
	Alpha = 0.5;
	
	return true;
}

void LAYER::SetFuncType(FuncType functype){
	this->functype = functype;
}

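// Activation functions and their derivatives. Each derivative is written
// in terms of the unit's *output* rather than its net input, which is what
// the backward pass has at hand (e.g. for logsig, f' = Gain*f*(1-f)).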
REAL BPN::sigmoid(REAL Input){
	return 1 / (1 + exp(-Gain * Input));
}

REAL BPN::dsigmoid(REAL Out){
	return Gain * Out * (1-Out);
}

REAL BPN::purelinear(REAL Input){
	return Input;
}

REAL BPN::dpurelinear(REAL Out){
	return 1;
}

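// tanh via the identity tanh(x) = 1 - 2/(e^(2x)+1); with t = e^x,
// Sqr(t) = e^(2x). Note that Gain is not applied here, unlike in sigmoid().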
REAL BPN::tanh(REAL Input){
	REAL t = exp(Input);
	t = Sqr(t);
	return (1 - 2/(t+1));
}

REAL BPN::dtanh(REAL Out){
	return (1 - Sqr(Out));
}

void BPN::Resize(int NUM_LAYERS, int* Units){
	if(NUM_LAYERS < 2) return;

	this->NUM_LAYERS = NUM_LAYERS;	
	
	Layers.resize(NUM_LAYERS);
	Layers[0].Resize(Units[0], 0);

	int i;
	for (i=1; i<NUM_LAYERS; i++) {
		Layers[i].Resize(Units[i], Units[i-1]);		
	}

	InputLayer  = &Layers[0];
	OutputLayer = &Layers[NUM_LAYERS - 1];
}

BPN::BPN(int NUM_LAYERS, int* LAYER_SIZE, FuncType* functype, bool BIASED){
	this->NUM_LAYERS = 0;
	Gain = 1;

	if(NUM_LAYERS < 2) return;

	this->NUM_LAYERS  = NUM_LAYERS;

	if(BIASED)
		this->BIAS = 1;
	else
		this->BIAS = 0;

	int* Units = new int[NUM_LAYERS];

	if (LAYER_SIZE == NULL){
		int i;
		for(i = 0; i<NUM_LAYERS; i++){
			Units[i] = 1;
		}
	}
	else{
		int i;
		for(i = 0; i<NUM_LAYERS; i++){
			Units[i] = LAYER_SIZE[i];
		}
	}

	this->INPUT_SIZE  = Units[0];
	this->OUTPUT_SIZE = Units[NUM_LAYERS-1];

	Resize(NUM_LAYERS, Units);

	delete[] Units;	// array new requires array delete
	
	int i;
	for(i=1; i<NUM_LAYERS; i++){
		if (functype == NULL)
			Layers[i].functype = logsig;
		else
			Layers[i].functype = functype[i-1];
	}

	Input_HI = 1;
	Input_LO = -1;
	Target_HI = 1;
	Target_LO = -1;
	Input_MAX = 1;
	Input_MIN = -1;
	Target_MAX = 1;
	Target_MIN = -1;

	epoch = 10;
	batch_period = 1;
	traintype = adapt;
	
	//RandomWeights(-0.5,0.5);
}

BPN::~BPN(){
}

void BPN::RandomWeights(REAL Low, REAL High)
{
	int l,i,j;
	for (l=1; l<NUM_LAYERS; l++) {
		for (i=1; i<=Layers[l].units; i++) {
			for (j=0; j<=Layers[l-1].units; j++) {
				Layers[l].Weight[i][j] = REAL(rand()) / RAND_MAX *(High-Low) + Low;
			}    
		}
	}
}

int BPN::RandomInt(int Low, int High){
	return (rand()%(High-Low+1) + Low);
}

int BPN::RandomInt(int High){
	return RandomInt(0,High);
}

void BPN::SetInput(REAL* Input)
{ 
	int i;
	for(i=1; i<=INPUT_SIZE; i++) {
		InputLayer->Output[i] = Input[i-1];
	}
}

void BPN::GetOutput(REAL* Output)
{
	int i;

	for (i=1; i<=OutputLayer->units; i++) {
		Output[i-1] = OutputLayer->Output[i];
	}
}

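// Forward pass between two adjacent layers: each upper unit computes
//   out_i = f( sum_{j=0..n} w[i][j] * out_j ),
// where j = 0 is the constant bias term (Output[0] == 1).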
void BPN::PropagateLayer(int Lower, int Upper)
{
	int  i,j;
	REAL Sum;

	for (i=1; i<=Layers[Upper].units; i++) {
		Sum = 0;
		for (j=0; j<=Layers[Lower].units; j++) {
			Sum += Layers[Upper].Weight[i][j] * Layers[Lower].Output[j];
		}
		switch(Layers[Upper].functype){
		case logsig:   Layers[Upper].Output[i] = sigmoid(Sum); break;
		case purelin:  Layers[Upper].Output[i] = purelinear(Sum); break;
		case tansig:   Layers[Upper].Output[i] = tanh(Sum); break;
		default:  Layers[Upper].Output[i] = purelinear(Sum);
		}
	}
}

void BPN::PropagateNet()
{
	int i;   
	for (i = 0; i < NUM_LAYERS-1; i++)
		PropagateLayer(i, i+1);
}

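// Output-layer deltas from a desired-output vector:
//   delta_i = f'(out_i) * (desire_i - out_i),
// accumulating the summed squared error E = 0.5 * sum (desire - out)^2.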
void BPN::ComputeOutputError(REAL* Desire)
{
	int  i;
	REAL Out, Err;
   
	Error = 0;
	for (i=1; i<=OutputLayer->units; i++) {
		Out = OutputLayer->Output[i];
		Err = Desire[i-1]-Out;

		switch(OutputLayer->functype){
		case logsig:   
			OutputLayer->Error[i] = dsigmoid(Out) * Err; break;
		case purelin:
			OutputLayer->Error[i] = dpurelinear(Out) * Err; break;
		case tansig:
			OutputLayer->Error[i] = dtanh(Out) * Err; break;
		default:  OutputLayer->Error[i] = dpurelinear(Out) * Err;
		}
		Error += 0.5 * Sqr(Err);
	}
}

void BPN::SetOutputError(REAL* Errors){
	int i;
	REAL Out, Err;
	Error = 0;	// reset accumulated error, as ComputeOutputError() does
	for (i=1; i<=OutputLayer->units; i++) {
		Out = OutputLayer->Output[i];
		Err = Errors[i-1];

		switch(OutputLayer->functype){
		case logsig:   
			OutputLayer->Error[i] = dsigmoid(Out) * Err; break;
		case purelin:
			OutputLayer->Error[i] = dpurelinear(Out) * Err; break;
		case tansig:
			OutputLayer->Error[i] = dtanh(Out) * Err; break;
		default:  OutputLayer->Error[i] = dpurelinear(Out) * Err;
		}

		Error += 0.5 * Sqr(Err);
	}
}

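// Standard delta rule for hidden layers: a lower unit's error is the
// weighted sum of the upper layer's deltas, scaled by the derivative of
// its own activation:  delta_i = f'(out_i) * sum_j w[j][i] * delta_j.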
void BPN::BackpropagateLayer(int Upper, int Lower)
{
	int  i,j;
	REAL Out, Err;
   
	for (i=1; i<=Layers[Lower].units; i++) {
		Out = Layers[Lower].Output[i];
		Err = 0;
		for (j=1; j<=Layers[Upper].units; j++) {
			Err += Layers[Upper].Weight[j][i] * Layers[Upper].Error[j];
		}
		switch(Layers[Lower].functype){
		case logsig:   
			Layers[Lower].Error[i] = dsigmoid(Out) * Err; break;
		case purelin:
			Layers[Lower].Error[i] = dpurelinear(Out) * Err; break;
		case tansig:
			Layers[Lower].Error[i] = dtanh(Out) * Err; break;
		default:  Layers[Lower].Error[i] = dpurelinear(Out) * Err;
		}
	}
}


void BPN::BackpropagateNet()
{
	int i;
	for (i=NUM_LAYERS-1; i>0; i--) {
		BackpropagateLayer(i, i-1);
	}
	GeneratedWeights();
}

void BPN::ResetdWeights(){
	int  l,i,j;
	for (l=1; l<NUM_LAYERS; l++) {
		for (i=1; i<=Layers[l].units; i++) {
			for (j=0; j<=Layers[l-1].units; j++) {
				Layers[l].dWeight[i][j] = 0;
			}
		}
	}
}

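// Accumulate the learning-rate-scaled gradient for every weight:
//   dW[i][j] += Eta * delta_i * out_j.
// Called once per backward pass; AdjustWeights() applies and clears it.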
void BPN::GeneratedWeights(){
	int  l,i,j;
	for (l=1; l<NUM_LAYERS; l++) {
		for (i=1; i<=Layers[l].units; i++) {
			for (j=0; j<=Layers[l-1].units; j++) {
				Layers[l].dWeight[i][j] += Layers[l].Eta * Layers[l].Error[i] * Layers[l-1].Output[j];
			}
		}
	}
}

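// Apply the accumulated update with momentum:
//   w += dW + Alpha * last_dW,
// then save dW as the next step's momentum term and reset it to zero.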
void BPN::AdjustWeights(){
	int  l,i,j;
	for (l=1; l<NUM_LAYERS; l++) {
		for (i=1; i<=Layers[l].units; i++) {
			for (j=0; j<=Layers[l-1].units; j++) {
				Layers[l].Weight[i][j] += Layers[l].dWeight[i][j] 
						+ Layers[l].Alpha * Layers[l].last_dWeight[i][j];
				Layers[l].last_dWeight[i][j] = Layers[l].dWeight[i][j];
				Layers[l].dWeight[i][j] = 0;
			}
		}
	}
}

void BPN::SimulateNet(REAL* Input, REAL* Output){
	SetInput(Input);
	PropagateNet();
	GetOutput(Output);
}

void BPN::SimulateNet(int num_samples, REAL* Input, REAL* Output){
	int i;
	for(i=0; i<num_samples; i++){
		SimulateNet(Input + INPUT_SIZE * i, Output + OUTPUT_SIZE * i);
	}
}

void BPN::ForwardBack(REAL* Input, REAL* Desire){
	SetInput(Input);
	PropagateNet();

	ComputeOutputError(Desire);
	BackpropagateNet();
}

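// Online ("adapt") training: epoch * num_samples random presentations,
// with a weight update after every single sample.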
void BPN::Adapt(int num_samples, REAL* Input, REAL* Desire)
{
	int i, t;
	for(i=0; i<epoch*num_samples; i++){
		t = RandomInt(num_samples-1);
		ForwardBack(Input + INPUT_SIZE * t, Desire + OUTPUT_SIZE * t);
		AdjustWeights();
	}
}

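// Mini-batch training: accumulate gradients over batch_period randomly
// chosen samples, then apply one combined weight update.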
void BPN::BatchTrain(int num_samples, REAL* Input, REAL* Desire){
	int i, j, t, n;
	n = epoch*num_samples / batch_period;
	for(i=0; i < n; i++){
		for(j=0; j < batch_period; j++){
			t = RandomInt(num_samples-1);
			ForwardBack(Input + INPUT_SIZE * t, Desire + OUTPUT_SIZE * t);
		}
		AdjustWeights();
	}
}

void BPN::TestNet(REAL* Input, REAL* Desire){
	SetInput(Input);
	PropagateNet();
	ComputeOutputError(Desire);
}

REAL BPN::TestNet(int num_samples, REAL* Input, REAL* Desire){
	REAL Error = 0; 
	int i;
	for(i=0; i<num_samples; i++){
		TestNet(Input + INPUT_SIZE * i, Desire + OUTPUT_SIZE * i);
		Error += GetError();
	}
	return Error / num_samples;
}

void BPN::SetEpoch(int epoch){
	this->epoch = epoch;
}

void BPN::SetBatchPeriod(int period){
	this->batch_period = period;
}

void BPN::TrainNet(int num_samples, REAL* Input, REAL* Desire){
	switch(traintype){
	case adapt:
		Adapt(num_samples, Input, Desire);
		break;
	case batch:
		BatchTrain(num_samples, Input, Desire);
		break;
	default:
		Adapt(num_samples, Input, Desire);
		break;
	}
}

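// Linear range mapping between the data range [MIN, MAX] and the network's
// working range [LO, HI]:  y' = (y - MIN)/(MAX - MIN) * (HI - LO) + LO.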
REAL BPN::Normalize_Input(REAL y){
	return((y - Input_MIN) / (Input_MAX - Input_MIN) * (Input_HI - Input_LO) + Input_LO);
}

REAL BPN::DeNormalize_Input(REAL y){
	return((y - Input_LO) / (Input_HI - Input_LO) * (Input_MAX - Input_MIN) + Input_MIN);
}

REAL BPN::Normalize_Target(REAL y){
	return((y - Target_MIN) / (Target_MAX - Target_MIN) * (Target_HI - Target_LO) + Target_LO);
}

REAL BPN::DeNormalize_Target(REAL y){
	return((y - Target_LO) / (Target_HI - Target_LO) * (Target_MAX - Target_MIN) + Target_MIN);
}

void BPN::Normalize(int num_samples, REAL* Input, REAL* Desire){
	int i;
	for(i=0; i<num_samples * INPUT_SIZE ; i++){
		Input[i] = Normalize_Input(Input[i]);
	}
	for(i=0; i<num_samples * OUTPUT_SIZE ; i++){
		Desire[i] = Normalize_Target(Desire[i]);
	}
}

void BPN::DeNormalize(int num_samples, REAL* Output){
	int i;
	for(i=0; i<num_samples * OUTPUT_SIZE; i++){
		Output[i] = DeNormalize_Target(Output[i]);
	}
}

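// Text format written by SaveNet() and read back by RestoreNetFromString():
//   line 1: number of layers
//   line 2: units per layer
//   then one weight matrix per non-input layer (one source unit per line,
//   the first line of each matrix being the bias weights), and finally one
//   activation-function name ("logsig" / "tansig" / "purelin") per layer.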
bool BPN::SaveNet(char* filename){
	int l,i,j;
	FILE* fp=fopen(filename,"w");
	if (fp == NULL) return false;

	fprintf(fp,"%d\n",NUM_LAYERS);
	for(i=0; i<NUM_LAYERS; i++)
		fprintf(fp,"%d ",Layers[i].units);
	fprintf(fp,"\n");
	for (l=1; l<NUM_LAYERS; l++) {
		for (i=0; i<=Layers[l-1].units; i++) {
			for (j=1; j<=Layers[l].units; j++) {
				fprintf(fp,"%f ",Layers[l].Weight[j][i]);
			}
			fprintf(fp,"\n");
		}
	}
	for(l=1; l<NUM_LAYERS; l++){
		switch(Layers[l].functype){
		case logsig:    fprintf(fp, "logsig\n");  break;
		case tansig:    fprintf(fp, "tansig\n");  break;
		case purelin:   fprintf(fp, "purelin\n"); break;
		default:
			fprintf(fp, "logsig\n");
		}
	}
	fclose(fp);
	return true;
}

bool BPN::RestoreNetFromString(char* string){
	int l,i,j;
	bool Success = false;
	
	if (string == NULL) return false;

	int num_layers;

	if (sscanf(string,"%d",&num_layers) != 1){	// sscanf may return EOF, so test != 1
		return false;
	}
	if(num_layers < 2) return false;

	get_int(&string);
		
	int* Units = new int[num_layers];

	for(i=0; i<num_layers; i++){
		if(sscanf(string,"%d",&Units[i]) != 1){
			delete[] Units;
			return false;
		}
		else
			get_int(&string);
	}

	FuncType* functype = new FuncType[num_layers-1];

	float*** weight=new float**[num_layers];
	for (i=1; i<num_layers; i++) {
		weight[i]  = new float*[Units[i]+1];
		for (j=1; j<=Units[i]; j++)
			weight[i][j] = new float[Units[i-1]+1];
	}

	for (l=1; l<num_layers; l++) {
		for (i=0; i<=Units[l-1]; i++) {
			for (j=1; j<=Units[l]; j++) {
				if (sscanf(string,"%f",&weight[l][j][i]) != 1)
					goto destruct;
				else
					get_float(&string);
			}
		}
	}

	char t[100];
	for(l=1; l<num_layers; l++){
		get_word(&string);
		sscanf(string,"%s",t);
		if (strcmp(t,"logsig") == 0){
			functype[l-1] = logsig;
			string += 6;
		}
		else if(strcmp(t,"tansig") == 0){
			functype[l-1] = tansig;
			string += 6;
		}
		else if(strcmp(t,"purelin") == 0){
			functype[l-1] = purelin;
			string += 7;
		}
		else
			functype[l-1] = logsig;	// unknown name: fall back to the default
	}

	Success = true;

	Resize(num_layers, Units);

	for (l=1; l<num_layers; l++) {
		for (i=1; i<=Layers[l].units; i++) {
			for (j=0; j<=Layers[l-1].units; j++) {
				Layers[l].Weight[i][j] = weight[l][i][j];
			}    
		}
		Layers[l].functype = functype[l-1];
	}
	
destruct:
	for (i=1; i<num_layers; i++) {
		for (j=1; j<=Units[i]; j++){
			delete[] weight[i][j];
		}
		delete[] weight[i];
	}
	delete[] weight;

	if (Success){	// adopt the new geometry only if the whole file parsed
		NUM_LAYERS = num_layers;
		INPUT_SIZE = Units[0];
		OUTPUT_SIZE = Units[num_layers-1];
	}

	delete[] Units;
	delete[] functype;
	return Success;
}

bool BPN::RestoreNet(char* filename, int mode){
	FILE *fp = fopen(filename, "r");
	if (fp == NULL) return false;

	char string[2000];
	int i = 0, c;
	// read at most sizeof(string)-1 characters; the old feof() loop could
	// overrun the buffer and stored one garbage character at end of file
	while (i < (int)sizeof(string)-1 && (c = fgetc(fp)) != EOF) {
		string[i++] = (char)c;
	}
	string[i] = 0;

	fclose(fp);
	return RestoreNetFromString(string);
}

REAL BPN::GetError(){
	return Error;
}

void BPN::GetInputError(REAL* Errors){
	int i;
	for(i=1; i<=InputLayer->units; i++){
		Errors[i-1] = InputLayer->Error[i];
	}
}

bool BPN::SetInputRange(REAL min, REAL max){
	if(max < min) return false;
	Input_MAX = max;
	Input_MIN = min;
	return true;
}

bool BPN::SetTargetRange(REAL min, REAL max){
	if(max < min) return false;
	Target_MAX = max;
	Target_MIN = min;
	return true;
}

bool BPN::SetInsideRange(REAL input_min, REAL input_max, REAL target_min, REAL target_max){
	if(input_max < input_min || target_max < target_min) return false;
	Input_HI = input_max;
	Input_LO = input_min;
	Target_HI = target_max;
	Target_LO = target_min;
	return true;
}

bool BPN::SetFunctype(FuncType functype, int layer){
	if (layer >= NUM_LAYERS || layer < 0)
		return false;
	else{
		Layers[layer].SetFuncType(functype);
		return true;
	}
}

void BPN::SetTraintype(TrainType traintype){
	this->traintype = traintype;
}

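// ---------------------------------------------------------------------------
// Usage sketch (illustrative, not part of the original source): assumes
// bpn.h supplies the REAL typedef and the FuncType/TrainType enums used
// above. Trains a 2-2-1 network on XOR with the default online trainer.
//
//   int sizes[3] = {2, 2, 1};
//   FuncType funcs[2] = {tansig, logsig};   // hidden layer, output layer
//   BPN net(3, sizes, funcs, true);
//   net.RandomWeights(-0.5, 0.5);
//   net.SetEpoch(1000);
//
//   REAL in[8]  = {0,0, 0,1, 1,0, 1,1};
//   REAL out[4] = {0,   1,   1,   0};
//   net.TrainNet(4, in, out);
//
//   REAL y;
//   net.SimulateNet(in, &y);                // y should approach 0 for (0,0)
// ---------------------------------------------------------------------------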