
📄 fann_train.c

📁 A powerful neural network analysis program
💻 C
📖 Page 1 of 2
/* INTERNAL FUNCTION
   Update weights for incremental training
*/
void fann_update_weights(struct fann *ann)
{
    struct fann_neuron *neuron_it, *last_neuron, *prev_neurons;
    fann_type tmp_error;
    struct fann_layer *layer_it;
    unsigned int i;

    /* store some variables locally for fast access */
    const float learning_rate = ann->learning_rate;
    const struct fann_neuron *first_neuron = ann->first_layer->first_neuron;
    struct fann_layer *first_layer = ann->first_layer;
    const struct fann_layer *last_layer = ann->last_layer;
    fann_type *error_begin = ann->train_errors;

#ifdef DEBUGTRAIN
    printf("\nupdate weights\n");
#endif

    for(layer_it = (first_layer + 1); layer_it != last_layer; layer_it++){
#ifdef DEBUGTRAIN
        printf("layer[%d]\n", layer_it - first_layer);
#endif
        last_neuron = layer_it->last_neuron;
        if(ann->connection_rate >= 1 && !ann->shortcut_connections){
            /* optimization for fully connected networks,
               but not shortcut connected networks */
            prev_neurons = (layer_it - 1)->first_neuron;
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++){
                tmp_error = error_begin[neuron_it - first_neuron] * learning_rate;
                for(i = neuron_it->num_connections; i--; ){
                    neuron_it->weights[i] += tmp_error * prev_neurons[i].value;
                }
            }
        }else{
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++){
                tmp_error = error_begin[neuron_it - first_neuron] * learning_rate;
                for(i = neuron_it->num_connections; i--; ){
                    neuron_it->weights[i] += tmp_error * neuron_it->connected_neurons[i]->value;
                }
            }
        }
    }
}
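The inner loop of fann_update_weights is the classic online (incremental) delta rule: each weight changes by learning rate × neuron error × input value. A standalone sketch of that single-neuron step, using hypothetical plain arrays instead of FANN's internal structures:

#include <stddef.h>

/* Illustrative sketch only (not part of fann_train.c): the update that
 * fann_update_weights applies per neuron, written over plain arrays. */
void delta_rule_step(float *weights, const float *inputs, size_t n,
                     float error, float learning_rate)
{
    float tmp_error = error * learning_rate;   /* mirrors tmp_error above */
    for(size_t i = n; i-- > 0; ){
        weights[i] += tmp_error * inputs[i];   /* w_i += eta * delta * x_i */
    }
}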
/* INTERNAL FUNCTION
   Update slopes for batch training
*/
void fann_update_slopes_batch(struct fann *ann)
{
    struct fann_neuron *neuron_it, *last_neuron, *prev_neurons;
    fann_type tmp_error, *weights_begin;
    struct fann_layer *layer_it;
    unsigned int i;

    /* store some variables locally for fast access */
    const struct fann_neuron *first_neuron = ann->first_layer->first_neuron;
    struct fann_layer *first_layer = ann->first_layer;
    const struct fann_layer *last_layer = ann->last_layer;
    fann_type *error_begin = ann->train_errors;
    fann_type *slope_begin, *neuron_slope;

    /* if no room is allocated for the slope variables, allocate it now */
    if(ann->train_slopes == NULL){
        ann->train_slopes = (fann_type *)calloc(ann->total_connections, sizeof(fann_type));
        if(ann->train_slopes == NULL){
            fann_error((struct fann_error *)ann, FANN_E_CANT_ALLOCATE_MEM);
            return;
        }
        /* calloc already zero-fills, so this memset is redundant but harmless */
        memset(ann->train_slopes, 0, (ann->total_connections) * sizeof(fann_type));
    }

    slope_begin = ann->train_slopes;
    weights_begin = fann_get_weights(ann);

#ifdef DEBUGTRAIN
    printf("\nupdate slopes\n");
#endif

    for(layer_it = (first_layer + 1); layer_it != last_layer; layer_it++){
#ifdef DEBUGTRAIN
        printf("layer[%d]\n", layer_it - first_layer);
#endif
        last_neuron = layer_it->last_neuron;
        if(ann->connection_rate >= 1 && !ann->shortcut_connections){
            /* optimization for fully connected networks,
               but not shortcut connected networks */
            prev_neurons = (layer_it - 1)->first_neuron;
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++){
                tmp_error = error_begin[neuron_it - first_neuron];
                neuron_slope = slope_begin + (neuron_it->weights - weights_begin);
                for(i = neuron_it->num_connections; i--; ){
                    neuron_slope[i] += tmp_error * prev_neurons[i].value;
                }
            }
        }else{
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++){
                tmp_error = error_begin[neuron_it - first_neuron];
                neuron_slope = slope_begin + (neuron_it->weights - weights_begin);
                for(i = neuron_it->num_connections; i--; ){
                    neuron_slope[i] += tmp_error * neuron_it->connected_neurons[i]->value;
                }
            }
        }
    }
}

/* INTERNAL FUNCTION
   Clears arrays used for training before a new training session.
   Also creates the arrays that do not exist yet.
*/
void fann_clear_train_arrays(struct fann *ann)
{
    unsigned int i;

    /* if no room is allocated for the slope variables, allocate it now */
    if(ann->train_slopes == NULL){
        ann->train_slopes = (fann_type *)calloc(ann->total_connections, sizeof(fann_type));
        if(ann->train_slopes == NULL){
            fann_error((struct fann_error *)ann, FANN_E_CANT_ALLOCATE_MEM);
            return;
        }
    }
    memset(ann->train_slopes, 0, (ann->total_connections) * sizeof(fann_type));

    /* if no room is allocated for the previous steps, allocate it now */
    if(ann->prev_steps == NULL){
        ann->prev_steps = (fann_type *)calloc(ann->total_connections, sizeof(fann_type));
        if(ann->prev_steps == NULL){
            fann_error((struct fann_error *)ann, FANN_E_CANT_ALLOCATE_MEM);
            return;
        }
    }
    memset(ann->prev_steps, 0, (ann->total_connections) * sizeof(fann_type));

    /* if no room is allocated for the previous slopes, allocate it now */
    if(ann->prev_train_slopes == NULL){
        ann->prev_train_slopes = (fann_type *)calloc(ann->total_connections, sizeof(fann_type));
        if(ann->prev_train_slopes == NULL){
            fann_error((struct fann_error *)ann, FANN_E_CANT_ALLOCATE_MEM);
            return;
        }
    }

    if(ann->training_algorithm == FANN_TRAIN_RPROP){
        /* seed RPROP with a small positive previous slope */
        for(i = 0; i < ann->total_connections; i++){
            ann->prev_train_slopes[i] = (fann_type)0.0125;
        }
    } else {
        memset(ann->prev_train_slopes, 0, (ann->total_connections) * sizeof(fann_type));
    }
}

/* INTERNAL FUNCTION
   Update weights for batch training
*/
void fann_update_weights_batch(struct fann *ann, unsigned int num_data)
{
    fann_type *train_slopes = ann->train_slopes;
    fann_type *weights = fann_get_weights(ann);
    const float epsilon = ann->learning_rate / num_data;
    unsigned int i = ann->total_connections;

    while(i--){
        weights[i] += train_slopes[i] * epsilon;
        train_slopes[i] = 0.0;
    }
}
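fann_update_weights_batch applies one averaged step per epoch: the slopes accumulated by fann_update_slopes_batch over all num_data patterns are scaled by learning_rate / num_data. A small worked example (all numeric values are made up for illustration):

#include <stdio.h>

int main(void)
{
    float weight = 0.5f;
    float learning_rate = 0.7f;
    unsigned int num_data = 2;

    /* slope summed over the two training patterns of a batch
     * (what fann_update_slopes_batch would have accumulated) */
    float accumulated_slope = 0.2f + (-0.1f);  /* = 0.1 */

    /* one batch step, as in fann_update_weights_batch */
    float epsilon = learning_rate / num_data;  /* 0.35 */
    weight += accumulated_slope * epsilon;     /* 0.5 + 0.1 * 0.35 = 0.535 */

    printf("updated weight: %f\n", weight);
    return 0;
}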
/* INTERNAL FUNCTION
   The quickprop training algorithm
*/
void fann_update_weights_quickprop(struct fann *ann, unsigned int num_data)
{
    fann_type *train_slopes = ann->train_slopes;
    fann_type *weights = fann_get_weights(ann);
    fann_type *prev_steps = ann->prev_steps;
    fann_type *prev_train_slopes = ann->prev_train_slopes;
    fann_type w, prev_step, slope, prev_slope, next_step;

    float epsilon = ann->learning_rate / num_data;
    float decay = ann->quickprop_decay;   /* e.g. -0.0001 */
    float mu = ann->quickprop_mu;         /* e.g. 1.75 */
    float shrink_factor = (float)(mu / (1.0 + mu));

    unsigned int i = ann->total_connections;

    while(i--){
        w = weights[i];
        prev_step = prev_steps[i];
        slope = train_slopes[i] + decay * w;
        prev_slope = prev_train_slopes[i];
        next_step = 0.0;

        /* The step must always be in the direction opposite to the slope. */
        if(prev_step > 0.001){
            /* If the last step was positive... */
            if(slope > 0.0){
                /* ...add in the linear term if the current slope is still positive. */
                next_step += epsilon * slope;
            }

            /* If the current slope is close to or larger than the previous slope... */
            if(slope > (shrink_factor * prev_slope)){
                next_step += mu * prev_step; /* take the maximum size step */
            } else {
                next_step += prev_step * slope / (prev_slope - slope); /* else use the quadratic estimate */
            }
        } else if(prev_step < -0.001){
            /* If the last step was negative... */
            if(slope < 0.0){
                /* ...add in the linear term if the current slope is still negative. */
                next_step += epsilon * slope;
            }

            /* If the current slope is close to or more negative than the previous slope... */
            if(slope < (shrink_factor * prev_slope)){
                next_step += mu * prev_step; /* take the maximum size step */
            } else {
                next_step += prev_step * slope / (prev_slope - slope); /* else use the quadratic estimate */
            }
        } else {
            /* The last step was zero, so use only the linear term. */
            next_step += epsilon * slope;
        }

        /* update the global data arrays */
        prev_steps[i] = next_step;
        weights[i] = w + next_step;
        prev_train_slopes[i] = slope;
        train_slopes[i] = 0.0;
    }
}

/* INTERNAL FUNCTION
   The iRprop- algorithm
*/
void fann_update_weights_irpropm(struct fann *ann, unsigned int num_data)
{
    fann_type *train_slopes = ann->train_slopes;
    fann_type *weights = fann_get_weights(ann);
    fann_type *prev_steps = ann->prev_steps;
    fann_type *prev_train_slopes = ann->prev_train_slopes;
    fann_type prev_step, slope, prev_slope, next_step, same_sign;

    float increase_factor = ann->rprop_increase_factor; /* e.g. 1.2 */
    float decrease_factor = ann->rprop_decrease_factor; /* e.g. 0.5 */
    float delta_min = ann->rprop_delta_min;             /* e.g. 0.0 */
    float delta_max = ann->rprop_delta_max;             /* e.g. 50.0 */

    unsigned int i = ann->total_connections;

    while(i--){
        /* prev_step may not be zero, because then the training would stop */
        prev_step = fann_max(prev_steps[i], (fann_type)0.001);
        slope = train_slopes[i];
        prev_slope = prev_train_slopes[i];
        next_step = 0.0;

        same_sign = prev_slope * slope;

        if(same_sign > 0.0){
            /* same direction as last time: grow the step, capped at delta_max */
            next_step = fann_min(prev_step * increase_factor, delta_max);
        } else if(same_sign < 0.0){
            /* sign change: the last step overshot a minimum, so shrink the step */
            next_step = fann_max(prev_step * decrease_factor, delta_min);
            slope = 0;
        }

        if(slope < 0){
            weights[i] -= next_step;
        }else{
            weights[i] += next_step;
        }

        /* update the global data arrays */
        prev_steps[i] = next_step;
        prev_train_slopes[i] = slope;
        train_slopes[i] = 0.0;
    }
}

#endif /* the matching conditional opens earlier in fann_train.c, outside this excerpt */
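None of the functions above are called directly by user code; the training-algorithm setting selects which of them FANN's public training entry points invoke each epoch. A minimal usage sketch, assuming the standard FANN public API and a placeholder training file xor.data in FANN's data format:

#include "fann.h"

int main(void)
{
    /* 3 layers: 2 inputs, 3 hidden neurons, 1 output */
    struct fann *ann = fann_create_standard(3, 2, 3, 1);

    /* Chooses which internal update rule runs each epoch:
     * FANN_TRAIN_INCREMENTAL -> fann_update_weights
     * FANN_TRAIN_BATCH       -> fann_update_weights_batch
     * FANN_TRAIN_QUICKPROP   -> fann_update_weights_quickprop
     * FANN_TRAIN_RPROP       -> fann_update_weights_irpropm */
    fann_set_training_algorithm(ann, FANN_TRAIN_RPROP);

    /* at most 1000 epochs, report every 100, stop at MSE 0.001 */
    fann_train_on_file(ann, "xor.data", 1000, 100, 0.001f);

    fann_destroy(ann);
    return 0;
}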
