
📄 fann.c

📁 A powerful neural network analysis program
💻 C
📖 Page 1 of 3
	decimal_point = ann->decimal_point;
	multiplier = ann->multiplier;
#endif
	fann_update_stepwise_hidden(ann);
	fann_update_stepwise_output(ann);

	/* determine how many neurons there should be in each layer */
	i = 0;
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++){
		/* we do not allocate room here, but we make sure that
		   last_neuron - first_neuron is the number of neurons */
		layer_it->first_neuron = NULL;
		layer_it->last_neuron = layer_it->first_neuron + layers[i++] + 1; /* +1 for bias */
		ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
	}

	ann->num_output = (ann->last_layer-1)->last_neuron - (ann->last_layer-1)->first_neuron - 1;
	ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;

	/* allocate room for the actual neurons */
	fann_allocate_neurons(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM){
		fann_destroy(ann);
		return NULL;
	}

#ifdef DEBUG
	printf("creating fully shortcut connected network with learning rate %f.\n", learning_rate);
	printf("input\n");
	printf("  layer       : %d neurons, 1 bias\n",
		ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
#endif

	num_neurons_in = ann->num_input;
	last_layer = ann->last_layer;
	for(layer_it = ann->first_layer+1; layer_it != last_layer; layer_it++){
		num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
		num_connections = num_neurons_in * num_neurons_out + num_neurons_out;
		ann->total_connections += num_connections;

		/* Now split out the connections on the different neurons */
		for(i = 0; i != num_neurons_out; i++){
			layer_it->first_neuron[i].num_connections = num_neurons_in + 1;
		}

#ifdef DEBUG
		printf("  layer       : %d neurons, 1 bias\n", num_neurons_out);
#endif
		/* used in the next run of the loop */
		num_neurons_in += num_neurons_out;
	}

	fann_allocate_connections(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM){
		fann_destroy(ann);
		return NULL;
	}

	/* Connections are created from all neurons to all neurons in later layers */
	num_neurons_in = ann->num_input + 1;
	for(layer_it = ann->first_layer+1; layer_it != last_layer; layer_it++){
		for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron-1; neuron_it++){
			i = 0;
			for(layer_it2 = ann->first_layer; layer_it2 != layer_it; layer_it2++){
				for(neuron_it2 = layer_it2->first_neuron; neuron_it2 != layer_it2->last_neuron-1; neuron_it2++){
					neuron_it->weights[i] = (fann_type)fann_random_weight();
					/* these connections are still initialized even for fully
					   connected networks, to allow operations that are not
					   optimized for fully connected networks to work */
					neuron_it->connected_neurons[i] = neuron_it2;
					i++;
				}
			}
			/* The connection to the bias neuron */
			neuron_it->weights[i] = (fann_type)fann_random_weight();
			neuron_it->connected_neurons[i] = neuron_it2;
		}
		num_neurons_in += layer_it->last_neuron - layer_it->first_neuron;
	}

#ifdef DEBUG
	printf("output\n");
#endif

	return ann;
}
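/* Usage sketch (illustrative addition, not part of the original file):
 * the exact signature of fann_create_shortcut differs between FANN
 * versions, but with the 1.x-style API implied by the learning-rate
 * debug output above, a fully shortcut-connected network could be
 * created, run, and released roughly like this; the layer sizes
 * (2, 4, 1) and the learning rate 0.7f are hypothetical values:
 *
 *     struct fann *ann = fann_create_shortcut(0.7f, 3, 2, 4, 1);
 *     if(ann != NULL){
 *         fann_run(ann, inputs);  // inputs: array of 2 fann_type values
 *         fann_destroy(ann);
 *     }
 */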
/* runs the network. */
FANN_EXTERNAL fann_type * FANN_API fann_run(struct fann *ann, fann_type *input)
{
	struct fann_neuron *neuron_it, *last_neuron, *neurons, **neuron_pointers;
	unsigned int activation_function, i, num_connections, num_neurons, num_input, num_output;
	fann_type neuron_value, *output;
	fann_type *weights;
	struct fann_layer *layer_it, *layer_it2, *last_layer;

	/* store some variables locally for fast access */
#ifndef FIXEDFANN
	fann_type steepness;
	const fann_type activation_steepness_output = ann->activation_steepness_output;
	const fann_type activation_steepness_hidden = ann->activation_steepness_hidden;
#endif

	unsigned int activation_function_output = ann->activation_function_output;
	unsigned int activation_function_hidden = ann->activation_function_hidden;
	struct fann_neuron *first_neuron = ann->first_layer->first_neuron;
#ifdef FIXEDFANN
	int multiplier = ann->multiplier;
	unsigned int decimal_point = ann->decimal_point;
#endif

	/* values used for the stepwise linear sigmoid function */
	fann_type rh1 = 0, rh2 = 0, rh3 = 0, rh4 = 0, rh5 = 0, rh6 = 0;
	fann_type ro1 = 0, ro2 = 0, ro3 = 0, ro4 = 0, ro5 = 0, ro6 = 0;
	fann_type h1 = 0, h2 = 0, h3 = 0, h4 = 0, h5 = 0, h6 = 0;
	fann_type o1 = 0, o2 = 0, o3 = 0, o4 = 0, o5 = 0, o6 = 0;

	switch(ann->activation_function_hidden){
#ifdef FIXEDFANN
		case FANN_SIGMOID:
		case FANN_SIGMOID_SYMMETRIC:
#endif
		case FANN_SIGMOID_STEPWISE:
		case FANN_SIGMOID_SYMMETRIC_STEPWISE:
			/* the hidden results */
			rh1 = ann->activation_results_hidden[0];
			rh2 = ann->activation_results_hidden[1];
			rh3 = ann->activation_results_hidden[2];
			rh4 = ann->activation_results_hidden[3];
			rh5 = ann->activation_results_hidden[4];
			rh6 = ann->activation_results_hidden[5];

			/* the hidden parameters */
			h1 = ann->activation_values_hidden[0];
			h2 = ann->activation_values_hidden[1];
			h3 = ann->activation_values_hidden[2];
			h4 = ann->activation_values_hidden[3];
			h5 = ann->activation_values_hidden[4];
			h6 = ann->activation_values_hidden[5];
			break;
		default:
			break;
	}

	switch(ann->activation_function_output){
#ifdef FIXEDFANN
		case FANN_SIGMOID:
		case FANN_SIGMOID_SYMMETRIC:
#endif
		case FANN_SIGMOID_STEPWISE:
		case FANN_SIGMOID_SYMMETRIC_STEPWISE:
			/* the output results */
			ro1 = ann->activation_results_output[0];
			ro2 = ann->activation_results_output[1];
			ro3 = ann->activation_results_output[2];
			ro4 = ann->activation_results_output[3];
			ro5 = ann->activation_results_output[4];
			ro6 = ann->activation_results_output[5];

			/* the output parameters */
			o1 = ann->activation_values_output[0];
			o2 = ann->activation_values_output[1];
			o3 = ann->activation_values_output[2];
			o4 = ann->activation_values_output[3];
			o5 = ann->activation_values_output[4];
			o6 = ann->activation_values_output[5];
			break;
		default:
			break;
	}

	/* first set the input */
	num_input = ann->num_input;
	for(i = 0; i != num_input; i++){
#ifdef FIXEDFANN
		if(fann_abs(input[i]) > multiplier){
			printf("Warning: input number %d is out of range (-%d to %d) with value %d; integer overflow may occur.\n",
				i, multiplier, multiplier, input[i]);
		}
#endif
		first_neuron[i].value = input[i];
	}

	last_layer = ann->last_layer;
	for(layer_it = ann->first_layer+1; layer_it != last_layer; layer_it++){
#ifdef FIXEDFANN
		((layer_it-1)->last_neuron-1)->value = multiplier;
#else
		/* set the bias neuron */
		((layer_it-1)->last_neuron-1)->value = 1;

		steepness = (layer_it == last_layer-1) ?
			activation_steepness_output : activation_steepness_hidden;
#endif

		activation_function = (layer_it == last_layer-1) ?
			activation_function_output : activation_function_hidden;

		last_neuron = layer_it->last_neuron-1;
		for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++){
			neuron_value = 0;
			num_connections = neuron_it->num_connections;
			weights = neuron_it->weights;

			if(ann->connection_rate >= 1){
				if(ann->shortcut_connections){
					/* first go through the connections to the previous layers,
					   then let the normal operation go through the rest. */
					for(layer_it2 = ann->first_layer; layer_it2 != layer_it-1; layer_it2++){
						neurons = layer_it2->first_neuron;
						num_neurons = layer_it2->last_neuron - neurons - 1; /* don't use bias from previous layers */

						i = num_neurons & 3; /* same as modulo 4 */
						switch(i) {
							case 3:
								neuron_value += fann_mult(weights[2], neurons[2].value);
							case 2:
								neuron_value += fann_mult(weights[1], neurons[1].value);
							case 1:
								neuron_value += fann_mult(weights[0], neurons[0].value);
							case 0:
								break;
						}

						for(; i != num_neurons; i += 4){
							neuron_value +=
								fann_mult(weights[i], neurons[i].value) +
								fann_mult(weights[i+1], neurons[i+1].value) +
								fann_mult(weights[i+2], neurons[i+2].value) +
								fann_mult(weights[i+3], neurons[i+3].value);
						}

						num_connections -= num_neurons;
						weights += num_neurons;
					}
				}

				neurons = (layer_it-1)->first_neuron;

				i = num_connections & 3; /* same as modulo 4 */
				switch(i) {
					case 3:
						neuron_value += fann_mult(weights[2], neurons[2].value);
					case 2:
						neuron_value += fann_mult(weights[1], neurons[1].value);
					case 1:
						neuron_value += fann_mult(weights[0], neurons[0].value);
					case 0:
						break;
				}

				for(; i != num_connections; i += 4){
					neuron_value +=
						fann_mult(weights[i], neurons[i].value) +
						fann_mult(weights[i+1], neurons[i+1].value) +
						fann_mult(weights[i+2], neurons[i+2].value) +
						fann_mult(weights[i+3], neurons[i+3].value);
				}
			}else{
				neuron_pointers = neuron_it->connected_neurons;

				i = num_connections & 3; /* same as modulo 4 */
				switch(i) {
					case 3:
						neuron_value += fann_mult(weights[2], neuron_pointers[2]->value);
					case 2:
						neuron_value += fann_mult(weights[1], neuron_pointers[1]->value);
					case 1:
						neuron_value += fann_mult(weights[0], neuron_pointers[0]->value);
					case 0:
						break;
				}

				for(; i != num_connections; i += 4){
					neuron_value +=
						fann_mult(weights[i], neuron_pointers[i]->value) +
						fann_mult(weights[i+1], neuron_pointers[i+1]->value) +
						fann_mult(weights[i+2], neuron_pointers[i+2]->value) +
						fann_mult(weights[i+3], neuron_pointers[i+3]->value);
				}
			}
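			/* neuron_value now holds this neuron's weighted input sum;
			   the switch below applies the layer's activation function
			   to turn it into the neuron's output value. */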
			switch(activation_function){
#ifdef FIXEDFANN
				case FANN_SIGMOID:
				case FANN_SIGMOID_STEPWISE:
					if(layer_it == last_layer-1){
						neuron_it->value = (fann_type)fann_stepwise(o1, o2, o3, o4, o5, o6,
							ro1, ro2, ro3, ro4, ro5, ro6, 0, multiplier, neuron_value);
					}else{
						neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6,
							rh1, rh2, rh3, rh4, rh5, rh6, 0, multiplier, neuron_value);
					}
					break;
				case FANN_SIGMOID_SYMMETRIC:
				case FANN_SIGMOID_SYMMETRIC_STEPWISE:
					if(layer_it == last_layer-1){
						neuron_it->value = (fann_type)fann_stepwise(o1, o2, o3, o4, o5, o6,
							ro1, ro2, ro3, ro4, ro5, ro6, -multiplier, multiplier, neuron_value);
					}else{
						neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6,
							rh1, rh2, rh3, rh4, rh5, rh6, -multiplier, multiplier, neuron_value);
					}
					break;
#else
				case FANN_LINEAR:
					neuron_it->value = (fann_type)fann_linear(steepness, neuron_value);
					break;
				case FANN_SIGMOID:
					neuron_it->value = (fann_type)fann_sigmoid(steepness, neuron_value);
					break;
				case FANN_SIGMOID_SYMMETRIC:
					neuron_it->value = (fann_type)fann_sigmoid_symmetric(steepness, neuron_value);
					break;
				case FANN_SIGMOID_STEPWISE:
					if(layer_it == last_layer-1){
						neuron_it->value = (fann_type)fann_stepwise(o1, o2, o3, o4, o5, o6,
							ro1, ro2, ro3, ro4, ro5, ro6, 0, 1, neuron_value);
					}else{
						neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6,
							rh1, rh2, rh3, rh4, rh5, rh6, 0, 1, neuron_value);
					}
					break;
				case FANN_SIGMOID_SYMMETRIC_STEPWISE:
					if(layer_it == last_layer-1){
						neuron_it->value = (fann_type)fann_stepwise(o1, o2, o3, o4, o5, o6,
							ro1, ro2, ro3, ro4, ro5, ro6, -1, 1, neuron_value);
					}else{
						neuron_it->value = (fann_type)fann_stepwise(h1, h2, h3, h4, h5, h6,
							rh1, rh2, rh3, rh4, rh5, rh6, -1, 1, neuron_value);
					}
					break;
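The listing breaks off here; the remaining activation cases and the end of fann_run continue on page 2 of 3.

The inner loops above accumulate each neuron's weighted input sum with modulo-4 loop unrolling: a switch first consumes the num_connections % 4 leftover terms (its cases deliberately fall through), and the main loop then processes four connections per iteration, reducing the number of loop-condition checks per multiply-accumulate. A minimal self-contained sketch of the same pattern, with plain float standing in for fann_type and fann_mult:

#include <stdio.h>

/* sketch of the modulo-4 unrolled dot product used in fann_run above */
static float dot_unrolled4(const float *weights, const float *values, unsigned int n)
{
	float sum = 0;
	unsigned int i = n & 3; /* same as n modulo 4 */
	switch(i){
		case 3: sum += weights[2] * values[2]; /* fall through */
		case 2: sum += weights[1] * values[1]; /* fall through */
		case 1: sum += weights[0] * values[0]; /* fall through */
		case 0: break;
	}
	/* the remaining count is a multiple of four */
	for(; i != n; i += 4){
		sum += weights[i] * values[i] + weights[i+1] * values[i+1] +
			weights[i+2] * values[i+2] + weights[i+3] * values[i+3];
	}
	return sum;
}

int main(void)
{
	const float w[5] = {1, 2, 3, 4, 5};
	const float v[5] = {5, 4, 3, 2, 1};
	printf("%f\n", dot_unrolled4(w, v, 5)); /* 1*5+2*4+3*3+4*2+5*1 = 35 */
	return 0;
}

Similarly, fann_stepwise above receives six breakpoint values, six corresponding results, and two clamping bounds. Its definition is not on this page, so the following is only a plausible reading rather than the library's exact macro: piecewise-linear interpolation between the six (value, result) points, clamped to the bounds outside the outermost breakpoints:

/* hedged sketch of a six-point stepwise linear activation; the array
   arguments play the role of the h1..h6 / rh1..rh6 parameters above */
static float stepwise6(const float v[6], const float r[6], float lo, float hi, float x)
{
	int k;
	if(x <= v[0]) return lo; /* clamp below the first breakpoint */
	for(k = 1; k < 6; k++){
		if(x < v[k])
			return r[k-1] + (r[k] - r[k-1]) * (x - v[k-1]) / (v[k] - v[k-1]);
	}
	return hi; /* clamp above the last breakpoint */
}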
