
📄 fann.c

📁 A powerful neural network analysis program
💻 C
📖 Page 1 of 3
/*
  Fast Artificial Neural Network Library (fann)
  Copyright (C) 2003 Steffen Nissen (lukesky@diku.dk)

  This library is free software; you can redistribute it and/or
  modify it under the terms of the GNU Lesser General Public
  License as published by the Free Software Foundation; either
  version 2.1 of the License, or (at your option) any later version.

  This library is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
  Lesser General Public License for more details.

  You should have received a copy of the GNU Lesser General Public
  License along with this library; if not, write to the Free Software
  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
*/

#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include <time.h>
#include <math.h>

#include "config.h"
#include "fann.h"
#include "fann_errno.h"

/* create a neural network. */
FANN_EXTERNAL struct fann * FANN_API fann_create(float connection_rate, float learning_rate,
    unsigned int num_layers, /* the number of layers, including the input and output layer */
    ...) /* the number of neurons in each of the layers, starting with the input layer and ending with the output layer */
{
    struct fann *ann;
    va_list layer_sizes;
    unsigned int *layers = (unsigned int *)calloc(num_layers, sizeof(unsigned int));
    int i = 0;

    va_start(layer_sizes, num_layers);
    for ( i=0 ; i<(int)num_layers ; i++ ) {
        layers[i] = va_arg(layer_sizes, unsigned int);
    }
    va_end(layer_sizes);

    ann = fann_create_array(connection_rate, learning_rate, num_layers, layers);

    free(layers);

    return ann;
}

/* create a neural network. */
FANN_EXTERNAL struct fann * FANN_API fann_create_array(float connection_rate, float learning_rate, unsigned int num_layers, unsigned int * layers)
{
    struct fann_layer *layer_it, *last_layer, *prev_layer;
    struct fann *ann;
    struct fann_neuron *neuron_it, *last_neuron, *random_neuron, *bias_neuron;
    unsigned int prev_layer_size, i, j;
    unsigned int num_neurons_in, num_neurons_out;
    unsigned int min_connections, max_connections, num_connections;
    unsigned int connections_per_neuron, allocated_connections;
    unsigned int random_number, found_connection;
#ifdef FIXEDFANN
    unsigned int decimal_point;
    unsigned int multiplier;
#endif

    if(connection_rate > 1){
        connection_rate = 1;
    }

    /* seed random */
    fann_seed_rand();

    /* allocate the general structure */
    ann = fann_allocate_structure(learning_rate, num_layers);
    if(ann == NULL){
        return NULL;
    }

    ann->connection_rate = connection_rate;
#ifdef FIXEDFANN
    decimal_point = ann->decimal_point;
    multiplier = ann->multiplier;
#endif

    fann_update_stepwise_hidden(ann);
    fann_update_stepwise_output(ann);

    /* determine how many neurons there should be in each layer */
    i = 0;
    for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++){
        /* we do not allocate room here, but we make sure that
           last_neuron - first_neuron is the number of neurons */
        layer_it->first_neuron = NULL;
        layer_it->last_neuron = layer_it->first_neuron + layers[i++] + 1; /* +1 for bias */
        ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
    }

    ann->num_output = (ann->last_layer-1)->last_neuron - (ann->last_layer-1)->first_neuron - 1;
    ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;

    /* allocate room for the actual neurons */
    fann_allocate_neurons(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM){
        fann_destroy(ann);
        return NULL;
    }

#ifdef DEBUG
    printf("creating network with learning rate %f and connection rate %f\n", learning_rate, connection_rate);
    printf("input\n");
    printf("  layer       : %d neurons, 1 bias\n", ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
#endif

    num_neurons_in = ann->num_input;
    for(layer_it = ann->first_layer+1; layer_it != ann->last_layer; layer_it++){
        num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
        /* if all neurons in each layer should be connected to at least one neuron
           in the previous layer, and one neuron in the next layer,
           and the bias node should be connected to all the neurons in the next layer,
           then this is the minimum amount of connections */
        min_connections = fann_max(num_neurons_in, num_neurons_out) + num_neurons_out;
        max_connections = num_neurons_in * num_neurons_out; /* not calculating bias */
        num_connections = fann_max(min_connections,
            (unsigned int)(0.5+(connection_rate * max_connections)) + num_neurons_out);

        ann->total_connections += num_connections;

        connections_per_neuron = num_connections/num_neurons_out;
        allocated_connections = 0;
        /* Now split out the connections on the different neurons */
        for(i = 0; i != num_neurons_out; i++){
            layer_it->first_neuron[i].num_connections = connections_per_neuron;
            allocated_connections += connections_per_neuron;

            if(allocated_connections < (num_connections*(i+1))/num_neurons_out){
                layer_it->first_neuron[i].num_connections++;
                allocated_connections++;
            }
        }

        /* used in the next run of the loop */
        num_neurons_in = num_neurons_out;
    }

    fann_allocate_connections(ann);
    if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM){
        fann_destroy(ann);
        return NULL;
    }

    if(connection_rate >= 1){
        prev_layer_size = ann->num_input+1;
        prev_layer = ann->first_layer;
        last_layer = ann->last_layer;
        for(layer_it = ann->first_layer+1; layer_it != last_layer; layer_it++){
            last_neuron = layer_it->last_neuron-1;
            for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++){
                for(i = 0; i != prev_layer_size; i++){
                    neuron_it->weights[i] = (fann_type)fann_random_weight();
                    /* these connections are still initialized for fully connected networks, to allow
                       operations to work, that are not optimized for fully connected networks. */
                    neuron_it->connected_neurons[i] = prev_layer->first_neuron+i;
                }
            }
            prev_layer_size = layer_it->last_neuron - layer_it->first_neuron;
            prev_layer = layer_it;
#ifdef DEBUG
            printf("  layer       : %d neurons, 1 bias\n", prev_layer_size-1);
#endif
        }
    } else {
        /* make connections for a network that is not fully connected */

        /* generally, what we do is first to connect all the input
           neurons to an output neuron, respecting the number of
           available input neurons for each output neuron. Then
           we go through all the output neurons, and connect the
           rest of the connections to input neurons, that they are
           not already connected to. */

        /* first clear all the connections, because we want to
           be able to see which connections are already connected */
        memset((ann->first_layer+1)->first_neuron->connected_neurons, 0, ann->total_connections * sizeof(struct fann_neuron*));

        for(layer_it = ann->first_layer+1;
            layer_it != ann->last_layer; layer_it++){

            num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
            num_neurons_in = (layer_it-1)->last_neuron - (layer_it-1)->first_neuron - 1;

            /* first connect the bias neuron */
            bias_neuron = (layer_it-1)->last_neuron-1;
            last_neuron = layer_it->last_neuron-1;
            for(neuron_it = layer_it->first_neuron;
                neuron_it != last_neuron; neuron_it++){

                neuron_it->connected_neurons[0] = bias_neuron;
                neuron_it->weights[0] = (fann_type)fann_random_weight();
            }

            /* then connect all neurons in the input layer */
            last_neuron = (layer_it-1)->last_neuron - 1;
            for(neuron_it = (layer_it-1)->first_neuron;
                neuron_it != last_neuron; neuron_it++){

                /* random neuron in the output layer that has space
                   for more connections */
                do {
                    random_number = (int) (0.5+fann_rand(0, num_neurons_out-1));
                    random_neuron = layer_it->first_neuron + random_number;
                    /* checks the last space in the connections array for room */
                }while(random_neuron->connected_neurons[random_neuron->num_connections-1]);

                /* find an empty space in the connection array and connect */
                for(i = 0; i < random_neuron->num_connections; i++){
                    if(random_neuron->connected_neurons[i] == NULL){
                        random_neuron->connected_neurons[i] = neuron_it;
                        random_neuron->weights[i] = (fann_type)fann_random_weight();
                        break;
                    }
                }
            }

            /* then connect the rest of the unconnected neurons */
            last_neuron = layer_it->last_neuron - 1;
            for(neuron_it = layer_it->first_neuron;
                neuron_it != last_neuron; neuron_it++){
                /* find empty space in the connection array and connect */
                for(i = 0; i < neuron_it->num_connections; i++){
                    /* continue if already connected */
                    if(neuron_it->connected_neurons[i] != NULL) continue;

                    do {
                        found_connection = 0;
                        random_number = (int) (0.5+fann_rand(0, num_neurons_in-1));
                        random_neuron = (layer_it-1)->first_neuron + random_number;

                        /* check to see if this connection is already there */
                        for(j = 0; j < i; j++){
                            if(random_neuron == neuron_it->connected_neurons[j]){
                                found_connection = 1;
                                break;
                            }
                        }

                    }while(found_connection);

                    /* we have found a neuron that is not already
                       connected to us, connect it */
                    neuron_it->connected_neurons[i] = random_neuron;
                    neuron_it->weights[i] = (fann_type)fann_random_weight();
                }
            }

#ifdef DEBUG
            printf("  layer       : %d neurons, 1 bias\n", num_neurons_out);
#endif
        }

        /* TODO it would be nice to have the randomly created
           connections sorted for smoother memory access. */
    }

#ifdef DEBUG
    printf("output\n");
#endif

    return ann;
}

/* create a neural network with shortcut connections. */
FANN_EXTERNAL struct fann * FANN_API fann_create_shortcut(float learning_rate,
    unsigned int num_layers, /* the number of layers, including the input and output layer */
    ...) /* the number of neurons in each of the layers, starting with the input layer and ending with the output layer */
{
    struct fann *ann;
    va_list layer_sizes;
    unsigned int *layers = (unsigned int *)calloc(num_layers, sizeof(unsigned int));
    int i = 0;

    va_start(layer_sizes, num_layers);
    for ( i=0 ; i<(int)num_layers ; i++ ) {
        layers[i] = va_arg(layer_sizes, unsigned int);
    }
    va_end(layer_sizes);

    ann = fann_create_shortcut_array(learning_rate, num_layers, layers);

    free(layers);

    return ann;
}

/* create a neural network with shortcut connections. */
FANN_EXTERNAL struct fann * FANN_API fann_create_shortcut_array(float learning_rate, unsigned int num_layers, unsigned int * layers)
{
    struct fann_layer *layer_it, *layer_it2, *last_layer;
    struct fann *ann;
    struct fann_neuron *neuron_it, *neuron_it2 = 0;
    unsigned int i;
    unsigned int num_neurons_in, num_neurons_out;
    unsigned int num_connections;
#ifdef FIXEDFANN
    unsigned int decimal_point;
    unsigned int multiplier;
#endif

    /* seed random */
    fann_seed_rand();

    /* allocate the general structure */
    ann = fann_allocate_structure(learning_rate, num_layers);
    if(ann == NULL){
        return NULL;
    }

    ann->connection_rate = 1;
    ann->shortcut_connections = 1;
#ifdef FIXEDFANN
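
The listing continues on the next page. As a quick orientation, here is a small standalone sketch (not part of fann.c) of how the constructors above are typically called: the variadic tail after num_layers carries one neuron count per layer, as the va_arg loops show. The layer sizes and rates below are made-up illustrative values; fann_create, fann_create_shortcut and fann_destroy are the entry points that appear in this listing.

/* usage sketch: build and tear down a few networks with the
   constructors defined above (illustrative values only) */
#include "fann.h"

int main(void)
{
    /* fully connected 2-4-1 network: connection_rate 1.0, learning_rate 0.7 */
    struct fann *full = fann_create(1.0f, 0.7f, 3, 2, 4, 1);

    /* sparsely connected variant: roughly half of the possible connections */
    struct fann *sparse = fann_create(0.5f, 0.7f, 3, 2, 4, 1);

    /* variant built with shortcut connections (shortcut_connections = 1 above) */
    struct fann *shortcut = fann_create_shortcut(0.7f, 3, 2, 4, 1);

    if(full) fann_destroy(full);
    if(sparse) fann_destroy(sparse);
    if(shortcut) fann_destroy(shortcut);
    return 0;
}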

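The connection-budget arithmetic inside fann_create_array (min_connections, max_connections, num_connections, and the per-neuron split) can be hard to follow inline. Below is a standalone sketch of my own, not part of fann.c, that replays the same formulas for one hypothetical layer pair (5 inputs feeding 3 outputs at connection_rate 0.5); max_of stands in for the library's fann_max macro and the numbers are purely illustrative.

/* standalone sketch: mirror of the connection-budget arithmetic above */
#include <stdio.h>

static unsigned int max_of(unsigned int a, unsigned int b)
{
    return a > b ? a : b;
}

int main(void)
{
    /* hypothetical layer pair: 5 inputs feeding 3 outputs, half connected */
    unsigned int num_neurons_in = 5, num_neurons_out = 3;
    float connection_rate = 0.5f;
    unsigned int i, allocated_connections = 0;

    /* each output neuron needs at least one input plus its bias connection */
    unsigned int min_connections = max_of(num_neurons_in, num_neurons_out) + num_neurons_out;
    unsigned int max_connections = num_neurons_in * num_neurons_out; /* bias excluded */
    unsigned int num_connections = max_of(min_connections,
        (unsigned int)(0.5 + (connection_rate * max_connections)) + num_neurons_out);
    unsigned int connections_per_neuron = num_connections / num_neurons_out;

    printf("min=%u max=%u budget=%u base per neuron=%u\n",
        min_connections, max_connections, num_connections, connections_per_neuron);

    /* spread the remainder so the per-neuron counts sum to the budget */
    for(i = 0; i != num_neurons_out; i++){
        unsigned int n = connections_per_neuron;
        allocated_connections += connections_per_neuron;
        if(allocated_connections < (num_connections * (i + 1)) / num_neurons_out){
            n++;
            allocated_connections++;
        }
        printf("output neuron %u: %u connections\n", i, n);
    }
    /* with these inputs: budget is 11 and the per-neuron counts come out 3, 4, 4 */
    return 0;
}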