/* fann.c */
/*
Fast Artificial Neural Network Library (fann)
Copyright (C) 2003 Steffen Nissen (lukesky@diku.dk)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include <time.h>
#include <math.h>
#include "config.h"
#include "fann.h"
FANN_EXTERNAL struct fann *FANN_API fann_create_standard(unsigned int num_layers, ...)
{
struct fann *ann;
va_list layer_sizes;
int i;
unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));
if(layers == NULL)
{
fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
return NULL;
}
va_start(layer_sizes, num_layers);
for(i = 0; i < (int) num_layers; i++)
{
layers[i] = va_arg(layer_sizes, unsigned int);
}
va_end(layer_sizes);
ann = fann_create_standard_array(num_layers, layers);
free(layers);
return ann;
}
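/* Usage sketch (illustrative only, not part of the library source): the
 * variadic creator takes the number of layers followed by one size per
 * layer, here a fully connected 2-3-1 network:
 *
 *     struct fann *ann = fann_create_standard(3, 2, 3, 1);
 *     if(ann != NULL)
 *     {
 *         fann_destroy(ann);
 *     }
 */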
FANN_EXTERNAL struct fann *FANN_API fann_create_standard_array(unsigned int num_layers,
const unsigned int *layers)
{
return fann_create_sparse_array(1, num_layers, layers);
}
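/* The array variant takes the same sizes through a buffer, which is handy
 * when the topology is computed at run time. A minimal sketch, assuming the
 * same 2-3-1 network as above:
 *
 *     unsigned int layer_sizes[3] = { 2, 3, 1 };
 *     struct fann *ann = fann_create_standard_array(3, layer_sizes);
 */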
FANN_EXTERNAL struct fann *FANN_API fann_create_sparse(float connection_rate,
unsigned int num_layers, ...)
{
struct fann *ann;
va_list layer_sizes;
int i;
unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));
if(layers == NULL)
{
fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
return NULL;
}
va_start(layer_sizes, num_layers);
for(i = 0; i < (int) num_layers; i++)
{
layers[i] = va_arg(layer_sizes, unsigned int);
}
va_end(layer_sizes);
ann = fann_create_sparse_array(connection_rate, num_layers, layers);
free(layers);
return ann;
}
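/* Illustrative call (not part of the library source): a connection_rate of
 * 0.5 requests roughly half of the possible connections between each pair
 * of layers, subject to the minimum computed in fann_create_sparse_array:
 *
 *     struct fann *ann = fann_create_sparse(0.5f, 3, 2, 3, 1);
 */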
FANN_EXTERNAL struct fann *FANN_API fann_create_sparse_array(float connection_rate,
unsigned int num_layers,
const unsigned int *layers)
{
struct fann_layer *layer_it, *last_layer, *prev_layer;
struct fann *ann;
struct fann_neuron *neuron_it, *last_neuron, *random_neuron, *bias_neuron;
#ifdef DEBUG
unsigned int prev_layer_size;
#endif
unsigned int num_neurons_in, num_neurons_out, i, j;
unsigned int min_connections, max_connections, num_connections;
unsigned int connections_per_neuron, allocated_connections;
unsigned int random_number, found_connection;
#ifdef FIXEDFANN
unsigned int decimal_point;
unsigned int multiplier;
#endif
if(connection_rate > 1)
{
connection_rate = 1;
}
/* seed random */
#ifndef FANN_NO_SEED
fann_seed_rand();
#endif
/* allocate the general structure */
ann = fann_allocate_structure(num_layers);
if(ann == NULL)
{
fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
return NULL;
}
ann->connection_rate = connection_rate;
#ifdef FIXEDFANN
decimal_point = ann->decimal_point;
multiplier = ann->multiplier;
fann_update_stepwise(ann);
#endif
/* determine how many neurons there should be in each layer */
i = 0;
for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
{
/* we do not allocate room here, but we make sure that
* last_neuron - first_neuron is the number of neurons */
layer_it->first_neuron = NULL;
layer_it->last_neuron = layer_it->first_neuron + layers[i++] + 1; /* +1 for bias */
ann->total_neurons += layer_it->last_neuron - layer_it->first_neuron;
}
ann->num_output = (ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron - 1;
ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
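/* Illustrative numbers (not from the source): for layers = {2, 3, 1} the
 * three layers hold 3, 4 and 2 neurons including their bias neurons, so
 * total_neurons = 9, num_input = 2 and num_output = 1. */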
/* allocate room for the actual neurons */
fann_allocate_neurons(ann);
if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
{
fann_destroy(ann);
return NULL;
}
#ifdef DEBUG
printf("creating network with connection rate %f\n", connection_rate);
printf("input\n");
printf(" layer : %d neurons, 1 bias\n",
ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1);
#endif
num_neurons_in = ann->num_input;
for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
{
num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
/* if all neurons in each layer should be connected to at least one neuron
 * in the previous layer and one neuron in the next layer,
 * and the bias node should be connected to all neurons in the next layer,
 * then this is the minimum amount of connections */
min_connections = fann_max(num_neurons_in, num_neurons_out) + num_neurons_out;
max_connections = num_neurons_in * num_neurons_out; /* not calculating bias */
num_connections = fann_max(min_connections,
(unsigned int) (0.5 + (connection_rate * max_connections)) +
num_neurons_out);
connections_per_neuron = num_connections / num_neurons_out;
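/* Worked example (illustrative numbers, not from the source): with
 * num_neurons_in = 4, num_neurons_out = 3 and connection_rate = 0.5,
 * min_connections = max(4, 3) + 3 = 7, max_connections = 4 * 3 = 12,
 * num_connections = max(7, (unsigned int)(0.5 + 0.5 * 12) + 3) = 9 and
 * connections_per_neuron = 9 / 3 = 3. */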
allocated_connections = 0;
/* Now split out the connections on the different neurons */
for(i = 0; i != num_neurons_out; i++)
{
layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
allocated_connections += connections_per_neuron;
layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;
layer_it->first_neuron[i].activation_function = FANN_SIGMOID_STEPWISE;
#ifdef FIXEDFANN
layer_it->first_neuron[i].activation_steepness = ann->multiplier / 2;
#else
layer_it->first_neuron[i].activation_steepness = 0.5;
#endif
if(allocated_connections < (num_connections * (i + 1)) / num_neurons_out)
{
layer_it->first_neuron[i].last_con++;
allocated_connections++;
}
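/* Illustrative case for the check above (numbers are not from the source):
 * if num_connections were 10 and num_neurons_out 3, connections_per_neuron
 * would be 3 and the last neuron would receive the leftover connection
 * (3 + 3 + 4 = 10), so every one of num_connections is handed out. */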
}
/* bias neuron also gets stuff */
layer_it->first_neuron[i].first_con = ann->total_connections + allocated_connections;
layer_it->first_neuron[i].last_con = ann->total_connections + allocated_connections;
ann->total_connections += num_connections;
/* used in the next run of the loop */
num_neurons_in = num_neurons_out;
}
fann_allocate_connections(ann);
if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
{
fann_destroy(ann);
return NULL;
}
if(connection_rate >= 1)
{
#ifdef DEBUG
prev_layer_size = ann->num_input + 1;
#endif
prev_layer = ann->first_layer;
last_layer = ann->last_layer;
for(layer_it = ann->first_layer + 1; layer_it != last_layer; layer_it++)
{
last_neuron = layer_it->last_neuron - 1;
for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
{
for(i = neuron_it->first_con; i != neuron_it->last_con; i++)
{
ann->weights[i] = (fann_type) fann_random_weight();
/* these connections are still initialized for fully connected networks,
 * to allow operations that are not optimized for fully connected
 * networks to work.
 */
ann->connections[i] = prev_layer->first_neuron + (i - neuron_it->first_con);
}
}
#ifdef DEBUG
prev_layer_size = layer_it->last_neuron - layer_it->first_neuron;
#endif
prev_layer = layer_it;
#ifdef DEBUG
printf(" layer : %d neurons, 1 bias\n", prev_layer_size - 1);
#endif
}
}
else
{
/* make connections for a network that is not fully connected */
/* generally, what we do is first connect all the input
 * neurons to an output neuron, respecting the number of
 * available input neurons for each output neuron. Then
 * we go through all the output neurons, and connect the
 * rest of the connections to input neurons that they are
 * not already connected to.
 */
/* All the connections are cleared by calloc, because we want to
 * be able to see which connections are already connected */
for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
{
num_neurons_out = layer_it->last_neuron - layer_it->first_neuron - 1;
num_neurons_in = (layer_it - 1)->last_neuron - (layer_it - 1)->first_neuron - 1;
/* first connect the bias neuron */
bias_neuron = (layer_it - 1)->last_neuron - 1;
last_neuron = layer_it->last_neuron - 1;
for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
{
ann->connections[neuron_it->first_con] = bias_neuron;
ann->weights[neuron_it->first_con] = (fann_type) fann_random_weight();
}
/* then connect all neurons in the input layer */
last_neuron = (layer_it - 1)->last_neuron - 1;
for(neuron_it = (layer_it - 1)->first_neuron; neuron_it != last_neuron; neuron_it++)
{
/* random neuron in the output layer that has space
* for more connections */
do
{
random_number = (int) (0.5 + fann_rand(0, num_neurons_out - 1));
random_neuron = layer_it->first_neuron + random_number;
/* checks the last space in the connections array for room */
}
while(ann->connections[random_neuron->last_con - 1]);
/* find an empty space in the connection array and connect */
for(i = random_neuron->first_con; i < random_neuron->last_con; i++)
{
if(ann->connections[i] == NULL)
{
ann->connections[i] = neuron_it;
ann->weights[i] = (fann_type) fann_random_weight();
break;
}
}
}
/* then connect the rest of the unconnected neurons */
last_neuron = layer_it->last_neuron - 1;
for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
{
/* find empty space in the connection array and connect */
for(i = neuron_it->first_con; i < neuron_it->last_con; i++)
{
/* continue if already connected */
if(ann->connections[i] != NULL)
continue;
do
{
found_connection = 0;
random_number = (int) (0.5 + fann_rand(0, num_neurons_in - 1));
random_neuron = (layer_it - 1)->first_neuron + random_number;
/* check to see if this connection is already there */
for(j = neuron_it->first_con; j < i; j++)
{
if(random_neuron == ann->connections[j])
{
found_connection = 1;
break;
}
}
}
while(found_connection);
/* we have found a neuron that is not already
 * connected to us, connect it */
ann->connections[i] = random_neuron;
ann->weights[i] = (fann_type) fann_random_weight();
}
}
#ifdef DEBUG
printf(" layer : %d neurons, 1 bias\n", num_neurons_out);
#endif
}
/* TODO it would be nice to have the randomly created
* connections sorted for smoother memory access.
*/
}
#ifdef DEBUG
printf("output\n");
#endif
return ann;
}
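/* Illustrative call (not part of the library source), using an explicit
 * layer array and a quarter of the possible connections:
 *
 *     unsigned int layer_sizes[3] = { 8, 5, 2 };
 *     struct fann *ann = fann_create_sparse_array(0.25f, 3, layer_sizes);
 */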
FANN_EXTERNAL struct fann *FANN_API fann_create_shortcut(unsigned int num_layers, ...)
{
struct fann *ann;
int i;
va_list layer_sizes;
unsigned int *layers = (unsigned int *) calloc(num_layers, sizeof(unsigned int));
if(layers == NULL)
{
fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
return NULL;
}
va_start(layer_sizes, num_layers);
for(i = 0; i < (int) num_layers; i++)
{
layers[i] = va_arg(layer_sizes, unsigned int);
}
va_end(layer_sizes);
ann = fann_create_shortcut_array(num_layers, layers);
free(layers);
return ann;
}
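/* Usage sketch (illustrative only): a shortcut network also connects each
 * neuron to the neurons in all preceding layers, not just the previous one,
 * so it is typically created small and grown with cascade training:
 *
 *     struct fann *ann = fann_create_shortcut(2, 2, 1);
 */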