/* fann.c */
/* Deallocate a network created by one of the fann_create_* functions.
 *
 * Safe to call with NULL (no-op).  fann_safe_free() itself tolerates
 * NULL members, but the original code dereferenced ann->first_layer
 * unconditionally, crashing on a partially constructed network; that
 * dereference is now guarded.
 */
FANN_EXTERNAL void FANN_API fann_destroy(struct fann *ann)
{
	if(ann == NULL)
		return;
	fann_safe_free(ann->weights);
	fann_safe_free(ann->connections);
	/* The neuron array is owned by the first layer; only reach through
	 * first_layer if the layer array was actually allocated. */
	if(ann->first_layer != NULL)
	{
		fann_safe_free(ann->first_layer->first_neuron);
	}
	fann_safe_free(ann->first_layer);
	fann_safe_free(ann->output);
	fann_safe_free(ann->train_errors);
	fann_safe_free(ann->train_slopes);
	fann_safe_free(ann->prev_train_slopes);
	fann_safe_free(ann->prev_steps);
	fann_safe_free(ann->prev_weights_deltas);
	fann_safe_free(ann->errstr);
	fann_safe_free(ann->cascade_activation_functions);
	fann_safe_free(ann->cascade_activation_steepnesses);
#ifndef FIXEDFANN
	/* Input/output scaling arrays only exist in floating-point builds. */
	fann_safe_free(ann->scale_mean_in);
	fann_safe_free(ann->scale_deviation_in);
	fann_safe_free(ann->scale_new_min_in);
	fann_safe_free(ann->scale_factor_in);
	fann_safe_free(ann->scale_mean_out);
	fann_safe_free(ann->scale_deviation_out);
	fann_safe_free(ann->scale_new_min_out);
	fann_safe_free(ann->scale_factor_out);
#endif
	fann_safe_free(ann);
}
/* Assign every connection weight a fresh uniform random value in
 * [min_weight, max_weight].
 *
 * In floating-point builds the accumulated training state (slopes,
 * step sizes) refers to the old weights, so it is cleared as well.
 */
FANN_EXTERNAL void FANN_API fann_randomize_weights(struct fann *ann, fann_type min_weight,
												   fann_type max_weight)
{
	unsigned int idx;

	for(idx = 0; idx < ann->total_connections; idx++)
	{
		ann->weights[idx] = (fann_type) (fann_rand(min_weight, max_weight));
	}

#ifndef FIXEDFANN
	/* Stale training history would mislead RPROP/quickprop; reset it. */
	if(ann->prev_train_slopes != NULL)
	{
		fann_clear_train_arrays(ann);
	}
#endif
}
/* Print an ASCII map of the network's connections to stdout.
 *
 * One row per non-input neuron; each column is a potential source
 * neuron.  '.' means no connection; letters encode weight magnitude
 * ('A'..'Z' positive, 'a'..'z' negative, saturated at +/-25).
 *
 * Fix: the original passed ptrdiff_t values (pointer differences) and
 * unsigned int to printf's %d, which is undefined behavior on
 * platforms where they differ in size; they are now cast/matched.
 */
FANN_EXTERNAL void FANN_API fann_print_connections(struct fann *ann)
{
	struct fann_layer *layer_it;
	struct fann_neuron *neuron_it;
	unsigned int i;
	int value;
	char *neurons;
	unsigned int num_neurons = fann_get_total_neurons(ann) - fann_get_num_output(ann);

	neurons = (char *) malloc(num_neurons + 1);
	if(neurons == NULL)
	{
		fann_error(NULL, FANN_E_CANT_ALLOCATE_MEM);
		return;
	}
	neurons[num_neurons] = 0;

	printf("Layer / Neuron ");
	for(i = 0; i < num_neurons; i++)
	{
		/* %u: i is unsigned int. */
		printf("%u", i % 10);
	}
	printf("\n");

	for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
	{
		for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++)
		{
			memset(neurons, (int) '.', num_neurons);
			for(i = neuron_it->first_con; i < neuron_it->last_con; i++)
			{
				if(ann->weights[i] < 0)
				{
#ifdef FIXEDFANN
					value = (int) ((ann->weights[i] / (double) ann->multiplier) - 0.5);
#else
					value = (int) ((ann->weights[i]) - 0.5);
#endif
					if(value < -25)
						value = -25;
					neurons[ann->connections[i] - ann->first_layer->first_neuron] = (char)('a' - value);
				}
				else
				{
#ifdef FIXEDFANN
					value = (int) ((ann->weights[i] / (double) ann->multiplier) + 0.5);
#else
					value = (int) ((ann->weights[i]) + 0.5);
#endif
					if(value > 25)
						value = 25;
					neurons[ann->connections[i] - ann->first_layer->first_neuron] = (char)('A' + value);
				}
			}
			/* Pointer differences are ptrdiff_t; cast to int to match %d. */
			printf("L %3d / N %4d %s\n", (int)(layer_it - ann->first_layer),
				   (int)(neuron_it - ann->first_layer->first_neuron), neurons);
		}
	}

	free(neurons);
}
/* Initialize the weights using Widrow + Nguyen's algorithm.
*/
/* Initialize the weights using Widrow + Nguyen's algorithm.
 *
 * The scale factor is derived from the number of hidden neurons, the
 * number of inputs, and the range of the training inputs.  Bias
 * connections get weights in [-scale, scale]; all others in [0, scale].
 *
 * Fix: if every training input has the same value, the input range is
 * zero and the original code divided by zero (giving an inf/NaN scale
 * factor in floating-point builds); the range is now clamped to 1.
 */
FANN_EXTERNAL void FANN_API fann_init_weights(struct fann *ann, struct fann_train_data *train_data)
{
	fann_type smallest_inp, largest_inp;
	fann_type inp_range;
	unsigned int dat = 0, elem, num_connect, num_hidden_neurons;
	struct fann_layer *layer_it;
	struct fann_neuron *neuron_it, *last_neuron, *bias_neuron;

#ifdef FIXEDFANN
	unsigned int multiplier = ann->multiplier;
#endif
	float scale_factor;

	/* Scan all training inputs for their minimum and maximum. */
	for(smallest_inp = largest_inp = train_data->input[0][0]; dat < train_data->num_data; dat++)
	{
		for(elem = 0; elem < train_data->num_input; elem++)
		{
			if(train_data->input[dat][elem] < smallest_inp)
				smallest_inp = train_data->input[dat][elem];
			if(train_data->input[dat][elem] > largest_inp)
				largest_inp = train_data->input[dat][elem];
		}
	}

	/* Hidden neurons = all neurons minus inputs, outputs, and one bias
	 * per layer. */
	num_hidden_neurons =
		ann->total_neurons - (ann->num_input + ann->num_output +
							  (ann->last_layer - ann->first_layer));

	/* Guard against a zero input range (all inputs identical). */
	inp_range = largest_inp - smallest_inp;
	if(inp_range == 0)
		inp_range = 1;

	scale_factor =
		(float) (pow
				 ((double) (0.7f * (double) num_hidden_neurons),
				  (double) (1.0f / (double) ann->num_input)) / (double) inp_range);

#ifdef DEBUG
	printf("Initializing weights with scale factor %f\n", scale_factor);
#endif
	bias_neuron = ann->first_layer->last_neuron - 1;
	for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer; layer_it++)
	{
		last_neuron = layer_it->last_neuron;

		/* Layer networks have one bias per layer; shortcut networks
		 * reuse the input layer's bias. */
		if(ann->network_type == FANN_NETTYPE_LAYER)
		{
			bias_neuron = (layer_it - 1)->last_neuron - 1;
		}

		for(neuron_it = layer_it->first_neuron; neuron_it != last_neuron; neuron_it++)
		{
			for(num_connect = neuron_it->first_con; num_connect < neuron_it->last_con;
				num_connect++)
			{
				if(bias_neuron == ann->connections[num_connect])
				{
#ifdef FIXEDFANN
					ann->weights[num_connect] =
						(fann_type) fann_rand(-scale_factor, scale_factor * multiplier);
#else
					ann->weights[num_connect] = (fann_type) fann_rand(-scale_factor, scale_factor);
#endif
				}
				else
				{
#ifdef FIXEDFANN
					ann->weights[num_connect] = (fann_type) fann_rand(0, scale_factor * multiplier);
#else
					ann->weights[num_connect] = (fann_type) fann_rand(0, scale_factor);
#endif
				}
			}
		}
	}

#ifndef FIXEDFANN
	/* Stale training history refers to the old weights; reset it. */
	if(ann->prev_train_slopes != NULL)
	{
		fann_clear_train_arrays(ann);
	}
#endif
}
/* Print the network's topology and training parameters to stdout.
 *
 * Fix: the original passed unsigned int fields and ptrdiff_t pointer
 * differences to printf's %d, which is undefined behavior where the
 * sizes differ; specifiers are now %u and the pointer differences are
 * cast to int.
 */
FANN_EXTERNAL void FANN_API fann_print_parameters(struct fann *ann)
{
	struct fann_layer *layer_it;

#ifndef FIXEDFANN
	unsigned int i;
#endif

	printf("Input layer :%4u neurons, 1 bias\n", ann->num_input);
	for(layer_it = ann->first_layer + 1; layer_it != ann->last_layer - 1; layer_it++)
	{
		if(ann->network_type == FANN_NETTYPE_SHORTCUT)
		{
			/* Shortcut nets share the input layer's bias. */
			printf(" Hidden layer :%4d neurons, 0 bias\n",
				   (int)(layer_it->last_neuron - layer_it->first_neuron));
		}
		else
		{
			printf(" Hidden layer :%4d neurons, 1 bias\n",
				   (int)(layer_it->last_neuron - layer_it->first_neuron - 1));
		}
	}
	printf("Output layer :%4u neurons\n", ann->num_output);
	printf("Total neurons and biases :%4u\n", fann_get_total_neurons(ann));
	printf("Total connections :%4u\n", ann->total_connections);
	printf("Connection rate :%8.3f\n", ann->connection_rate);
	printf("Network type : %s\n", FANN_NETTYPE_NAMES[ann->network_type]);
#ifdef FIXEDFANN
	printf("Decimal point :%4u\n", ann->decimal_point);
	printf("Multiplier :%4u\n", ann->multiplier);
#else
	printf("Training algorithm : %s\n", FANN_TRAIN_NAMES[ann->training_algorithm]);
	printf("Training error function : %s\n", FANN_ERRORFUNC_NAMES[ann->train_error_function]);
	printf("Training stop function : %s\n", FANN_STOPFUNC_NAMES[ann->train_stop_function]);
#endif
#ifdef FIXEDFANN
	printf("Bit fail limit :%4d\n", ann->bit_fail_limit);
#else
	printf("Bit fail limit :%8.3f\n", ann->bit_fail_limit);
	printf("Learning rate :%8.3f\n", ann->learning_rate);
	printf("Learning momentum :%8.3f\n", ann->learning_momentum);
	printf("Quickprop decay :%11.6f\n", ann->quickprop_decay);
	printf("Quickprop mu :%8.3f\n", ann->quickprop_mu);
	printf("RPROP increase factor :%8.3f\n", ann->rprop_increase_factor);
	printf("RPROP decrease factor :%8.3f\n", ann->rprop_decrease_factor);
	printf("RPROP delta min :%8.3f\n", ann->rprop_delta_min);
	printf("RPROP delta max :%8.3f\n", ann->rprop_delta_max);
	printf("Cascade output change fraction :%11.6f\n", ann->cascade_output_change_fraction);
	printf("Cascade candidate change fraction :%11.6f\n", ann->cascade_candidate_change_fraction);
	printf("Cascade output stagnation epochs :%4u\n", ann->cascade_output_stagnation_epochs);
	printf("Cascade candidate stagnation epochs :%4u\n", ann->cascade_candidate_stagnation_epochs);
	printf("Cascade max output epochs :%4u\n", ann->cascade_max_out_epochs);
	printf("Cascade max candidate epochs :%4u\n", ann->cascade_max_cand_epochs);
	printf("Cascade weight multiplier :%8.3f\n", ann->cascade_weight_multiplier);
	printf("Cascade candidate limit :%8.3f\n", ann->cascade_candidate_limit);
	for(i = 0; i < ann->cascade_activation_functions_count; i++)
		printf("Cascade activation functions[%u] : %s\n", i,
			   FANN_ACTIVATIONFUNC_NAMES[ann->cascade_activation_functions[i]]);
	for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
		printf("Cascade activation steepnesses[%u] :%8.3f\n", i,
			   ann->cascade_activation_steepnesses[i]);
	printf("Cascade candidate groups :%4u\n", ann->cascade_num_candidate_groups);
	printf("Cascade no. of candidates :%4u\n", fann_get_cascade_num_candidates(ann));
	/* TODO: dump scale parameters */
#endif
}
/* Macro-generated trivial getters: fann_get_num_input() and
 * fann_get_num_output() return the corresponding struct fann fields. */
FANN_GET(unsigned int, num_input)
FANN_GET(unsigned int, num_output)
/* Return the number of usable neurons in the network.
 *
 * Shortcut networks (network_type != 0) count every neuron; layer
 * networks carry one always-unused bias neuron in the last layer,
 * which is excluded from the count.
 */
FANN_EXTERNAL unsigned int FANN_API fann_get_total_neurons(struct fann *ann)
{
	return ann->network_type ? ann->total_neurons : ann->total_neurons - 1;
}
/* Macro-generated getter: fann_get_total_connections() returns
 * ann->total_connections. */
FANN_GET(unsigned int, total_connections)
/* Return the network's topology type.
 *
 * Currently two types: LAYER = 0, SHORTCUT = 1.  The enum values of
 * fann_nettype_enum must match the stored network_type field.
 */
FANN_EXTERNAL enum fann_nettype_enum FANN_API fann_get_network_type(struct fann *ann)
{
	return ann->network_type;
}
/* Return the connection rate the network was created with
 * (1.0 = fully connected). */
FANN_EXTERNAL float FANN_API fann_get_connection_rate(struct fann *ann)
{
	return ann->connection_rate;
}
/* Return the number of layers, including the input and output layers.
 * Computed as the extent of the layer array. */
FANN_EXTERNAL unsigned int FANN_API fann_get_num_layers(struct fann *ann)
{
	return ann->last_layer - ann->first_layer;
}
/* Fill the caller-supplied array with the neuron count of each layer,
 * first layer first, excluding bias neurons.
 *
 * The caller must provide space for fann_get_num_layers() entries.
 */
FANN_EXTERNAL void FANN_API fann_get_layer_array(struct fann *ann, unsigned int *layers)
{
	struct fann_layer *lp;

	for(lp = ann->first_layer; lp != ann->last_layer; lp++, layers++)
	{
		unsigned int count = lp->last_neuron - lp->first_neuron;
		enum fann_nettype_enum net_type = fann_get_network_type(ann);

		if(net_type == FANN_NETTYPE_LAYER)
		{
			/* Every layer carries one bias neuron; don't count it. */
			count--;
		}
		else if(net_type == FANN_NETTYPE_SHORTCUT)
		{
			/* The bias in the first layer is reused for all layers. */
			if(lp == ann->first_layer)
				count--;
		}
		/* Unknown network type: assume no bias present. */

		*layers = count;
	}
}
/* Fill the caller-supplied array with the bias count (0 or 1) of each
 * layer, first layer first.
 *
 * Layer networks have one bias in every layer except the last;
 * shortcut networks keep a single bias in the first layer that is
 * reused everywhere.  The caller must provide space for
 * fann_get_num_layers() entries.
 */
FANN_EXTERNAL void FANN_API fann_get_bias_array(struct fann *ann, unsigned int *bias)
{
	struct fann_layer *lp;

	for(lp = ann->first_layer; lp != ann->last_layer; ++lp, ++bias)
	{
		unsigned int has_bias = 0;
		enum fann_nettype_enum net_type = fann_get_network_type(ann);

		if(net_type == FANN_NETTYPE_LAYER)
		{
			/* One bias in each layer except the last. */
			if(lp != ann->last_layer - 1)
				has_bias = 1;
		}
		else if(net_type == FANN_NETTYPE_SHORTCUT)
		{
			/* The bias in the first layer is reused for all layers. */
			if(lp == ann->first_layer)
				has_bias = 1;
		}
		/* Unknown network type: assume no bias present. */

		*bias = has_bias;
	}
}
FANN_EXTERNAL void FANN_API fann_get_connection_array(struct fann *ann, struct fann_connection *connections)
{
struct fann_neuron *first_neuron;
struct fann_layer *layer_it;
struct fann_neuron *neuron_it;
unsigned int index;
unsigned int source_index;
unsigned int destination_index;
first_neuron = ann->first_layer->first_neuron;
source_index = 0;
destination_index = 0;
/* The following assumes that the last unused bias has no connections */
/* for each layer */
for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++){
/* for each neuron */
for(neuron_it = layer_it->first_neuron; neuron_it != layer_it->last_neuron; neuron_it++){
/* for each connection */
for (index = neuron_it->first_con; index < neuron_it->last_con; index++){
/* Assign the source, destination and weight */
connections->from_neuron = ann->connections[source_index] - first_neuron;
connections->to_neuron = destination_index;
connections->weight = ann->weights[source_index];
connections++;
/* NOTE(review): the file is truncated here by an extraction artifact —
 * the remainder of fann_get_connection_array() (and any following code)
 * was lost and must be restored from the original fann.c. */