📄 fann_io.c
字号:
{
#else
if(strncmp(read_version, "FANN_FLO_1.1\n", strlen("FANN_FLO_1.1\n")) == 0)
{
#endif
free(read_version);
return fann_create_from_fd_1_1(conf, configuration_file);
}
#ifndef FIXEDFANN
/* Maintain compatibility with 2.0 version that doesn't have scale parameters. */
if(strncmp(read_version, "FANN_FLO_2.0\n", strlen("FANN_FLO_2.0\n")) != 0 &&
strncmp(read_version, "FANN_FLO_2.1\n", strlen("FANN_FLO_2.1\n")) != 0)
#else
if(strncmp(read_version, "FANN_FIX_2.0\n", strlen("FANN_FIX_2.0\n")) != 0 &&
strncmp(read_version, "FANN_FIX_2.1\n", strlen("FANN_FIX_2.1\n")) != 0)
#endif
{
free(read_version);
fann_error(NULL, FANN_E_WRONG_CONFIG_VERSION, configuration_file);
return NULL;
}
}
free(read_version);
#ifdef FIXEDFANN
fann_scanf("%u", "decimal_point", &decimal_point);
multiplier = 1 << decimal_point;
#endif
fann_scanf("%u", "num_layers", &num_layers);
ann = fann_allocate_structure(num_layers);
if(ann == NULL)
{
return NULL;
}
fann_scanf("%f", "learning_rate", &ann->learning_rate);
fann_scanf("%f", "connection_rate", &ann->connection_rate);
fann_scanf("%u", "network_type", (unsigned int *)&ann->network_type);
fann_scanf("%f", "learning_momentum", &ann->learning_momentum);
fann_scanf("%u", "training_algorithm", (unsigned int *)&ann->training_algorithm);
fann_scanf("%u", "train_error_function", (unsigned int *)&ann->train_error_function);
fann_scanf("%u", "train_stop_function", (unsigned int *)&ann->train_stop_function);
fann_scanf("%f", "cascade_output_change_fraction", &ann->cascade_output_change_fraction);
fann_scanf("%f", "quickprop_decay", &ann->quickprop_decay);
fann_scanf("%f", "quickprop_mu", &ann->quickprop_mu);
fann_scanf("%f", "rprop_increase_factor", &ann->rprop_increase_factor);
fann_scanf("%f", "rprop_decrease_factor", &ann->rprop_decrease_factor);
fann_scanf("%f", "rprop_delta_min", &ann->rprop_delta_min);
fann_scanf("%f", "rprop_delta_max", &ann->rprop_delta_max);
fann_scanf("%f", "rprop_delta_zero", &ann->rprop_delta_zero);
fann_scanf("%u", "cascade_output_stagnation_epochs", &ann->cascade_output_stagnation_epochs);
fann_scanf("%f", "cascade_candidate_change_fraction", &ann->cascade_candidate_change_fraction);
fann_scanf("%u", "cascade_candidate_stagnation_epochs", &ann->cascade_candidate_stagnation_epochs);
fann_scanf("%u", "cascade_max_out_epochs", &ann->cascade_max_out_epochs);
fann_scanf("%u", "cascade_max_cand_epochs", &ann->cascade_max_cand_epochs);
fann_scanf("%u", "cascade_num_candidate_groups", &ann->cascade_num_candidate_groups);
fann_scanf(FANNSCANF, "bit_fail_limit", &ann->bit_fail_limit);
fann_scanf(FANNSCANF, "cascade_candidate_limit", &ann->cascade_candidate_limit);
fann_scanf(FANNSCANF, "cascade_weight_multiplier", &ann->cascade_weight_multiplier);
fann_scanf("%u", "cascade_activation_functions_count", &ann->cascade_activation_functions_count);
/* reallocate mem */
ann->cascade_activation_functions =
(enum fann_activationfunc_enum *)realloc(ann->cascade_activation_functions,
ann->cascade_activation_functions_count * sizeof(enum fann_activationfunc_enum));
if(ann->cascade_activation_functions == NULL)
{
fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
fann_destroy(ann);
return NULL;
}
fscanf(conf, "cascade_activation_functions=");
for(i = 0; i < ann->cascade_activation_functions_count; i++)
fscanf(conf, "%u ", (unsigned int *)&ann->cascade_activation_functions[i]);
fann_scanf("%u", "cascade_activation_steepnesses_count", &ann->cascade_activation_steepnesses_count);
/* reallocate mem */
ann->cascade_activation_steepnesses =
(fann_type *)realloc(ann->cascade_activation_steepnesses,
ann->cascade_activation_steepnesses_count * sizeof(fann_type));
if(ann->cascade_activation_steepnesses == NULL)
{
fann_error((struct fann_error*)ann, FANN_E_CANT_ALLOCATE_MEM);
fann_destroy(ann);
return NULL;
}
fscanf(conf, "cascade_activation_steepnesses=");
for(i = 0; i < ann->cascade_activation_steepnesses_count; i++)
fscanf(conf, FANNSCANF" ", &ann->cascade_activation_steepnesses[i]);
#ifdef FIXEDFANN
ann->decimal_point = decimal_point;
ann->multiplier = multiplier;
#endif
#ifdef FIXEDFANN
fann_update_stepwise(ann);
#endif
#ifdef DEBUG
printf("creating network with %d layers\n", num_layers);
printf("input\n");
#endif
fscanf(conf, "layer_sizes=");
/* determine how many neurons there should be in each layer */
for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
{
if(fscanf(conf, "%u ", &layer_size) != 1)
{
fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONFIG, "layer_sizes", configuration_file);
fann_destroy(ann);
return NULL;
}
/* we do not allocate room here, but we make sure that
* last_neuron - first_neuron is the number of neurons */
layer_it->first_neuron = NULL;
layer_it->last_neuron = layer_it->first_neuron + layer_size;
ann->total_neurons += layer_size;
#ifdef DEBUG
if(ann->network_type == FANN_NETTYPE_SHORTCUT && layer_it != ann->first_layer)
{
printf(" layer : %d neurons, 0 bias\n", layer_size);
}
else
{
printf(" layer : %d neurons, 1 bias\n", layer_size - 1);
}
#endif
}
ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
ann->num_output = ((ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron);
if(ann->network_type == FANN_NETTYPE_LAYER)
{
/* one too many (bias) in the output layer */
ann->num_output--;
}
#ifndef FIXEDFANN
#define SCALE_LOAD( what, where ) \
fscanf( conf, #what "_" #where "=" ); \
for(i = 0; i < ann->num_##where##put; i++) \
{ \
if(fscanf( conf, "%f ", (float *)&ann->what##_##where[ i ] ) != 1) \
{ \
fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONFIG, #what "_" #where, configuration_file); \
fann_destroy(ann); \
return NULL; \
} \
}
if(fscanf(conf, "scale_included=%u\n", &scale_included) == 1 && scale_included == 1)
{
fann_allocate_scale(ann);
SCALE_LOAD( scale_mean, in )
SCALE_LOAD( scale_deviation, in )
SCALE_LOAD( scale_new_min, in )
SCALE_LOAD( scale_factor, in )
SCALE_LOAD( scale_mean, out )
SCALE_LOAD( scale_deviation, out )
SCALE_LOAD( scale_new_min, out )
SCALE_LOAD( scale_factor, out )
}
#undef SCALE_LOAD
#endif
/* allocate room for the actual neurons */
fann_allocate_neurons(ann);
if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
{
fann_destroy(ann);
return NULL;
}
last_neuron = (ann->last_layer - 1)->last_neuron;
fscanf(conf, "neurons (num_inputs, activation_function, activation_steepness)=");
for(neuron_it = ann->first_layer->first_neuron; neuron_it != last_neuron; neuron_it++)
{
if(fscanf
(conf, "(%u, %u, " FANNSCANF ") ", &num_connections, (unsigned int *)&neuron_it->activation_function,
&neuron_it->activation_steepness) != 3)
{
fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
fann_destroy(ann);
return NULL;
}
neuron_it->first_con = ann->total_connections;
ann->total_connections += num_connections;
neuron_it->last_con = ann->total_connections;
}
fann_allocate_connections(ann);
if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
{
fann_destroy(ann);
return NULL;
}
connected_neurons = ann->connections;
weights = ann->weights;
first_neuron = ann->first_layer->first_neuron;
fscanf(conf, "connections (connected_to_neuron, weight)=");
for(i = 0; i < ann->total_connections; i++)
{
if(fscanf(conf, "(%u, " FANNSCANF ") ", &input_neuron, &weights[i]) != 2)
{
fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONNECTIONS, configuration_file);
fann_destroy(ann);
return NULL;
}
connected_neurons[i] = first_neuron + input_neuron;
}
#ifdef DEBUG
printf("output\n");
#endif
return ann;
}
/* INTERNAL FUNCTION
Create a network from a configuration file descriptor. (backward compatible read of version 1.1 files)
*/
/*
 * Read a network from an already-open configuration file descriptor using
 * the legacy 1.1 file layout: (optional fixed-point decimal point,) one
 * line of scalar parameters, the layer sizes, one connection count per
 * neuron, and finally the "(neuron weight)" connection pairs.
 *
 * conf               - open FILE* positioned just after the version string
 * configuration_file - file name, used only for error reporting
 *
 * Returns a newly allocated struct fann (caller owns it, free with
 * fann_destroy), or NULL on any read or allocation failure; an error is
 * reported through fann_error before returning NULL.
 */
struct fann *fann_create_from_fd_1_1(FILE * conf, const char *configuration_file)
{
	unsigned int num_layers, layer_size, input_neuron, i, network_type, num_connections;
	unsigned int activation_function_hidden, activation_function_output;
#ifdef FIXEDFANN
	unsigned int decimal_point, multiplier;
#endif
	fann_type activation_steepness_hidden, activation_steepness_output;
	float learning_rate, connection_rate;
	struct fann_neuron *first_neuron, *neuron_it, *last_neuron, **connected_neurons;
	fann_type *weights;
	struct fann_layer *layer_it;
	struct fann *ann;

#ifdef FIXEDFANN
	/* Fixed-point files store the decimal point before everything else. */
	if(fscanf(conf, "%u\n", &decimal_point) != 1)
	{
		fann_error(NULL, FANN_E_CANT_READ_CONFIG, "decimal_point", configuration_file);
		return NULL;
	}
	multiplier = 1 << decimal_point;
#endif

	/* The 1.1 format packs all scalar parameters onto a single line. */
	if(fscanf(conf, "%u %f %f %u %u %u " FANNSCANF " " FANNSCANF "\n", &num_layers, &learning_rate,
			  &connection_rate, &network_type, &activation_function_hidden,
			  &activation_function_output, &activation_steepness_hidden,
			  &activation_steepness_output) != 8)
	{
		fann_error(NULL, FANN_E_CANT_READ_CONFIG, "parameters", configuration_file);
		return NULL;
	}

	ann = fann_allocate_structure(num_layers);
	if(ann == NULL)
	{
		return NULL;
	}
	ann->connection_rate = connection_rate;
	ann->network_type = (enum fann_nettype_enum)network_type;
	ann->learning_rate = learning_rate;

#ifdef FIXEDFANN
	ann->decimal_point = decimal_point;
	ann->multiplier = multiplier;
#endif
#ifdef FIXEDFANN
	/* Recompute the stepwise-linear activation lookup tables for this
	 * decimal point / multiplier. */
	fann_update_stepwise(ann);
#endif

#ifdef DEBUG
	printf("creating network with learning rate %f\n", learning_rate);
	printf("input\n");
#endif

	/* determine how many neurons there should be in each layer */
	for(layer_it = ann->first_layer; layer_it != ann->last_layer; layer_it++)
	{
		if(fscanf(conf, "%u ", &layer_size) != 1)
		{
			/* Report the failed key the same way the 2.x reader does
			 * (was FANN_E_CANT_READ_NEURON, which misidentified a layer
			 * size parse failure as a neuron read failure). */
			fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONFIG, "layer_sizes", configuration_file);
			fann_destroy(ann);
			return NULL;
		}
		/* we do not allocate room here, but we make sure that
		 * last_neuron - first_neuron is the number of neurons */
		layer_it->first_neuron = NULL;
		layer_it->last_neuron = layer_it->first_neuron + layer_size;
		ann->total_neurons += layer_size;
#ifdef DEBUG
		if(ann->network_type == FANN_NETTYPE_SHORTCUT && layer_it != ann->first_layer)
		{
			printf("  layer       : %d neurons, 0 bias\n", layer_size);
		}
		else
		{
			printf("  layer       : %d neurons, 1 bias\n", layer_size - 1);
		}
#endif
	}

	/* Stored layer sizes include the bias neuron in the input layer, and
	 * (for FANN_NETTYPE_LAYER) in the output layer too. */
	ann->num_input = ann->first_layer->last_neuron - ann->first_layer->first_neuron - 1;
	ann->num_output = ((ann->last_layer - 1)->last_neuron - (ann->last_layer - 1)->first_neuron);
	if(ann->network_type == FANN_NETTYPE_LAYER)
	{
		/* one too many (bias) in the output layer */
		ann->num_output--;
	}

	/* allocate room for the actual neurons */
	fann_allocate_neurons(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}

	/* Read one connection count per neuron and lay out the [first_con,
	 * last_con) ranges into the global connection array. */
	last_neuron = (ann->last_layer - 1)->last_neuron;
	for(neuron_it = ann->first_layer->first_neuron; neuron_it != last_neuron; neuron_it++)
	{
		if(fscanf(conf, "%u ", &num_connections) != 1)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_READ_NEURON, configuration_file);
			fann_destroy(ann);
			return NULL;
		}
		neuron_it->first_con = ann->total_connections;
		ann->total_connections += num_connections;
		neuron_it->last_con = ann->total_connections;
	}

	fann_allocate_connections(ann);
	if(ann->errno_f == FANN_E_CANT_ALLOCATE_MEM)
	{
		fann_destroy(ann);
		return NULL;
	}

	/* Connections are stored as "(source_neuron_index weight)" pairs;
	 * indices are relative to the first neuron of the network. */
	connected_neurons = ann->connections;
	weights = ann->weights;
	first_neuron = ann->first_layer->first_neuron;
	for(i = 0; i < ann->total_connections; i++)
	{
		if(fscanf(conf, "(%u " FANNSCANF ") ", &input_neuron, &weights[i]) != 2)
		{
			fann_error((struct fann_error *) ann, FANN_E_CANT_READ_CONNECTIONS, configuration_file);
			fann_destroy(ann);
			return NULL;
		}
		connected_neurons[i] = first_neuron + input_neuron;
	}

	/* 1.1 files carry a single activation function / steepness for all
	 * hidden layers and one for the output layer; apply them network-wide. */
	fann_set_activation_steepness_hidden(ann, activation_steepness_hidden);
	fann_set_activation_steepness_output(ann, activation_steepness_output);
	fann_set_activation_function_hidden(ann, (enum fann_activationfunc_enum)activation_function_hidden);
	fann_set_activation_function_output(ann, (enum fann_activationfunc_enum)activation_function_output);

#ifdef DEBUG
	printf("output\n");
#endif
	return ann;
}
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -