fann_cpp.h
training or testing, and can therefore sometimes be a bit off if the weights
have been changed since the last calculation of the value.
See also:
<test_data>, <fann_get_MSE>
This function appears in FANN >= 1.1.0.
*/
float get_MSE()
{
    // No network: report a zero error rather than touching a null handle.
    return (ann == NULL) ? 0.0f : fann_get_MSE(ann);
}
/* Method: reset_MSE
Resets the mean square error from the network.
This function also resets the number of bits that fail.
See also:
<get_MSE>, <get_bit_fail_limit>, <fann_reset_MSE>
This function appears in FANN >= 1.1.0
*/
void reset_MSE()
{
    // Guard clause: nothing to reset without an underlying network.
    if (ann == NULL)
        return;
    fann_reset_MSE(ann);
}
/* Method: set_callback
Sets the callback function for use during training. The user_data is passed to
the callback. It can point to arbitrary data that the callback might require and
can be NULL if it is not used.
See <FANN::callback_type> for more information about the callback function.
The default callback function simply prints out some status information.
This function appears in FANN >= 2.0.0.
*/
void set_callback(callback_type callback, void *user_data)
{
    if (ann != NULL)
    {
        // Free any previously installed context so repeated calls do not leak.
        // (delete on a null pointer is a no-op, so no guard is needed.)
        // The context installed below is owned by this object and is also
        // deleted in the destroy method called by the destructor.
        user_context *user_instance = static_cast<user_context *>(fann_get_user_data(ann));
        delete user_instance;
        user_instance = new user_context();
        user_instance->user_callback = callback;
        user_instance->user_data = user_data;
        user_instance->net = this;
        fann_set_user_data(ann, user_instance);
        // Route through the static trampoline only when a callback is set;
        // a NULL callback restores the library's default behavior.
        if (callback != NULL)
            fann_set_callback(ann, &FANN::neural_net::internal_callback);
        else
            fann_set_callback(ann, NULL);
    }
}
/* Method: print_parameters
Prints all of the parameters and options of the neural network
See also:
<fann_print_parameters>
This function appears in FANN >= 1.2.0.
*/
void print_parameters()
{
    // Guard clause: printing requires an underlying network.
    if (ann == NULL)
        return;
    fann_print_parameters(ann);
}
/* Method: get_training_algorithm
Return the training algorithm as described by <FANN::training_algorithm_enum>.
This training algorithm is used by <train_on_data> and associated functions.
Note that this algorithm is also used during <cascadetrain_on_data>, although only
FANN::TRAIN_RPROP and FANN::TRAIN_QUICKPROP is allowed during cascade training.
The default training algorithm is FANN::TRAIN_RPROP.
See also:
<set_training_algorithm>, <FANN::training_algorithm_enum>,
<fann_get_training_algorithm>
This function appears in FANN >= 1.0.0.
*/
training_algorithm_enum get_training_algorithm()
{
    // Fall back to FANN_TRAIN_INCREMENTAL when no network exists.
    fann_train_enum algo =
        (ann != NULL) ? fann_get_training_algorithm(ann) : FANN_TRAIN_INCREMENTAL;
    return static_cast<training_algorithm_enum>(algo);
}
/* Method: set_training_algorithm
Set the training algorithm.
More info available in <get_training_algorithm>
This function appears in FANN >= 1.0.0.
*/
void set_training_algorithm(training_algorithm_enum training_algorithm)
{
    // Silently ignore the call when there is no underlying network.
    if (ann == NULL)
        return;
    fann_set_training_algorithm(
        ann, static_cast<fann_train_enum>(training_algorithm));
}
/* Method: get_learning_rate
Return the learning rate.
The learning rate is used to determine how aggressive training should be for some of the
training algorithms (FANN::TRAIN_INCREMENTAL, FANN::TRAIN_BATCH, FANN::TRAIN_QUICKPROP).
Do however note that it is not used in FANN::TRAIN_RPROP.
The default learning rate is 0.7.
See also:
<set_learning_rate>, <set_training_algorithm>,
<fann_get_learning_rate>
This function appears in FANN >= 1.0.0.
*/
float get_learning_rate()
{
    // Report 0.0 when no network exists.
    return (ann == NULL) ? 0.0f : fann_get_learning_rate(ann);
}
/* Method: set_learning_rate
Set the learning rate.
More info available in <get_learning_rate>
This function appears in FANN >= 1.0.0.
*/
void set_learning_rate(float learning_rate)
{
    // Silently ignore the call when there is no underlying network.
    if (ann == NULL)
        return;
    fann_set_learning_rate(ann, learning_rate);
}
/*************************************************************************************************************/
/* Method: get_activation_function
Get the activation function for neuron number *neuron* in layer number *layer*,
counting the input layer as layer 0.
It is not possible to get activation functions for the neurons in the input layer.
Information about the individual activation functions is available at <FANN::activation_function_enum>.
Returns:
The activation function for the neuron or -1 if the neuron is not defined in the neural network.
See also:
<set_activation_function_layer>, <set_activation_function_hidden>,
<set_activation_function_output>, <set_activation_steepness>,
<set_activation_function>, <fann_get_activation_function>
This function appears in FANN >= 2.1.0
*/
activation_function_enum get_activation_function(int layer, int neuron)
{
    // Without a network, report enum value 0 (matches the original default).
    unsigned int result = 0;
    if (ann != NULL)
        result = fann_get_activation_function(ann, layer, neuron);
    return static_cast<activation_function_enum>(result);
}
/* Method: set_activation_function
Set the activation function for neuron number *neuron* in layer number *layer*,
counting the input layer as layer 0.
It is not possible to set activation functions for the neurons in the input layer.
When choosing an activation function it is important to note that the activation
functions have different range. FANN::SIGMOID is e.g. in the 0 - 1 range while
FANN::SIGMOID_SYMMETRIC is in the -1 - 1 range and FANN::LINEAR is unbound.
Information about the individual activation functions is available at <FANN::activation_function_enum>.
The default activation function is FANN::SIGMOID_STEPWISE.
See also:
<set_activation_function_layer>, <set_activation_function_hidden>,
<set_activation_function_output>, <set_activation_steepness>,
<get_activation_function>, <fann_set_activation_function>
This function appears in FANN >= 2.0.0.
*/
void set_activation_function(activation_function_enum activation_function, int layer, int neuron)
{
    // Silently ignore the call when there is no underlying network.
    if (ann == NULL)
        return;
    fann_set_activation_function(
        ann, static_cast<fann_activationfunc_enum>(activation_function),
        layer, neuron);
}
/* Method: set_activation_function_layer
Set the activation function for all the neurons in the layer number *layer*,
counting the input layer as layer 0.
It is not possible to set activation functions for the neurons in the input layer.
See also:
<set_activation_function>, <set_activation_function_hidden>,
<set_activation_function_output>, <set_activation_steepness_layer>,
<fann_set_activation_function_layer>
This function appears in FANN >= 2.0.0.
*/
void set_activation_function_layer(activation_function_enum activation_function, int layer)
{
    // Silently ignore the call when there is no underlying network.
    if (ann == NULL)
        return;
    fann_set_activation_function_layer(
        ann, static_cast<fann_activationfunc_enum>(activation_function), layer);
}
/* Method: set_activation_function_hidden
Set the activation function for all of the hidden layers.
See also:
<set_activation_function>, <set_activation_function_layer>,
<set_activation_function_output>, <set_activation_steepness_hidden>,
<fann_set_activation_function_hidden>
This function appears in FANN >= 1.0.0.
*/
void set_activation_function_hidden(activation_function_enum activation_function)
{
    // Silently ignore the call when there is no underlying network.
    if (ann == NULL)
        return;
    fann_set_activation_function_hidden(
        ann, static_cast<fann_activationfunc_enum>(activation_function));
}
/* Method: set_activation_function_output
Set the activation function for the output layer.
See also:
<set_activation_function>, <set_activation_function_layer>,
<set_activation_function_hidden>, <set_activation_steepness_output>,
<fann_set_activation_function_output>
This function appears in FANN >= 1.0.0.
*/
void set_activation_function_output(activation_function_enum activation_function)
{
    // Silently ignore the call when there is no underlying network.
    if (ann == NULL)
        return;
    fann_set_activation_function_output(
        ann, static_cast<fann_activationfunc_enum>(activation_function));
}
/* Method: get_activation_steepness
Get the activation steepness for neuron number *neuron* in layer number *layer*,
counting the input layer as layer 0.
It is not possible to get activation steepness for the neurons in the input layer.
The steepness of an activation function says something about how fast the activation function
goes from the minimum to the maximum. A high value for the activation function will also
give a more agressive training.
When training neural networks where the output values should be at the extremes (usually 0 and 1,
depending on the activation function), a steep activation function can be used (e.g. 1.0).
The default activation steepness is 0.5.
Returns:
The activation steepness for the neuron or -1 if the neuron is not defined in the neural network.
See also:
<set_activation_steepness_layer>, <set_activation_steepness_hidden>,
<set_activation_steepness_output>, <set_activation_function>,
<set_activation_steepness>, <fann_get_activation_steepness>
This function appears in FANN >= 2.1.0
*/
fann_type get_activation_steepness(int layer, int neuron)
{
    // Without a network, report 0 (matches the original default).
    if (ann == NULL)
        return 0;
    return fann_get_activation_steepness(ann, layer, neuron);
}
/* Method: set_activation_steepness
Set the activation steepness for neuron number *neuron* in layer number *layer*,
counting the input layer as layer 0.
It is not possible to set activation steepness for the neurons in the input layer.
The steepness of an activation function says something about how fast the activation function
goes from the minimum to the maximum. A high value for the activation function will also
give a more agressive training.
[NOTE: source truncated here — the remainder of the set_activation_steepness
documentation and its implementation were not captured in this extract.]