fann_cpp.h: improved artificial neural network source code (C/C++ header, part 1 of 3)
        /** Set the steepness of the sigmoid function used in the hidden layers.
        Only useful if a sigmoid function is used in the hidden layers (default 0.5). */
        void set_activation_steepness_hidden(fann_type steepness)
        {
            if (ann != NULL)
            {
                fann_set_activation_steepness_hidden(ann, steepness);
            }
        }

        /** Get the steepness parameter for the sigmoid function used in the output layer. */
        fann_type get_activation_steepness_output()
        {
            fann_type activation_steepness = 0;
            if (ann != NULL)
            {
                activation_steepness = fann_get_activation_steepness_output(ann);
            }
            return activation_steepness;
        }

        /** Set the steepness of the sigmoid function used in the output layer.
        Only useful if a sigmoid function is used in the output layer (default 0.5). */
        void set_activation_steepness_output(fann_type steepness)
        {
            if (ann != NULL)
            {
                fann_set_activation_steepness_output(ann, steepness);
            }
        }
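
        /* Usage sketch (illustrative, not part of the original header): adjusting the
           sigmoid steepness through this wrapper. It assumes "net" is an instance of
           the enclosing FANN wrapper class with a network already created.

               net.set_activation_steepness_hidden(0.5);
               net.set_activation_steepness_output(0.5);
               fann_type output_steepness = net.get_activation_steepness_output();
        */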

        /** Set the error function used during training (default FANN::ERRORFUNC_TANH). */
        void set_train_error_function(error_function_enum train_error_function)
        {
            if (ann != NULL)
            {
                fann_set_train_error_function(ann, train_error_function);
            }
        }

        /** Get the error function used during training. */
        error_function_enum get_train_error_function()
        {
            unsigned int train_error_function = 0;
            if (ann != NULL)
            {
                train_error_function = fann_get_train_error_function(ann);
            }
            return (error_function_enum)train_error_function;
        }
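
        /* Usage sketch (illustrative): selecting a different error function for
           training and reading the setting back. FANN::ERRORFUNC_LINEAR is the
           standard FANN alternative to the default FANN::ERRORFUNC_TANH; "net" is an
           assumed instance of the enclosing wrapper class.

               net.set_train_error_function(FANN::ERRORFUNC_LINEAR);
               error_function_enum ef = net.get_train_error_function();
        */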

        /** The decay is a small negative factor applied each iteration to keep the weights from growing too large (default -0.0001). */
        float get_quickprop_decay()
        {
            float quickprop_decay = 0.0f;
            if (ann != NULL)
            {
                quickprop_decay = fann_get_quickprop_decay(ann);
            }
            return quickprop_decay;
        }

        /** The decay is a small negative factor applied each iteration to keep the weights from growing too large (default -0.0001). */
        void set_quickprop_decay(float quickprop_decay)
        {
            if (ann != NULL)
            {
                fann_set_quickprop_decay(ann, quickprop_decay);
            }
        }

        /** Mu is a factor used to increase and decrease the stepsize (default 1.75). */
        float get_quickprop_mu()
        {
            float quickprop_mu = 0.0f;
            if (ann != NULL)
            {
                quickprop_mu = fann_get_quickprop_mu(ann);
            }
            return quickprop_mu;
        }

        /** Mu is a factor used to increase and decrease the stepsize (default 1.75). */
        void set_quickprop_mu(float quickprop_mu)
        {
            if (ann != NULL)
            {
                fann_set_quickprop_mu(ann, quickprop_mu);
            }
        }
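
        /* Usage sketch (illustrative): tuning the quickprop parameters before
           training with the quickprop algorithm; the values shown are the documented
           defaults. "net" is an assumed instance of the enclosing wrapper class.

               net.set_quickprop_decay(-0.0001f);
               net.set_quickprop_mu(1.75f);
        */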

        /** Tells how much the stepsize should increase during learning (default 1.2). */
        float get_rprop_increase_factor()
        {
            float factor = 0.0f;
            if (ann != NULL)
            {
                factor = fann_get_rprop_increase_factor(ann);
            }
            return factor;
        }

        /** Tells how much the stepsize should increase during learning (default 1.2). */
        void set_rprop_increase_factor(float rprop_increase_factor)
        {
            if (ann != NULL)
            {
                fann_set_rprop_increase_factor(ann, rprop_increase_factor);
            }
        }

        /** Tells how much the stepsize should decrease during learning (default 0.5). */
        float get_rprop_decrease_factor()
        {
            float factor = 0.0f;
            if (ann != NULL)
            {
                factor = fann_get_rprop_decrease_factor(ann);
            }
            return factor;
        }

        /** Tells how much the stepsize should decrease during learning (default 0.5). */
        void set_rprop_decrease_factor(float rprop_decrease_factor)
        {
            if (ann != NULL)
            {
                fann_set_rprop_decrease_factor(ann, rprop_decrease_factor);
            }
        }

        /** The minimum stepsize (default 0.0). */
        float get_rprop_delta_min()
        {
            float delta = 0.0f;
            if (ann != NULL)
            {
                delta = fann_get_rprop_delta_min(ann);
            }
            return delta;
        }

        /** The minimum stepsize (default 0.0). */
        void set_rprop_delta_min(float rprop_delta_min)
        {
            if (ann != NULL)
            {
                fann_set_rprop_delta_min(ann, rprop_delta_min);
            }
        }

        /** The maximum stepsize (default 50.0). */
        float get_rprop_delta_max()
        {
            float delta = 0.0f;
            if (ann != NULL)
            {
                delta = fann_get_rprop_delta_max(ann);
            }
            return delta;
        }

        /** The maximum stepsize (default 50.0). */
        void set_rprop_delta_max(float rprop_delta_max)
        {
            if (ann != NULL)
            {
                fann_set_rprop_delta_max(ann, rprop_delta_max);
            }
        }
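
        /* Usage sketch (illustrative): configuring the RPROP step-size behaviour; the
           values shown are the defaults documented above. "net" is an assumed
           instance of the enclosing wrapper class.

               net.set_rprop_increase_factor(1.2f);
               net.set_rprop_decrease_factor(0.5f);
               net.set_rprop_delta_min(0.0f);
               net.set_rprop_delta_max(50.0f);
        */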

        /** Get the number of input neurons. */
        unsigned int get_num_input()
        {
            unsigned int num_input = 0;
            if (ann != NULL)
            {
                num_input = fann_get_num_input(ann);
            }
            return num_input;
        }

        /** Get the number of output neurons. */
        unsigned int get_num_output()
        {
            unsigned int num_output = 0;
            if (ann != NULL)
            {
                num_output = fann_get_num_output(ann);
            }
            return num_output;
        }

        /** Get the total number of neurons in the entire network.    */
        unsigned int get_total_neurons()
        {
            if (ann == NULL)
            {
                return 0;
            }
            return fann_get_total_neurons(ann);
        }

        /** Get the total number of connections in the entire network. */
        unsigned int get_total_connections()
        {
            if (ann == NULL)
            {
                return 0;
            }
            return fann_get_total_connections(ann);
        }
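
        /* Usage sketch (illustrative): querying the topology of a created network,
           for example to size buffers before inspecting connections. "net" is an
           assumed instance of the enclosing wrapper class.

               unsigned int inputs      = net.get_num_input();
               unsigned int outputs     = net.get_num_output();
               unsigned int neurons     = net.get_total_neurons();
               unsigned int connections = net.get_total_connections();
        */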

#ifdef FIXEDFANN
        /** Returns the position of the decimal point. */
        unsigned int get_decimal_point()
        {
            if (ann == NULL)
            {
                return 0;
            }
            return fann_get_decimal_point(ann);
        }

        /** Returns the multiplier that fix point data is multiplied with. */
        unsigned int get_multiplier()
        {
            if (ann == NULL)
            {
                return 0;
            }
            return fann_get_multiplier(ann);
        }
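
        /* Usage sketch (illustrative, fixed-point builds only): in FANN's fixed-point
           mode the multiplier is 2 raised to the decimal point position, so a raw
           fixed-point value can be converted back to a real number by dividing by the
           multiplier. "net" and "fixed_output" are assumed names.

               unsigned int dec  = net.get_decimal_point();
               unsigned int mult = net.get_multiplier();   // equals 1 << dec
               double real_output = (double)fixed_output / (double)mult;
        */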
#endif /* FIXEDFANN */

        /*********************************************************************/

        /** Change where errors are logged to */
        static void set_error_log(struct fann_error *errdat, FILE *log_file)
        {
            fann_set_error_log(errdat, log_file);
        }

        /** Returns the last error number */
        static unsigned int get_errno(struct fann_error *errdat)
        {
            return fann_get_errno(errdat);
        }

        /** Resets the last error number */
        static void reset_errno(struct fann_error *errdat)
        {
            fann_reset_errno(errdat);
        }

        /** Resets the last error string */
        static void reset_errstr(struct fann_error *errdat)
        {
            fann_reset_errstr(errdat);
        }

        /** Returns the last errstr.
        This function calls fann_reset_errno and fann_reset_errstr */
        static char * get_errstr(struct fann_error *errdat)
        {
            return fann_get_errstr(errdat);
        }

        /** Prints the last error to stderr */
        static void print_error(struct fann_error *errdat)
        {
            fann_print_error(errdat);
        }
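
        /* Usage sketch (illustrative): redirecting FANN error output to a file
           instead of stderr. Passing the error record of a specific network follows
           the FANN C API convention of casting the struct fann pointer to
           struct fann_error; the variable "raw_ann" is an assumed raw pointer, and
           the static wrappers are called unqualified here (qualify with the wrapper
           class name when calling from outside the class).

               FILE *log_file = fopen("fann_errors.log", "w");
               set_error_log((struct fann_error *)raw_ann, log_file);
               if (get_errno((struct fann_error *)raw_ann) != 0)
               {
                   print_error((struct fann_error *)raw_ann);
               }
        */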

        /*********************************************************************/

#ifdef FANN_EXTENSIONS_H_INCLUDED
        /** EXTENSION Get the type of network as defined in fann_network_types */
        network_type_enum get_network_type()
        {
            unsigned int network_type = 0;
            if (ann != NULL)
            {
                network_type = fann_get_network_type(ann);
            }
            return (network_type_enum)network_type;
        }

        /** EXTENSION Get the connection rate used when the network was created */
        float get_connection_rate()
        {
            if (ann == NULL)
            {
                return 0;
            }
            return fann_get_connection_rate(ann);
        }

        /** EXTENSION Get the number of layers in the network */
        unsigned int get_num_layers()
        {
            if (ann == NULL)
            {
                return 0;
            }
            return fann_get_num_layers(ann);
        }
        /** EXTENSION Get the number of neurons in each layer in the network.
            Bias is not included so the layers match the fann_create functions.
            The layers array must be preallocated to at least
            sizeof(unsigned int) * fann_num_layers() long. */
        void get_layer_array(unsigned int *layers)
        {
            if (ann != NULL)
            {
                fann_get_layer_array(ann, layers);
            }
        }

        /** EXTENSION Get the number of bias in each layer in the network.
            The bias array must be preallocated to at least
            sizeof(unsigned int) * fann_num_layers() long. */
        void get_bias_array(unsigned int *bias)
        {
            if (ann != NULL)
            {
                fann_get_bias_array(ann, bias);
            }
        }

        /** EXTENSION Get the connections in the network.
            The connections array must be preallocated to at least
            sizeof(connection) * fann_get_total_connections() long. */
        void get_connection_array(connection *connections)
        {
            if (ann != NULL)
            {
                fann_get_connection_array(ann, connections);
            }
        }
        /** EXTENSION Set connections in the network.
            Only the weights can be changed; connections and weights are ignored
            if they do not already exist in the network.
            The array must have sizeof(connection) * num_connections size. */
        void set_weight_array(connection *connections, unsigned int num_connections)
        {
            if (ann != NULL)
            {
                fann_set_weight_array(ann, connections, num_connections);
            }
        }
        /** EXTENSION Set a connection in the network.
            Only the weight can be changed. The connection/weight is
            ignored if it does not already exist in the network. */
        void set_weight(unsigned int from_neuron, unsigned int to_neuron, fann_type weight)
        {
            if (ann != NULL)
            {
                fann_set_weight(ann, from_neuron, to_neuron, weight);
            }
        }
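
        /* Usage sketch (illustrative, extension build only): reading the per-layer
           neuron counts into a caller-allocated buffer, as get_layer_array requires.
           "net" is an assumed instance of the enclosing wrapper class.

               unsigned int num_layers = net.get_num_layers();
               unsigned int *layers = new unsigned int[num_layers];
               net.get_layer_array(layers);
               // ... inspect layers[0 .. num_layers-1] ...
               delete[] layers;
        */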
#ifdef USE_SAVE_EX
        /** EXTENSION Save the network, return false when an error occurs. */
        bool save_ex(const char *configuration_file)
        {
            if (ann == NULL)
            {
                return false;
            }
            if (fann_save_ex(ann, configuration_file) == -1)
            {
                return false;
            }
            return true;
        }
#endif /* USE_SAVE_EX */
#endif /* FANN_EXTENSIONS_H_INCLUDED */

        /*********************************************************************/

    private:
        /** Pointer to the encapsulated fann neural net structure. */
        struct fann *ann;
    };

    /*************************************************************************/
};

#endif /* FANN_CPP_H_INCLUDED */
