// network.cpp
/*
 * lwneuralnet++ : C++ implementation of class network
 * By Luca Cinti and Lorenzo Masetti, based on the
 * lightweight neural net library written in C by
 * Peter van Rossum.
 */
#include "stdafx.h"
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <malloc.h>
#include <math.h>
#include <time.h>
#include <sstream>
#include <string>

#include "network.h"

/****************************************
 * Compile-time options
 ****************************************/

/*!\brief [Private] Default momentum of a network. */
#define DEFAULT_MOMENTUM 0.0

/*!\brief [Private] Default learning rate of a network. */
#define DEFAULT_LEARNING_RATE 0.25

/*!\brief [Private] Default range with which weights of a network will be
 * initialized. */
#define RANDOM_LARGE_WEIGHTS_LOGISTIC 0.1
#define RANDOM_LARGE_WEIGHTS_TANH 0.05

/*!\brief [Private] Default max value for a learning rate in SuperSab mode. */
#define DEFAULT_MAX_NU 40
/*!\brief [Private] Default min value for a learning rate in SuperSab mode. */
#define DEFAULT_MIN_NU 0.00001
/*!\brief [Private] Default nu_up in SuperSab mode. */
#define DEFAULT_NUUP 1.2
/*!\brief [Private] Default nu_down in SuperSab mode. */
#define DEFAULT_NUDOWN 0.8
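/* The four SuperSab constants above parameterize per-weight adaptive
 * learning rates. As a rough sketch (the actual update lives in the
 * training code, not in this file): each weight keeps its own rate nu,
 * multiplied by nuup while successive gradients agree in sign, by nudown
 * when the sign flips, and clamped to [minnu, maxnu]: */
#if 0
/* Hypothetical illustration only; not part of the library's API. */
static double
supersab_rate (double nu, double grad, double prev_grad)
{
  if (grad * prev_grad > 0.0)
    nu *= DEFAULT_NUUP;         /* same sign: speed up */
  else if (grad * prev_grad < 0.0)
    nu *= DEFAULT_NUDOWN;       /* sign change: slow down */
  if (nu > DEFAULT_MAX_NU)
    nu = DEFAULT_MAX_NU;        /* clamp to [minnu, maxnu] */
  if (nu < DEFAULT_MIN_NU)
    nu = DEFAULT_MIN_NU;
  return nu;
}
#endif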
#ifndef _WINDOWS
#define min(X, Y) ((X) < (Y) ? (X) : (Y))
#define max(X, Y) ((X) > (Y) ? (X) : (Y))
#endif
const int network::LOGISTIC = NET_LOGISTIC;
const int network::TANH = NET_TANH;

/****************************************
 * Initialization
 ****************************************/

/*!\brief Assign random values to all weights in the network.
 * \param range Floating point number.
 *
 * All weights in the neural network are assigned a random value
 * from the interval [-range, range].
 */
void
network::randomize (double range)
{
  int l, nu, nl;

  srand ((unsigned) time (NULL));
  for (l = 1; l < no_of_layers; l++) {
    for (nu = 0; nu < layer[l].no_of_neurons; nu++) {
      for (nl = 0; nl < layer[l - 1].no_of_neurons; nl++) {
        layer[l].neuron[nu].weight[nl] =
          2.0 * range * ((double) rand () / RAND_MAX - 0.5);
      }
    }
  }
}

#if 0
/*!\brief Set weights of the network to 0. */
void
network::reset_weights ()
{
  int l, nu, nl;

  for (l = 1; l < no_of_layers; l++) {
    for (nu = 0; nu < layer[l].no_of_neurons; nu++) {
      for (nl = 0; nl < layer[l - 1].no_of_neurons; nl++) {
        layer[l].neuron[nu].weight[nl] = 0.0;
      }
    }
  }
}
#endif

/*!\brief [Private] Set deltas of the network to 0. */
void
network::reset_deltas ()
{
  int l, nu, nl;

  for (l = 1; l < no_of_layers; l++) {
    for (nu = 0; nu < layer[l].no_of_neurons; nu++) {
      for (nl = 0; nl < layer[l - 1].no_of_neurons; nl++) {
        layer[l].neuron[nu].delta[nl] = 0.0;
      }
    }
  }
}

/*!\brief [Private] Set sumdeltas of the network to 0. */
void
network::reset_sumdeltas ()
{
  int l, nu, nl;

  for (l = 1; l < no_of_layers; l++) {
    for (nu = 0; nu < layer[l].no_of_neurons; nu++) {
      for (nl = 0; nl < layer[l - 1].no_of_neurons; nl++) {
        layer[l].neuron[nu].sumdeltas[nl] = 0.0;
      }
    }
  }
}

/*!\brief [Private] Set deltas and sumdeltas of the network to 0. */
void
network::reset_deltas_and_sumdeltas ()
{
  int l, nu, nl;

  for (l = 1; l < no_of_layers; l++) {
    for (nu = 0; nu < layer[l].no_of_neurons; nu++) {
      for (nl = 0; nl < layer[l - 1].no_of_neurons; nl++) {
        layer[l].neuron[nu].delta[nl] = 0.0;
        layer[l].neuron[nu].sumdeltas[nl] = 0.0;
      }
    }
  }
}

/****************************************
 * Memory Management
 ****************************************/

/*!\brief [Private] Allocate memory for the neurons in a layer of a network.
 * \param layer Pointer to layer of a neural network.
 * \param no_of_neurons Integer.
 *
 * Allocate memory for a list of no_of_neurons neurons in the specified
 * layer.
 */
void
network::allocate_layer (layer_t * layer, int no_of_neurons)
{
  layer->no_of_neurons = no_of_neurons;
  layer->neuron = (neuron_t *) calloc (no_of_neurons, sizeof (neuron_t));
}

/*!\brief [Private] Allocate memory for the weights connecting two layers of a network.
 * \param lower Pointer to one layer of a neural network.
 * \param upper Pointer to the next layer of a neural network.
 *
 * Allocate memory for the weights connecting two layers of a neural
 * network. The neurons in these layers should previously have been
 * allocated with allocate_layer().
 */
void
network::allocate_weights (layer_t * lower, layer_t * upper)
{
  int n;

  for (n = 0; n < upper->no_of_neurons; n++) {
    upper->neuron[n].weight =
      (double *) calloc (lower->no_of_neurons, sizeof (double));
    upper->neuron[n].delta =
      (double *) calloc (lower->no_of_neurons, sizeof (double));
    upper->neuron[n].sumdeltas =
      (double *) calloc (lower->no_of_neurons, sizeof (double));
  }
}
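/* Layout note on the structures built by the two allocators above: each
 * connection is stored on the upper neuron, indexed by the lower neuron,
 * together with its delta and sumdeltas. For example (field access shown
 * as from inside the class): */
#if 0
double w = layer[l].neuron[nu].weight[nl];  /* weight of edge (l-1,nl) -> (l,nu) */
double d = layer[l].neuron[nu].delta[nl];   /* matching weight change from last update */
#endif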
/*!\brief [Private] Allocate memory for a network.
 * \param act Activation function (network::LOGISTIC or network::TANH).
 * \param layers Integer.
 * \param arglist Pointer to sequence of integers.
 *
 * Allocate memory for a neural network with no_of_layers layers,
 * including the input and output layer. The number of neurons in each
 * layer is given in arglist, with arglist[0] being the number of
 * neurons in the input layer and arglist[no_of_layers-1] the number of
 * neurons in the output layer.
 */
void
network::allocate_l (int act, int layers, const int *arglist)
{
  int l;

  /* sanity check */
  if (layers < 2) {
    throw runtime_error ("A network must have at least two layers");
  }

  /* allocate memory for the network */
  no_of_layers = layers;
  layer = (layer_t *) calloc (no_of_layers, sizeof (layer_t));
  for (l = 0; l < no_of_layers; l++) {
    allocate_layer (&layer[l], arglist[l]);
  }
  for (l = 1; l < no_of_layers; l++) {
    allocate_weights (&layer[l - 1], &layer[l]);
  }

  /* abbreviations for input and output layer */
  input_layer = &layer[0];
  output_layer = &layer[no_of_layers - 1];

  /* default values for network constants */
  momentum = DEFAULT_MOMENTUM;
  learning_rate = DEFAULT_LEARNING_RATE;
  activation = act;

  /* default parameters for SuperSab training */
  nus = NULL;
  maxnu = DEFAULT_MAX_NU;
  minnu = DEFAULT_MIN_NU;
  nuup = DEFAULT_NUUP;
  nudown = DEFAULT_NUDOWN;

  /* default (meaningless) values for other fields */
  no_of_patterns = 0;
  global_error = 0.0;

  /* initialize weights and deltas */
  randomize ((act == NET_LOGISTIC) ? RANDOM_LARGE_WEIGHTS_LOGISTIC
                                   : RANDOM_LARGE_WEIGHTS_TANH);
  reset_deltas_and_sumdeltas ();
}

/*!\brief Constructor for a network.
 * \param activ network::LOGISTIC or network::TANH
 * \param layers Integer.
 * \param ... Sequence of integers.
 *
 * Allocate memory for a neural network with no_of_layers layers,
 * including the input and output layer. The number of neurons in each
 * layer is given as ..., starting with the input layer and ending with
 * the output layer.
 */
network::network (int activ, int layers, ...)
{
  int l, *arglist;
  va_list args;

  /* sanity check */
  if (layers < 2) {
    throw runtime_error ("A network must have at least two layers");
  }
  no_of_layers = layers;
  arglist = (int *) calloc (no_of_layers, sizeof (int));

  /* the variable-length argument list starts after parameter layers */
  va_start (args, layers);
  for (l = 0; l < no_of_layers; l++) {
    arglist[l] = va_arg (args, int);
  }
  va_end (args);

  allocate_l (activ, no_of_layers, arglist);
  free (arglist);
}

/*!\brief Constructor for a network.
 * \param activ Activation function (network::LOGISTIC or network::TANH).
 * \param layers Vector of integers containing the number of neurons of
 * each layer.
 *
 * Allocate memory for a neural network with layers.size() layers,
 * including the input and output layer. The number of neurons in each
 * layer is given in the vector, starting with the input layer and ending
 * with the output layer.
 *
 * The parameters of the network are set to default values
 * (for example, momentum is 0). You can change them later
 * via the mutator methods.
 *
 * layers.size() < 2 throws a runtime_error exception.
 */
network::network (int activ, vector<int> layers)
{
  /* sanity check */
  if (layers.size () < 2) {
    throw runtime_error ("A network must have at least two layers");
  }
  no_of_layers = layers.size ();
  int *arglist = (int *) calloc (no_of_layers, sizeof (int));
  for (int l = 0; l < no_of_layers; l++) {
    arglist[l] = layers[l];
  }
  allocate_l (activ, no_of_layers, arglist);
  free (arglist);
}

/*!\brief Constructor. Load a network from a file.
 * \param filename Name of the file to read from.
 * \param binary Whether the file is binary (true) or text (false).
 *
 * If filename does not exist, throws an exception.
 */
network::network (const char *filename, bool binary)
{
  if (binary) {
    do_load (filename);
  } else {
    do_textload (filename);
  }
}
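/* A minimal usage sketch of the three constructors above; the topology
 * and the file name are made up for illustration: */
#if 0
/* a 2-3-1 network via the varargs constructor */
network a (network::LOGISTIC, 3, 2, 3, 1);

/* the same topology via the vector constructor */
vector<int> sizes;
sizes.push_back (2);
sizes.push_back (3);
sizes.push_back (1);
network b (network::TANH, sizes);

/* reload a previously saved network from a binary file */
network c ("saved.net", true);
#endif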
/*!\brief Destructor. Free memory allocated for a network. */
network::~network ()
{
  destroy ();
}

/*!\brief [Private] Code for the destructor. */
void
network::destroy ()
{
  int l, n;

  for (l = 0; l < no_of_layers; l++) {
    if (l != 0) {
      for (n = 0; n < layer[l].no_of_neurons; n++) {
        free (layer[l].neuron[n].weight);
        free (layer[l].neuron[n].delta);
        free (layer[l].neuron[n].sumdeltas);
      }
    }
    free (layer[l].neuron);
  }
  free (layer);
  free (nus);
}

/********************************
 * Accessors and mutators (not inline)
 ********************************/

/*!\brief Retrieve the number of neurons on a layer of a network.
 * \param l Layer index.
 * \return Number of neurons on layer l.
 */
int
network::get_no_of_neurons (int l) const
{
  /* sanity check */
  if ((l < 0) || (l >= no_of_layers)) {
    return 0;
  }
  return layer[l].no_of_neurons;
}

/*!\brief Retrieve a weight of a network.
 * \param l Number of lower layer.
 * \param nl Number of neuron in the lower layer.
 * \param nu Number of neuron in the next layer.
 * \return Weight connecting the neuron numbered nl in the layer
 * numbered l-1 with the neuron numbered nu in the layer numbered l.
 */
double
network::get_weight (int l, int nl, int nu) const
{
  if ((l <= 0) || (l >= no_of_layers)) {
    return 0;
  }
  if ((nu < 0) || (nu >= layer[l].no_of_neurons)) {
    return 0;
  }
  if ((nl < 0) || (nl >= layer[l - 1].no_of_neurons)) {
    return 0;
  }
  return layer[l].neuron[nu].weight[nl];
}
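/* The two accessors above are enough to inspect every weight from outside
 * the class. A hypothetical dump loop, assuming the header also provides
 * an inline get_no_of_layers() accessor: */
#if 0
void
print_all_weights (const network &net)
{
  for (int l = 1; l < net.get_no_of_layers (); l++)
    for (int nu = 0; nu < net.get_no_of_neurons (l); nu++)
      for (int nl = 0; nl < net.get_no_of_neurons (l - 1); nl++)
        printf ("%d %d %d %f\n", l, nl, nu, net.get_weight (l, nl, nu));
}
#endif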
/****************************************
 * File I/O for binary files
 ****************************************/

/*!\brief [Private] Write a network to a file.
 * \param file Pointer to file descriptor.
 */
void
network::fbprint (FILE * file) const
{
  int l, nu;
  int informations_dim = no_of_layers + 2;
  int *informations = new int[informations_dim];
  double constants[3];

  /* write network dimensions */
  informations[0] = activation;
  informations[1] = no_of_layers;
  for (l = 0; l < no_of_layers; l++) {
    informations[l + 2] = layer[l].no_of_neurons;
  }
  fwrite (informations, sizeof (int), informations_dim, file);

  /* write network constants */
  constants[0] = momentum;
  constants[1] = learning_rate;
  constants[2] = global_error;
  fwrite (constants, sizeof (double), 3, file);

  /* write network weights */
  for (l = 1; l < no_of_layers; l++) {
    for (nu = 0; nu < layer[l].no_of_neurons; nu++) {
      fwrite (layer[l].neuron[nu].weight, sizeof (double),
              layer[l - 1].no_of_neurons, file);
    }
  }
  delete[] informations;
}

/*!\brief [Private] Read a network from a binary file.
 * \param file Pointer to a file descriptor.
 */
void
network::fbscan (FILE * file)
{
  int no_of_layers, l, nu, funct, *arglist;

  /* read activation function */
  fread (&funct, sizeof (int), 1, file);

  /* Tricky solution for importing files in the old format (used by the
   * C lwneuralnet), which has no function number at the beginning: the
   * function number can be 0 or 1, while the number of layers must be
   * >= 2. So if funct > 1 the file should be in the old format; set
   * funct = 0 (logistic) and take the first number read as
   * no_of_layers. */
  if (funct > NET_TANH) {
    fprintf (stderr,
             "lwneuralnet++: Warning: importing file in old lwneuralnet format.\n\n");
    no_of_layers = funct;
    funct = NET_LOGISTIC;
  } else {
    /* read network dimensions */
    fread (&no_of_layers, sizeof (int), 1, file);
  }
  if (no_of_layers < 2) {
    throw runtime_error
      ("Error in network file format. Number of layers must be at least 2.");
  }
  arglist = (int *) calloc (no_of_layers, sizeof (int));
  if (arglist == NULL) {
    throw runtime_error ("Error in network file format");
  }
  if (fread (arglist, sizeof (int), no_of_layers, file) <
      (size_t) no_of_layers) {
    throw runtime_error
      ("Error in network file format. No information about layers");
  }

  /* allocate memory for the network */
  allocate_l (funct, no_of_layers, arglist);

  /* read network constants */
  fread (&momentum, sizeof (double), 1, file);
  fread (&learning_rate, sizeof (double), 1, file);
  fread (&global_error, sizeof (double), 1, file);
  set_activation (funct);

  /* read network weights */
  for (l = 1; l < no_of_layers; l++) {
    for (nu = 0; nu < layer[l].no_of_neurons; nu++) {
      fread (layer[l].neuron[nu].weight, sizeof (double),
             layer[l - 1].no_of_neurons, file);
    }
  }
  free (arglist);
}

/*!\brief Write a network to a binary file.
 * \param filename Pointer to name of file to write to.
 * \return true on success, false on failure.
 */
bool
network::save (const char *filename) const
{
  FILE *file;

  /* open in binary mode: fbprint() writes raw ints and doubles */
  file = fopen (filename, "wb");
  if (file == NULL) {
    return false;
  }
  fbprint (file);
  return (fclose (file) == 0);
}

/*!\brief Read a network from a binary file.
 * \param filename Pointer to name of file to read from.
 *
 * If filename does not exist, throws an exception.
 */
void
network::load (const char *filename)
{
  destroy ();
  do_load (filename);
}

/*!\brief [Private] Read a network from a binary file.
 * \param filename Pointer to name of file to read from.
 *
 * If filename does not exist, throws an exception.
 */
void
network::do_load (const char *filename)
{
  FILE *file;

  /* open in binary mode: fbscan() reads raw ints and doubles */
  file = fopen (filename, "rb");
  if (file == NULL) {
    throw runtime_error ("lwneuralnet++: File " + string (filename) +
                         " not found");
  }
  fbscan (file);
  fclose (file);
}

/****************************************
 * File I/O for text files
 ****************************************/

/*!\brief [Private] Write a network to a file.
 * \param file Pointer to file descriptor.
 */
void
network::fprint (FILE * file) const
{
  int l, nu, nl;

  /* write activation function */
  fprintf (file, "%i\n", get_activation ());

  /* write network dimensions */
  fprintf (file, "%i\n", no_of_layers);
  for (l = 0; l < no_of_layers; l++) {
    fprintf (file, "%i\n", layer[l].no_of_neurons);
  }