⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 network.c

📁 * Lightweight backpropagation neural network. * This is a lightweight library implementing a neura
💻 C
📖 第 1 页 / 共 3 页
字号:
net_set_bias (network_t *net, int l, int nu, float weight)
{
  assert (net != NULL);
  assert (0 < l && l < net->no_of_layers);
  assert (0 <= nu && nu < net->layer[l].no_of_neurons);
  /* The bias is stored as the weight from the virtual bias unit
   * (index no_of_neurons in the lower layer) into neuron nu. */
  net_set_weight(net, l-1, net->layer[l-1].no_of_neurons, nu, weight);
}

/****************************************
 * File I/O
 ****************************************/

/*!\brief Write a network to a file.
 * \param file Pointer to file descriptor.
 * \param net Pointer to a neural network.
 * \return 0 on success, a negative number on failure.
 *
 * Text format: layer count, per-layer neuron counts, the three
 * network constants, then all weights of layers 1..n-1, one value
 * per line.  net_fscan() reads this format back.
 */
int
net_fprint (FILE *file, const network_t *net)
{
  int l, nu, nl, result;

  assert (file != NULL);
  assert (net != NULL);

  /* write network dimensions */
  result = fprintf (file, "%i\n", net->no_of_layers);
  if (result < 0) {
    return result;
  }
  for (l = 0; l < net->no_of_layers; l++) {
    result = fprintf (file, "%i\n", net->layer[l].no_of_neurons);
    if (result < 0) {
      return result;
    }
  }

  /* write network constants */
  result = fprintf (file, "%f\n", net->momentum);
  if (result < 0) {
    return result;
  }
  result = fprintf (file, "%f\n", net->learning_rate);
  if (result < 0) {
    return result;
  }
  result = fprintf (file, "%f\n", net->global_error);
  if (result < 0) {
    return result;
  }

  /* write network weights; nl runs to <= no_of_neurons because the
   * extra slot holds the bias weight */
  for (l = 1; l < net->no_of_layers; l++) {
    for (nu = 0; nu < net->layer[l].no_of_neurons; nu++) {
      for (nl = 0; nl <= net->layer[l - 1].no_of_neurons; nl++) {
        result = fprintf (file, "%f\n", net->layer[l].neuron[nu].weight[nl]);
        if (result < 0) {
          return result;
        }
      }
    }
  }

  return 0;
}

/*!\brief Read a network from a file.
 * \param file Pointer to a file descriptor.
 * \return Pointer to the read neural network on success, NULL on failure.
*/network_t *net_fscan (FILE *file){  int no_of_layers, l, nu, nl, *arglist, result;  network_t *net;  assert (file != NULL);  /* read network dimensions */  result = fscanf (file, "%i", &no_of_layers);  if (result <= 0) {    return NULL;  }  arglist = calloc (no_of_layers, sizeof (int));  if (arglist == NULL) {    return NULL;  }  for (l = 0; l < no_of_layers; l++) {    result = fscanf (file, "%i", &arglist[l]);    if (result <= 0) {      return NULL;    }  }  /* allocate memory for the network */  net = net_allocate_l (no_of_layers, arglist);  free (arglist);  if (net == NULL) {    return NULL;  }  /* read network constants */  result = fscanf (file, "%f", &net->momentum);  if (result <= 0) {    net_free (net);    return NULL;  }  result = fscanf (file, "%f", &net->learning_rate);  if (result <= 0) {    net_free (net);    return NULL;  }  result = fscanf (file, "%f", &net->global_error);  if (result <= 0) {    net_free (net);    return NULL;  }  /* read network weights */  for (l = 1; l < net->no_of_layers; l++) {    for (nu = 0; nu < net->layer[l].no_of_neurons; nu++) {      for (nl = 0; nl <= net->layer[l - 1].no_of_neurons; nl++) {        result = fscanf (file, "%f", &net->layer[l].neuron[nu].weight[nl]);        if (result <= 0) {          net_free (net);          return NULL;        }      }    }  }  return net;}/*!\brief Write a network to a stdout. * \param net Pointer to a neural network. * \return 0 on success, a negative number on failure. */intnet_print (const network_t *net){  assert (net != NULL);  return net_fprint (stdout, net);}/*!\brief Write a network to a file. * \param filename Pointer to name of file to write to. * \param net Pointer to a neural network. * \return 0 on success, a negative number on failure. 
*/intnet_save (const char *filename, const network_t *net){  int result;  FILE *file;  assert (filename != NULL);  assert (net != NULL);  file = fopen (filename, "w");  if (file == NULL) {    return EOF;  }  result = net_fprint (file, net);  if (result < 0) {    fclose (file);    return result;  }  return fclose (file);}/*!\brief Read a network from a file. * \param filename Pointer to name of file to read from. * \return Pointer to the read neural network on success, NULL on failure. */network_t *net_load (const char *filename){  FILE *file;  network_t *net;  assert (filename != NULL);  file = fopen (filename, "r");  if (file == NULL) {    return NULL;  }  net = net_fscan (file);  fclose (file);  return net;}/*!\brief Write a network to a binary file. * \param file Pointer to file descriptor. * \param net Pointer to a neural network. * \return 0 on success, a negative number on failure. * * Write a binary representation of a neural network to a file. Note * that the resulting file is not necessarily portable across * platforms. */intnet_fbprint (FILE *file, const network_t *net){  int l, nu;  size_t info_dim = net->no_of_layers + 1;  int info[info_dim];  float constants[3];  assert (file != NULL);  assert (net != NULL);  /* write network dimensions */  info[0] = net->no_of_layers;  for (l = 0; l < net->no_of_layers; l++) {    info[l + 1] = net->layer[l].no_of_neurons;  }  if (fwrite (info, sizeof (int), info_dim, file) < info_dim) {    return -1;  }  /* write network constants */  constants[0] = net->momentum;  constants[1] = net->learning_rate;  constants[2] = net->global_error;  fwrite (constants, sizeof (float), 3, file);  /* write network weights */  for (l = 1; l < net->no_of_layers; l++) {    for (nu = 0; nu < net->layer[l].no_of_neurons; nu++) {      fwrite (net->layer[l].neuron[nu].weight, sizeof (float),              net->layer[l - 1].no_of_neurons + 1, file);    }  }  return 0;}/*!\brief Read a network from a binary file. 
* \param file Pointer to a file descriptor. * \return Pointer to the read neural network on success, NULL on failure. */network_t *net_fbscan (FILE *file){  int no_of_layers, l, nu, *arglist;  network_t *net;  assert (file != NULL);  /* read network dimensions */  if (fread (&no_of_layers, sizeof (int), 1, file) < 1) {    return NULL;  }  arglist = calloc (no_of_layers, sizeof (int));  if (fread (arglist,sizeof(int),no_of_layers,file) < (size_t) no_of_layers) {    free (arglist);    return NULL;  }  /* allocate memory for the network */  net = net_allocate_l (no_of_layers, arglist);  free (arglist);  /* read network constants */  fread (&net->momentum, sizeof (float), 1, file);  fread (&net->learning_rate, sizeof (float), 1, file);  fread (&net->global_error, sizeof (float), 1, file);  /* read network weights */  for (l = 1; l < net->no_of_layers; l++) {    for (nu = 0; nu < net->layer[l].no_of_neurons; nu++) {      fread (net->layer[l].neuron[nu].weight, sizeof (float),             net->layer[l - 1].no_of_neurons + 1, file);    }  }  return net;}/*!\brief Write a network to a binary file. * \param filename Pointer to name of file to write to. * \param net Pointer to a neural network. * \return 0 on success, a negative number on failure. * * Write a binary representation of a neural network to a file. Note * that the resulting file is not necessarily portable across * platforms. */intnet_bsave (const char *filename, const network_t *net){  FILE *file;  int result;  assert (filename != NULL);  assert (net != NULL);  file = fopen (filename, "wb");  result = net_fbprint (file, net);  if (result < 0) {    fclose (file);    return result;  }  return fclose (file);}/*!\brief Read a network from a binary file. * \param filename Pointer to name of file to read from. * \return Pointer to the read neural network on success, NULL on failure. 
*/network_t *net_bload (const char *filename){  FILE *file;  network_t *net;  assert (filename != NULL);  file = fopen (filename, "rb");  net = net_fbscan (file);  fclose (file);  return net;}/**************************************** * Input and Output ****************************************//*!\brief [Internal] Copy inputs to input layer of a network. */static inline voidset_input (network_t *net, const float *input){  int n;  assert (net != NULL);  assert (input != NULL);  for (n = 0; n < net->input_layer->no_of_neurons; n++) {    net->input_layer->neuron[n].output = input[n];  }}/*!\brief [Interal] Copy outputs from output layer of a network. */static inline voidget_output (const network_t *net, float *output){  int n;    assert (net != NULL);  assert (output != NULL);  for (n = 0; n < net->output_layer->no_of_neurons; n++) {    output[n] = net->output_layer->neuron[n].output;  }}/**************************************** * Sigmoidal function ****************************************/#if 1#include "interpolation.c"/*!\brief [Internal] Activation function of a neuron. */static inline floatsigma (float x){  int index;  index = (int) ((x - min_entry) / interval);  if (index <= 0) {    return interpolation[0];  } else if (index >= num_entries) {    return interpolation[num_entries - 1];  } else {    return interpolation[index];  }}#else /* reference implementation of sigma *//*!\brief [Internal] Activation function of a neuron. */static inline floatsigma (float x){  return 1.0 / (1.0 + exp (-x));}#endif/**************************************** * Forward and Backward Propagation ****************************************//*!\brief [Internal] Forward propagate inputs from one layer to next layer. 
 */
static inline void
propagate_layer (layer_t *lower, layer_t *upper)
{
  int nu, nl;
  float value;

  assert (lower != NULL);
  assert (upper != NULL);
  for (nu = 0; nu < upper->no_of_neurons; nu++) {
    value = 0.0;
    /* nl runs to <= no_of_neurons: the extra index is the lower
     * layer's bias unit, whose weight is the neuron's bias */
    for (nl = 0; nl <= lower->no_of_neurons; nl++) {
      value += upper->neuron[nu].weight[nl] * lower->neuron[nl].output;
    }
#if 0 /* if activation value has to be stored in neuron */
    upper->neuron[nu].activation = value;
#endif
    upper->neuron[nu].output = sigma (value);
  }
}

/*!\brief [Internal] Forward propagate inputs through a network. */
static inline void
forward_pass (network_t *net)
{
  int l;

  assert (net != NULL);
  /* start at layer 1: layer 0 is the input layer */
  for (l = 1; l < net->no_of_layers; l++) {
    propagate_layer (&net->layer[l - 1], &net->layer[l]);
  }
}

/*!\brief Compute the output error of a network.
 * \param net Pointer to a neural network.
 * \param target Pointer to a sequence of floating point numbers.
 * \return Output error of the neural network.
 *
 * Before calling this routine, net_compute() should have been called to
 * compute the outputs for given inputs. This routine compares the
 * actual output of the neural network (which is stored internally in
 * the neural network) and the intended output (in target). The return
 * value is half the square of the Euclidean distance between the
 * actual output and the target. This routine also prepares the network for
 * backpropagation training by storing (internally in the neural
 * network) the errors associated with each of the outputs. Note
 * that the targets should lie in the interval [0,1], since the outputs
 * of the neural network will always lie in the interval (0,1).
 */
float
net_compute_output_error (network_t *net, const float *target)
{
  int n;
  float output, error;

  assert (net != NULL);
  assert (target != NULL);
  net->global_error = 0.0;
  for (n = 0; n < net->output_layer->no_of_neurons; n++) {

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -