/* neuro_k.c */
/* Beholder RMON ethernet network monitor, Copyright (C) 1993 DNPAP group */
/* See file COPYING 'GNU General Public Licence' for copyright details */

#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdarg.h>     /* va_list/va_arg, needed by NewNeuronNet() */
#include <memory.h>
#include <time.h>
#ifdef UNIX
#include <sys/time.h>
#endif

#include "neuro_d.h"
#include "neuro_e.h"
#include "neuro_k.h"

static CHAR MODULE[] = "neuroKernel";
static BOOLEAN initialized = FALSE;

static VOID NeuronInit(VOID);
static Weight* AllocWeight(LONG number);
static VOID FreeWeight(Weight* weight);
static Neuron* AllocNeuron(LONG number);
static VOID FreeNeuron(Neuron* neuron);
static NeuronLayer* AllocNeuronLayer(LONG number);
static VOID FreeNeuronLayer(NeuronLayer* layer);
static NeuronNet* AllocNeuronNet(VOID);
static VOID FreeNeuronNet(NeuronNet* net);
static LONG CalcNeuronError(NeuronNet* net, InOutput* out, FLOAT* error);
static LONG SetInput(NeuronNet* net, InOutput in[], LONG nrin);
static LONG SetNeuronInput(NeuronNet* net, InOutput* in);
static LONG CalcForward(NeuronNet* net);
static FLOAT NeuronCalcActivation(NeuronNet* net, Neuron* neuron);
static LONG SetDelta(NeuronNet* net, InOutput out[], LONG nrout);
static LONG SetNeuronDelta(NeuronNet* net, InOutput* out);
static VOID AddNeuronDelta(NeuronNet* net, LONG l, LONG n, FLOAT value);
static LONG CalcBackward(NeuronNet* net);
static LONG CalcWeights(NeuronNet* net, BOOLEAN update, FLOAT lambda, FLOAT mu);
static FLOAT ClipWeight(FLOAT value);
static LONG GetOutput(NeuronNet* net, InOutput out[], LONG nrout);
static LONG GetNeuronOutput(NeuronNet* net, InOutput* out);
static FLOAT GetNeuronDerivOutput2(NeuronNet* net, LONG l, LONG n);
static FLOAT GetNeuronOutput2(NeuronNet* net, LONG l, LONG n);

#ifdef NEURODEBUG
static VOID PrintWeight(Weight* weight, LONG w, LONG n, LONG l);
static VOID PrintNeuron(Neuron* neuron, LONG n, LONG l);
static VOID PrintNeuronLayer(NeuronLayer* layer, LONG l);
static VOID PrintNeuronNet(NeuronNet* net);
#endif

static FLOAT GetWeight(NeuronNet* net, LONG l, LONG n, LONG w);
static VOID SetWeight(NeuronNet* net, LONG l, LONG n, LONG w, FLOAT value);
static FLOAT logistic(Neuron* neuron);
static FLOAT dlogistic(Neuron* neuron);
static FLOAT step(Neuron* neuron);
static FLOAT dstep(Neuron* neuron);

static FLOAT sqr(FLOAT x) { return x*x; }
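/*
 * The data types come from neuro_k.h, which is not part of this listing.
 * Judging purely from the field accesses below, the layout is presumably
 * along these lines (an inferred sketch, not the authoritative
 * declarations):
 *
 *   Weight      -- fromlayer, fromneuron (LONG): source neuron of the link;
 *                  value, delta, prvdelta (FLOAT): weight and its updates
 *   Neuron      -- bias, dbias, prvdbias, activation, output, delta (FLOAT);
 *                  nrweights (LONG); weights (Weight*);
 *                  func, dfunc: activation function and its derivative,
 *                  both taking the Neuron itself
 *   NeuronLayer -- nrneurons (LONG); neurons (Neuron*)
 *   NeuronNet   -- nrlayers (LONG); layers (NeuronLayer*)
 *   InOutput    -- inoutnr (LONG): neuron index; value (FLOAT)
 */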
/*
 * NewNeuronNet: build a fully connected feed-forward net.  The variable
 * arguments give the number of neurons per layer, input layer first;
 * biases and weights start as random values in about [-0.5, 0.5).
 */
NeuronNet* NewNeuronNet(UINT seed, LONG nrlayers, ...)
{
    NeuronNet* net;
    NeuronLayer* layers;
    Neuron* neurons;
    Weight* weights;
    LONG i, j, l, n, w, nrneurons;
    va_list nrneurons_list;

    if (!initialized)
        NeuronInit();
    if (seed > 1)
        srand(seed);
    else
        srand((unsigned) time(NULL));
    if (nrlayers < 2)
    {
        ERROR(MODULE, NEURO_SMALL);
        return NULL;
    }
    va_start(nrneurons_list, nrlayers);
    if ((net = AllocNeuronNet()) == NULL)
    {
        va_end(nrneurons_list);
        return NULL;
    }
    if ((layers = AllocNeuronLayer(nrlayers)) == NULL)
    {
        FreeNeuronNet(net);
        va_end(nrneurons_list);
        return NULL;
    }
    net->nrlayers = nrlayers;
    net->layers = layers;
    for (l = 0; l < nrlayers; l++)
    {
        nrneurons = va_arg(nrneurons_list, LONG);
        if (nrneurons == 0 || (neurons = AllocNeuron(nrneurons)) == NULL)
            break;
        layers[l].nrneurons = nrneurons;
        layers[l].neurons = neurons;
        if (l == 0)                             /* at input layer? */
        {
            for (n = 0; n < nrneurons; n++)
            {
                layers[l].neurons[n].bias = (FLOAT)(RANDOM(1000)-500)/1000;
                layers[l].neurons[n].dbias = 0;
                layers[l].neurons[n].prvdbias = 0;
                layers[l].neurons[n].func = step;
                layers[l].neurons[n].dfunc = dstep;
            }
        }
        else
        {
            for (n = 0; n < nrneurons; n++)
            {
                layers[l].neurons[n].bias = (FLOAT)(RANDOM(1000)-500)/1000;
                layers[l].neurons[n].dbias = 0;
                layers[l].neurons[n].prvdbias = 0;
                layers[l].neurons[n].func = logistic;
                layers[l].neurons[n].dfunc = dlogistic;
            }
        }
    }
    if (l < nrlayers)                           /* clean up if neuron allocation failed */
    {
        for (i = 0; i < l; i++)
            FreeNeuron(layers[i].neurons);
        FreeNeuronLayer(layers);
        FreeNeuronNet(net);
        va_end(nrneurons_list);
        return NULL;
    }
    /* first layer doesn't have weights to a lower layer */
    for (n = 0; n < layers[0].nrneurons; n++)
    {
        layers[0].neurons[n].nrweights = 0;
        layers[0].neurons[n].weights = NULL;
    }
    for (l = 1; l < nrlayers; l++)
    {
        for (n = 0; n < layers[l].nrneurons; n++)
        {
            /* the number of weights equals the number of neurons in the lower layer */
            if ((weights = AllocWeight(layers[l-1].nrneurons)) == NULL)
                break;
            layers[l].neurons[n].nrweights = layers[l-1].nrneurons;
            layers[l].neurons[n].weights = weights;
            for (w = 0; w < layers[l].neurons[n].nrweights; w++)
            {
                layers[l].neurons[n].weights[w].fromlayer = l-1;
                layers[l].neurons[n].weights[w].fromneuron = w;
                layers[l].neurons[n].weights[w].value = (FLOAT)(RANDOM(1000)-500)/1000;
                layers[l].neurons[n].weights[w].delta = 0;
                layers[l].neurons[n].weights[w].prvdelta = 0;
            }
        }
        if (n < layers[l].nrneurons)            /* clean up if weight allocation failed */
        {
            for (i = 0; i < n; i++)
                FreeWeight(layers[l].neurons[i].weights);
            for (j = 1; j < l; j++)
                for (i = 0; i < layers[j].nrneurons; i++)
                    FreeWeight(layers[j].neurons[i].weights);
            for (i = 0; i < nrlayers; i++)
                FreeNeuron(layers[i].neurons);
            FreeNeuronLayer(layers);
            FreeNeuronNet(net);
            va_end(nrneurons_list);
            return NULL;
        }
    }
    va_end(nrneurons_list);
    return net;
}

VOID DelNeuronNet(NeuronNet* net)
{
    LONG l, n;

    if (net == NULL)
    {
        /* WARNING(MODULE, NEURO_NETNULL); */
        return;
    }
    for (l = 0; l < net->nrlayers; l++)
    {
        for (n = 0; n < net->layers[l].nrneurons; n++)
            FreeWeight(net->layers[l].neurons[n].weights);
        FreeNeuron(net->layers[l].neurons);
    }
    FreeNeuronLayer(net->layers);
    FreeNeuronNet(net);
}

InOutput* NewNeuronInOutput(LONG size)
{
    return MALLOC(size*sizeof(InOutput));
}

VOID DelNeuronInOutput(InOutput* inout)
{
    FREE(inout);
}

VOID NeuronInOutputCpy(InOutput* dest, InOutput* source, LONG n)
{
    memcpy(dest, source, (INT)n*sizeof(InOutput));
}

/*
 * CalcNeuronNet: one full pass over the net.  Always runs the forward pass
 * and copies the results into out[]; when train is TRUE it also
 * backpropagates the error and recomputes the weight deltas, applying them
 * only when update is TRUE (lambda and mu are the learning constants
 * passed through to CalcWeights).
 */
LONG CalcNeuronNet(NeuronNet* net, InOutput in[], LONG nrin,
                   InOutput out[], LONG nrout,
                   BOOLEAN train, BOOLEAN update, FLOAT lambda, FLOAT mu)
{
    LONG rc;

    if (net == NULL)
    {
        ERROR(MODULE, NEURO_NETNULL);
        return NEURO_NETNULL;
    }
    if ((rc = SetInput(net, in, nrin)) != NEURO_OK)
    {
        ERROR(MODULE, rc);
        return rc;
    }
    if ((rc = CalcForward(net)) != NEURO_OK)
    {
        ERROR(MODULE, rc);
        return rc;
    }
    if (train)
    {
        if ((rc = SetDelta(net, out, nrout)) != NEURO_OK)
        {
            ERROR(MODULE, rc);
            return rc;
        }
        if ((rc = CalcBackward(net)) != NEURO_OK)
        {
            ERROR(MODULE, rc);
            return rc;
        }
        if ((rc = CalcWeights(net, update, lambda, mu)) != NEURO_OK)
        {
            ERROR(MODULE, rc);
            return rc;
        }
    }
    if ((rc = GetOutput(net, out, nrout)) != NEURO_OK)
    {
        ERROR(MODULE, rc);
        return rc;
    }
    return NEURO_OK;
}

LONG CalcNeuronNetError(NeuronNet* net, InOutput out[], LONG nrout, FLOAT* error)
{
    LONG rc;
    LONG i;
    FLOAT nerror;

    if (net == NULL)
    {
        ERROR(MODULE, NEURO_NETNULL);
        return NEURO_NETNULL;
    }
    *error = 0;
    for (i = 0; i < nrout; i++)
    {
        if ((rc = CalcNeuronError(net, &out[i], &nerror)) != NEURO_OK)
        {
            ERROR(MODULE, rc);
            return rc;
        }
        *error += nerror;
    }
    return NEURO_OK;
}
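/*
 * Usage sketch (not part of the original Beholder source): one training
 * step against the entry points above, assuming a 2-4-1 net built with
 *     NeuronNet* net = NewNeuronNet(1, (LONG)3, (LONG)2, (LONG)4, (LONG)1);
 * and released with DelNeuronNet(net).  The learning constants 0.5 and
 * 0.9 for lambda and mu are illustrative guesses, and TRUE is assumed to
 * come from the DNPAP headers alongside FALSE.
 */
#ifdef NEURO_EXAMPLE
static FLOAT ExampleTrainStep(NeuronNet* net, FLOAT x0, FLOAT x1, FLOAT target)
{
    InOutput in[2], out[1];
    FLOAT error = 0;

    in[0].inoutnr = 0;  in[0].value = x0;       /* input neuron 0 */
    in[1].inoutnr = 1;  in[1].value = x1;       /* input neuron 1 */
    out[0].inoutnr = 0; out[0].value = target;  /* desired output of neuron 0 */

    /* forward pass, backpropagation and weight update in one call;
       on return out[0].value holds the actual network output */
    if (CalcNeuronNet(net, in, 2, out, 1, TRUE, TRUE, (FLOAT)0.5, (FLOAT)0.9) != NEURO_OK)
        return error;

    /* restore the target and ask the net for 0.5*(target-output)^2 */
    out[0].value = target;
    CalcNeuronNetError(net, out, 1, &error);
    return error;
}
#endif /* NEURO_EXAMPLE */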
LONG CalcNeuronError(NeuronNet* net, InOutput* out, FLOAT* error)
{
    LONG l, n;

    if (net == NULL)
    {
        ERROR(MODULE, NEURO_NETNULL);
        return NEURO_NETNULL;
    }
    if (!((n = out->inoutnr) >= 0 && n < net->layers[l = net->nrlayers-1].nrneurons))
    {
        FATAL(MODULE, NEURO_NINDEX);
        return NEURO_NINDEX;
    }
    *error = (FLOAT)(0.5*sqr(out->value - net->layers[l].neurons[n].output));
    return NEURO_OK;
}

LONG SetInput(NeuronNet* net, InOutput in[], LONG nrin)
{
    LONG rc;
    LONG i;

    if (net == NULL)
    {
        ERROR(MODULE, NEURO_NETNULL);
        return NEURO_NETNULL;
    }
    for (i = 0; i < nrin; i++)
    {
        if ((rc = SetNeuronInput(net, &in[i])) != NEURO_OK)
        {
            ERROR(MODULE, rc);
            return rc;
        }
    }
    return NEURO_OK;
}

LONG SetNeuronInput(NeuronNet* net, InOutput* in)
{
    LONG n;
    Neuron* neuron;

    if (net == NULL)
    {
        ERROR(MODULE, NEURO_NETNULL);
        return NEURO_NETNULL;
    }
    if (!((n = in->inoutnr) >= 0 && n < net->layers[0].nrneurons))
    {
        FATAL(MODULE, NEURO_NINDEX);
        return NEURO_NINDEX;
    }
    neuron = &net->layers[0].neurons[n];
    neuron->activation = in->value;
    neuron->output = neuron->func(neuron);
    return NEURO_OK;
}

LONG CalcForward(NeuronNet* net)
{
    LONG l, n;
    Neuron* neuron;

    if (net == NULL)
    {
        ERROR(MODULE, NEURO_NETNULL);
        return NEURO_NETNULL;
    }
    for (l = 1; l < net->nrlayers; l++)
    {
        for (n = 0; n < net->layers[l].nrneurons; n++)
        {
            neuron = &net->layers[l].neurons[n];
            neuron->activation = NeuronCalcActivation(net, neuron);
            neuron->output = neuron->func(neuron);
            neuron->delta = 0;          /* cleared for the next backward pass */
        }
    }
    return NEURO_OK;
}

FLOAT NeuronCalcActivation(NeuronNet* net, Neuron* neuron)
{
    FLOAT activation, input;
    LONG w;

    activation = 0;
    for (w = 0; w < neuron->nrweights; w++)
    {
        input = GetNeuronOutput2(net, neuron->weights[w].fromlayer,
                                 neuron->weights[w].fromneuron);
        activation += neuron->weights[w].value*input;
    }
    return activation;
}

LONG SetDelta(NeuronNet* net, InOutput out[], LONG nrout)
{
    LONG rc;
    LONG i;

    if (net == NULL)
    {
        ERROR(MODULE, NEURO_NETNULL);
        return NEURO_NETNULL;
    }
    for (i = 0; i < nrout; i++)
    {
        if ((rc = SetNeuronDelta(net, &out[i])) != NEURO_OK)
        {
            ERROR(MODULE, rc);
            return rc;
        }
    }
    return NEURO_OK;
}

LONG SetNeuronDelta(NeuronNet* net, InOutput* out)
{
    LONG l, n;
    Neuron* neuron;

    if (net == NULL)
    {
        ERROR(MODULE, NEURO_NETNULL);
        return NEURO_NETNULL;
    }
    if (!((n = out->inoutnr) >= 0 && n < net->layers[l = net->nrlayers-1].nrneurons))
    {
        FATAL(MODULE, NEURO_NINDEX);
        return NEURO_NINDEX;
    }
    neuron = &net->layers[l].neurons[n];
    neuron->delta = neuron->dfunc(neuron)*(out->value-neuron->output);
    return NEURO_OK;
}

LONG CalcBackward(NeuronNet* net)
{
    LONG l, n, w;
    Neuron* neuron;
    Weight* weight;

    if (net == NULL)
    {
        ERROR(MODULE, NEURO_NETNULL);
        return NEURO_NETNULL;
    }
    for (l = net->nrlayers-1; l > 1; l--)   /* stop before layer 1: no deltas for the input layer */
    {
        for (n = 0; n < net->layers[l].nrneurons; n++)
        {
            neuron = &net->layers[l].neurons[n];
            for (w = 0; w < neuron->nrweights; w++)
            {
                weight = &net->layers[l].neurons[n].weights[w];
                AddNeuronDelta(net, weight->fromlayer, weight->fromneuron,
                               GetNeuronDerivOutput2(net, weight->fromlayer,
                                                     weight->fromneuron)
                               *weight->value*neuron->delta);
            }
        }
    }
    return NEURO_OK;
}

VOID AddNeuronDelta(NeuronNet* net, LONG l, LONG n, FLOAT value)
{
    /* The listing is cut off at this point; the one-line body below is a
       reconstruction from the call site in CalcBackward and the reset in
       CalcForward, not the original DNPAP text. */
    net->layers[l].neurons[n].delta += value;
}
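/*
 * For reference, the maths implemented by the pass above, with t the
 * target value and o = f(a) a neuron's output (the bias is presumably
 * applied inside f itself, since func() receives the whole Neuron):
 *
 *   activation    a          = sum over w of  weight.value * o_from   (NeuronCalcActivation)
 *   neuron error  E          = 0.5 * (t - o)^2                        (CalcNeuronError)
 *   output delta  delta      = f'(a) * (t - o)                        (SetNeuronDelta)
 *   hidden delta  delta_from += f'(a_from) * weight.value * delta     (CalcBackward)
 */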