📄 neuro_k.c
{
    if (net == NULL) { ERROR(MODULE, NEURO_NETNULL); return; }
    if (!(l >= 0 && l < net->nrlayers)) { FATAL(MODULE, NEURO_LINDEX); return; }
    if (!(n >= 0 && n < net->layers[l].nrneurons)) { FATAL(MODULE, NEURO_NINDEX); return; }
    net->layers[l].neurons[n].delta += value;
    return;
}

/* Accumulate gradient steps for all biases and weights; when 'update' is
   TRUE, apply them with learning rate 'lambda' and momentum factor 'mu',
   then clip the results to the allowed weight range. */
LONG CalcWeights(NeuronNet* net, BOOLEAN update, FLOAT lambda, FLOAT mu)
{
    LONG l, n, w;
    FLOAT input;
    Neuron* neuron;
    Weight* weight;

    if (net == NULL) { ERROR(MODULE, NEURO_NETNULL); return NEURO_NETNULL; }
    for (l = 1; l < net->nrlayers; l++) {            /* skip the input layer */
        for (n = 0; n < net->layers[l].nrneurons; n++) {
            neuron = &net->layers[l].neurons[n];
            neuron->dbias += lambda*neuron->delta;
            if (update) {
                neuron->bias += (1-mu)*neuron->dbias + mu*neuron->prvdbias;
                neuron->prvdbias = neuron->dbias;
                neuron->dbias = 0;                   /* reset delta bias after update */
                neuron->bias = ClipWeight(neuron->bias);
            }
            for (w = 0; w < neuron->nrweights; w++) {
                weight = &neuron->weights[w];
                input = GetNeuronOutput2(net, weight->fromlayer, weight->fromneuron);
                weight->delta += lambda*neuron->delta*input;
                if (update) {
                    weight->value += (1-mu)*weight->delta + mu*weight->prvdelta;
                    weight->prvdelta = weight->delta;
                    weight->delta = 0;               /* reset delta weight after update */
                    weight->value = ClipWeight(weight->value);
                }
            }
        }
    }
    return NEURO_OK;
}

/* Clamp a weight or bias into [NEURO_MINWEIGHT, NEURO_MAXWEIGHT]. */
FLOAT ClipWeight(FLOAT value)
{
    if (value > NEURO_MAXWEIGHT) value = NEURO_MAXWEIGHT;
    else if (value < NEURO_MINWEIGHT) value = NEURO_MINWEIGHT;
    return value;
}

/* Map a normalized value into the network's output range and clamp. */
FLOAT NeuronValue(FLOAT value)
{
    value = NEURO_MAXMINOUTPUT*value + NEURO_MINOUTPUT;
    if (value > NEURO_MAXOUTPUT) value = NEURO_MAXOUTPUT;
    else if (value < NEURO_MINOUTPUT) value = NEURO_MINOUTPUT;
    return value;
}

/* Inverse of NeuronValue: map an output back to a normalized value. */
FLOAT NeuronRealValue(FLOAT value)
{
    return (value - NEURO_MINOUTPUT)/NEURO_MAXMINOUTPUT;
}

LONG GetOutput(NeuronNet* net, InOutput out[], LONG nrout)
{
    LONG rc;
    LONG i;

    if (net == NULL) { ERROR(MODULE, NEURO_NETNULL); return NEURO_NETNULL; }
    for (i = 0; i < nrout; i++) {
        if ((rc = GetNeuronOutput(net, &out[i])) != NEURO_OK) { ERROR(MODULE, rc); return rc; }
    }
    return NEURO_OK;
}

LONG GetNeuronOutput(NeuronNet* net, InOutput* out)
{
    LONG l, n;

    if (net == NULL) { ERROR(MODULE, NEURO_NETNULL); return NEURO_NETNULL; }
    l = net->nrlayers - 1;                           /* output layer */
    n = out->inoutnr;
    if (!(n >= 0 && n < net->layers[l].nrneurons)) { FATAL(MODULE, NEURO_NINDEX); return NEURO_NINDEX; }
    out->value = net->layers[l].neurons[n].output;
    return NEURO_OK;
}

FLOAT GetNeuronDerivOutput2(NeuronNet* net, LONG l, LONG n)
{
    Neuron* neuron;

    if (net == NULL) { ERROR(MODULE, NEURO_NETNULL); return NEURO_MINOUTPUT; }
    if (!(l >= 0 && l < net->nrlayers)) { FATAL(MODULE, NEURO_LINDEX); return NEURO_MINOUTPUT; }
    if (!(n >= 0 && n < net->layers[l].nrneurons)) { FATAL(MODULE, NEURO_NINDEX); return NEURO_MINOUTPUT; }
    neuron = &net->layers[l].neurons[n];
    return neuron->dfunc(neuron);                    /* derivative of the activation function */
}

FLOAT GetNeuronOutput2(NeuronNet* net, LONG l, LONG n)
{
    if (net == NULL) { ERROR(MODULE, NEURO_NETNULL); return NEURO_MINOUTPUT; }
    if (!(l >= 0 && l < net->nrlayers)) { FATAL(MODULE, NEURO_LINDEX); return NEURO_MINOUTPUT; }
    if (!(n >= 0 && n < net->layers[l].nrneurons)) { FATAL(MODULE, NEURO_NINDEX); return NEURO_MINOUTPUT; }
    return net->layers[l].neurons[n].output;
}

#ifdef NEURODEBUG
VOID PrintNeuronNet(NeuronNet* net)
{
    LONG l, n, w;

    if (net == NULL) { ERROR(MODULE, NEURO_NETNULL); return; }
    printf("The net has %d layers\n", net->nrlayers);
    for (l = 0; l < net->nrlayers; l++) {
        PrintNeuronLayer(&(net->layers[l]), l);
        for (n = 0; n < net->layers[l].nrneurons; n++) {
            PrintNeuron(&(net->layers[l].neurons[n]), n, l);
            for (w = 0; w < net->layers[l].neurons[n].nrweights; w++)
                PrintWeight(&(net->layers[l].neurons[n].weights[w]), w, n, l);
        }
    }
}
#endif
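/*
 * A minimal sketch (not part of the original source): the update rule in
 * CalcWeights above applies value += (1-mu)*delta + mu*prvdelta, i.e. a
 * plain gradient step blended with a momentum term carried over from the
 * previous update. The hypothetical helper below replays that arithmetic
 * on plain doubles so the behaviour can be checked in isolation; for
 * example, with mu = 0.25, delta = 0.2 and prvdelta = 0.4 the applied
 * step is 0.75*0.2 + 0.25*0.4 = 0.25.
 */
#ifdef NEURODEBUG
static double MomentumStep(double value, double delta, double prvdelta, double mu)
{
    return value + (1.0 - mu)*delta + mu*prvdelta;   /* same rule as CalcWeights */
}
#endif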
Weight* AllocWeight(LONG number)
{
    Weight* weights;

    weights = MALLOC(number*sizeof(Weight));
    DEBUG(weights == NULL);
    return weights;
}

VOID FreeWeight(Weight* weight)
{
    FREE(weight);
}

#ifdef NEURODEBUG
VOID PrintWeight(Weight* weight, LONG w, LONG n, LONG l)
{
    printf("weight %d from neuron %d at layer %d to neuron %d at layer %d has value %f\n",
           w, weight->fromneuron, weight->fromlayer, n, l, weight->value);
}
#endif

Neuron* AllocNeuron(LONG number)
{
    Neuron* neurons;

    neurons = MALLOC(number*sizeof(Neuron));
    DEBUG(neurons == NULL);
    return neurons;
}

VOID FreeNeuron(Neuron* neuron)
{
    FREE(neuron);
}

#ifdef NEURODEBUG
VOID PrintNeuron(Neuron* neuron, LONG n, LONG l)
{
    printf("neuron %d at layer %d has %d weights and its bias is %f\n",
           n, l, neuron->nrweights, neuron->bias);
}
#endif

NeuronLayer* AllocNeuronLayer(LONG number)
{
    NeuronLayer* layers;

    layers = MALLOC(number*sizeof(NeuronLayer));
    DEBUG(layers == NULL);
    return layers;
}

VOID FreeNeuronLayer(NeuronLayer* layer)
{
    FREE(layer);
}

#ifdef NEURODEBUG
VOID PrintNeuronLayer(NeuronLayer* layer, LONG l)
{
    printf("layer %d has %d neurons\n", l, layer->nrneurons);
}
#endif

NeuronNet* AllocNeuronNet(VOID)
{
    NeuronNet* net;

    net = MALLOC(sizeof(NeuronNet));
    DEBUG(net == NULL);
    return net;
}

VOID FreeNeuronNet(NeuronNet* net)
{
    FREE(net);
}

FLOAT GetWeight(NeuronNet* net, LONG l, LONG n, LONG w)
{
    if (net == NULL) { ERROR(MODULE, NEURO_NETNULL); return NEURO_MINWEIGHT; }
    if (!(l >= 0 && l < net->nrlayers)) { FATAL(MODULE, NEURO_LINDEX); return NEURO_MINWEIGHT; }
    if (!(n >= 0 && n < net->layers[l].nrneurons)) { FATAL(MODULE, NEURO_NINDEX); return NEURO_MINWEIGHT; }
    if (!(w >= 0 && w < net->layers[l].neurons[n].nrweights)) { FATAL(MODULE, NEURO_WINDEX); return NEURO_MINWEIGHT; }
    return net->layers[l].neurons[n].weights[w].value;
}

VOID SetWeight(NeuronNet* net, LONG l, LONG n, LONG w, FLOAT value)
{
    if (net == NULL) { ERROR(MODULE, NEURO_NETNULL); return; }
    if (!(l >= 0 && l < net->nrlayers)) { FATAL(MODULE, NEURO_LINDEX); return; }
    if (!(n >= 0 && n < net->layers[l].nrneurons)) { FATAL(MODULE, NEURO_NINDEX); return; }
    if (!(w >= 0 && w < net->layers[l].neurons[n].nrweights)) { FATAL(MODULE, NEURO_WINDEX); return; }
    net->layers[l].neurons[n].weights[w].value = value;
}

/* Logistic activation, scaled to [NEURO_MINOUTPUT, NEURO_MAXOUTPUT]. */
FLOAT logistic(Neuron* neuron)
{
    return (FLOAT)(NEURO_MAXMINOUTPUT/(1.0 + exp(-(neuron->activation + neuron->bias))) + NEURO_MINOUTPUT);
}

/* Derivative of the scaled logistic, expressed via the neuron's output. */
FLOAT dlogistic(Neuron* neuron)
{
    return (FLOAT)(0.25*NEURO_MAXMINOUTPUT*(1 + neuron->output)*(1 - neuron->output));
}

/* Hard threshold activation (its derivative is zero almost everywhere). */
FLOAT step(Neuron* neuron)
{
    return neuron->activation > 0 ? NEURO_MAXOUTPUT : NEURO_MINOUTPUT;
}
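/*
 * A sketch under a stated assumption (not in the original source):
 * dlogistic above hard-codes an output range of [-1, 1], i.e. it is the
 * exact derivative of logistic only when NEURO_MINOUTPUT == -1,
 * NEURO_MAXOUTPUT == 1 and NEURO_MAXMINOUTPUT == 2. Then output =
 * 2/(1+exp(-a)) - 1 and the analytic derivative is (1 - output*output)/2,
 * which matches 0.25*NEURO_MAXMINOUTPUT*(1+output)*(1-output). The
 * hypothetical check below compares that against a central finite
 * difference of the same scaled logistic.
 */
#ifdef NEURODEBUG
static double DLogisticCheck(double a)
{
    double h  = 1e-6;
    double fp = 2.0/(1.0 + exp(-(a + h))) - 1.0;     /* scaled logistic at a+h */
    double fm = 2.0/(1.0 + exp(-(a - h))) - 1.0;     /* scaled logistic at a-h */
    return (fp - fm)/(2.0*h);                        /* ~ (1 - output^2)/2 */
}
#endif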
FLOAT dstep(Neuron* neuron)
{
    return 0;
}

VOID NeuronInit(VOID)
{
    initialized = TRUE;
}

#ifdef NEURODEBUG
int main(int argc, char* argv[])
{
    NeuronNet* net;
    NeuronNet* net2;
#define NRIN 8
#define NROUT 4
    InOutput in[NRIN];
    InOutput tout[NROUT];
    InOutput out;
    FLOAT lambda, mu, error, perror;
    LONG p;

    net = NewNeuronNet(333, 3, 8, 4, 2);
    if (net == NULL) return -1;
    PrintNeuronNet(net);
    DelNeuronNet(net);

    net2 = NewNeuronNet(0, 3, 2, 2, 1);
    if (net2 == NULL) return -1;
    PrintNeuronNet(net2);

    /* the following statements fill 'in' and 'tout' with */
    /* the input/target values for the famous XOR test */
    in[0].inoutnr = 0; in[0].value = NEURO_MINOUTPUT;
    in[1].inoutnr = 1; in[1].value = NEURO_MINOUTPUT;
    in[2].inoutnr = 0; in[2].value = NEURO_MINOUTPUT;
    in[3].inoutnr = 1; in[3].value = NEURO_MAXOUTPUT;
    in[4].inoutnr = 0; in[4].value = NEURO_MAXOUTPUT;
    in[5].inoutnr = 1; in[5].value = NEURO_MINOUTPUT;
    in[6].inoutnr = 0; in[6].value = NEURO_MAXOUTPUT;
    in[7].inoutnr = 1; in[7].value = NEURO_MAXOUTPUT;
    tout[0].inoutnr = 0; tout[0].value = NEURO_MINOUTPUT;
    tout[1].inoutnr = 0; tout[1].value = NEURO_MAXOUTPUT;
    tout[2].inoutnr = 0; tout[2].value = NEURO_MAXOUTPUT;
    tout[3].inoutnr = 0; tout[3].value = NEURO_MINOUTPUT;

    lambda = 1.0;
    mu = 0.25;
    do {
        error = 0;
        for (p = 0; p < NRIN; p += 2) {              /* one input pair per pattern */
            out.inoutnr = tout[p/2].inoutnr;
            out.value = tout[p/2].value;
            CalcNeuronNet(net2, &in[p], 2, &out, 1, TRUE,
                          p == NRIN-2 ? TRUE : FALSE, /* update weights after the last pattern */
                          lambda, mu);
            CalcNeuronNetError(net2, &tout[p/2], 1, &perror);
            printf("neuron output for pattern %d = %f, error = %f\n", p/2, out.value, perror);
            error += perror;
        }
        printf("error = %f\n", error);
    } while (error > 0.1);
    DelNeuronNet(net2);
    return NEURO_OK;
}
#endif
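/*
 * Usage sketch (hypothetical helper, not in the original source): once
 * the training loop in main() has converged, a single pattern can be run
 * through the net and its one output neuron read back. Passing FALSE for
 * both flags and 0 for lambda/mu assumes CalcNeuronNet then performs a
 * forward pass only; that is an assumption inferred from the training
 * loop above, not a documented guarantee of the API.
 */
#ifdef NEURODEBUG
static FLOAT QueryXor(NeuronNet* net, FLOAT a, FLOAT b)
{
    InOutput q[2], r;

    q[0].inoutnr = 0; q[0].value = a;                /* first input neuron */
    q[1].inoutnr = 1; q[1].value = b;                /* second input neuron */
    r.inoutnr = 0; r.value = 0;                      /* single output neuron */
    CalcNeuronNet(net, q, 2, &r, 1, FALSE, FALSE, 0, 0);
    GetOutput(net, &r, 1);
    return r.value;                                  /* ~NEURO_MAXOUTPUT when a != b */
}
#endif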