
📄 layer.cpp

📁 Also source code for a genetic algorithm
💻 CPP
/***************************************************************************
                          layer.cpp  -  description
                             -------------------
    begin                : Wed Apr 11 2001
    copyright            : (C) 2001 by Matt Grover
    email                : mpgrover@sourceforge.net
 ***************************************************************************/

/***************************************************************************
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 ***************************************************************************/

#include <iostream>
#include <math.h>
#include <stdlib.h>   // for rand() / RAND_MAX used in ConnectLayers()

#include "layer.h"
#include <amygdala/utilities.h>

using namespace std;

Layer::Layer()
{
    layerId = 0;
    learningConst = 0.0;
    memTimeConst = 0.0;
    synTimeConst = 0.0;
    restPtnl = 0.0;
    thresholdPtnl = 0.0;
    parentNet = 0;
    percentInhib = 0.0;
    synapticDelay = 0;
    layerType = HIDDENLAYER;
    constantsSet = false;
}

Layer::~Layer()
{
}

void Layer::AddNeuron(Neuron* nrn)
{
    nrnLayer.push_back(nrn);
    // Add the neuron to the parent network if the pointer
    // has been set.  If not, the neurons will all be added
    // later on in SetLayerParent(...)
    if (parentNet) {
        parentNet->AddNeuron(nrn->GetLayerType(), nrn);
    }
    // Set the constant values if they have not yet been set
    if (!constantsSet) {
        layerType = nrn->GetLayerType();
        learningConst = nrn->GetLearningConst();
        memTimeConst = nrn->GetMembraneConst();
        synTimeConst = nrn->GetSynapticConst();
        restPtnl = nrn->GetRestPotential();
        thresholdPtnl = nrn->GetThresholdPotential();
    }
}

void Layer::SetLayerParent(Network* parent)
{
    unsigned int i;
    parentNet = parent;
    for (i=0; i<nrnLayer.size(); i++) {
        parentNet->AddNeuron(layerType, nrnLayer[i]);
    }
}

bool Layer::SetLayerConstants(LayerConstants lconst)
{
    if (constantsSet) {
        cout << "The layer constants have already been set for layer " <<
                layerId << ".\n";
        return false;
    }
    layerId = lconst.layerId;
    layerType = lconst.type;
    learningConst = lconst.learningConst;
    memTimeConst = lconst.membraneTimeConst;
    synTimeConst = lconst.synapticTimeConst;
    restPtnl = lconst.restPtnl;
    thresholdPtnl = lconst.thresholdPtnl;
    constantsSet = true;
    return true;
}

LayerConstants Layer::GetLayerConstants()
{
    LayerConstants lconst;
    lconst.layerId = layerId;
    lconst.type = layerType;
    lconst.learningConst = learningConst;
    lconst.membraneTimeConst = memTimeConst;
    lconst.synapticTimeConst = synTimeConst;
    lconst.restPtnl = restPtnl;
    lconst.thresholdPtnl = thresholdPtnl;
    return lconst;
}

void Layer::SetPercentInhibitory( float percent )
{
    unsigned int i;
    float randVal;
    if ( (percentInhib != 0.0) || (!Neuron::EnforceSign()) ) {
        // percentInhib has either been set for this layer or
        // sign enforcement is not in use
        return;
    }
    if (percent < 0.0) {
        percentInhib = 0.0;
    }
    else if (percent > 100.0) {
        percentInhib = 100.0;
    }
    else {
        percentInhib = percent;
    }
    for (i=0; i<nrnLayer.size(); i++) {
        randVal = Utilities::RandPercent();
        if ( randVal < percentInhib ) {
            nrnLayer[i]->Inhibitory(true);
        }
        else {
            nrnLayer[i]->Inhibitory(false);
        }
    }
}

bool Layer::ConnectionInhibitory(float& pctInhibitory)
{
    float randVal;
    randVal = Utilities::RandPercent();
    if ( randVal < pctInhibitory ) {
        return true;
    }
    else {
        return false;
    }
}

// pctInhibitory is assumed to default to 0.0 in the declarations in
// layer.h; a default argument may not be repeated in an out-of-line
// definition, so none is given here.
bool Layer::ConnectLayers(Layer* output, UniConnectType parms, float pctInhibitory)
{
    // Assume that layers have already been added to the network.
    // FIXME: This will have to be corrected in the future, but will work
    // to get us started.
    unsigned int i, pre, post;
    float weight;
    Neuron* outNrn;
    Layer::iterator outItr;
    bool enforceSign = Neuron::EnforceSign();
    if ( parms.maxWeight > 1.0 ) {
        parms.maxWeight = 1.0;
    }
    if ( parms.pctConnect > 100.0 ) {
        parms.pctConnect = 100.0;
    }
    // initialize outItr
    outItr = output->begin();
    while (outItr != output->end()) {
        outNrn = *(outItr++);
        // check to see if this neuron will receive a connection.
        for (i=0; i<nrnLayer.size(); i++) {
            if ( Utilities::RandPercent() < parms.pctConnect ) {
                weight = ( float(rand()) / float(RAND_MAX) ) * parms.maxWeight;
                if ( weight > parms.maxWeight ) {
                    weight = parms.maxWeight;
                }
                else if ( weight < 0.0 ) {
                    weight = 1e-10;
                }
                if (enforceSign) {
                    if ( nrnLayer[i]->Inhibitory() ) {
                        weight = ( -1.0 * weight );
                    }
                }
                else if ( ConnectionInhibitory(pctInhibitory) ) {
                    weight = ( -1.0 * weight );
                }
                // get the neuron IDs
                pre = nrnLayer[i]->GetID();
                post = outNrn->GetID();
                parentNet->ConnectNeurons(pre, post, weight, synapticDelay);
            }
        }
    }
    return true;
}

bool Layer::ConnectLayers(Layer* output, GaussConnectType parms, float pctInhibitory)
{
    // Assume that layers have already been added to the network.
    // FIXME: This will have to be corrected in the future, but will work
    // to get us started.
    unsigned int i, pre, post;
    float weight;
    Neuron* outNrn;
    Layer::iterator outItr;
    bool enforceSign = Neuron::EnforceSign();
    if ( parms.meanWeight < 0.0 ) {
        cerr << "GaussConnectType.meanWeight must be > 0!\n";
        return false;
    }
    // initialize outItr
    outItr = output->begin();
    while (outItr != output->end()) {
        outNrn = *(outItr++);
        for (i=0; i<nrnLayer.size(); i++) {
            // check to see if this neuron will receive a connection from
            // nrnLayer[i]
            if ( Utilities::RandPercent() < parms.pctConnect ) {
                weight = Utilities::GaussRand(parms.meanWeight, parms.stdDev);
                if ( weight > 1.0 ) {
                    weight = 1.0;
                }
                else if ( weight < 0.0 ) {
                    weight = 1e-10;
                }
                if (enforceSign) {
                    if ( nrnLayer[i]->Inhibitory() ) {
                        weight = ( -1.0 * weight );
                    }
                }
                else if ( ConnectionInhibitory(pctInhibitory) ) {
                    weight = ( -1.0 * weight );
                }
                // get the neuron IDs
                pre = nrnLayer[i]->GetID();
                post = outNrn->GetID();
                parentNet->ConnectNeurons(pre, post, weight, synapticDelay);
            }
        }
    }
    return true;
}
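Both ConnectLayers() overloads combine three random draws per pre/post neuron pair: whether the pair is connected at all (parms.pctConnect), what weight the connection gets (uniform in [0, maxWeight] for UniConnectType, Gaussian around meanWeight for GaussConnectType, clamped to at most 1.0 and kept positive), and whether the connection is inhibitory (pctInhibitory, consulted only when sign enforcement is off). The snippet below is a minimal standalone sketch of the uniform variant's sampling, outside the Layer class; RandPercent() is a plain rand()-based stand-in for Amygdala's Utilities::RandPercent(), and all parameter values are invented for illustration.

#include <stdio.h>
#include <stdlib.h>
#include <time.h>

// Stand-in for Utilities::RandPercent(): uniform value in [0, 100).
static float RandPercent()
{
    return 100.0f * float(rand()) / (float(RAND_MAX) + 1.0f);
}

int main()
{
    srand(unsigned(time(0)));

    const float pctConnect = 50.0f;     // chance each pre/post pair is wired
    const float maxWeight = 0.5f;       // cap on the uniform weight draw
    const float pctInhibitory = 20.0f;  // chance a made connection is inhibitory

    // Sample 10 candidate connections the same way the uniform
    // ConnectLayers() overload does.
    for (int i = 0; i < 10; i++) {
        if (RandPercent() >= pctConnect) {
            printf("pair %d: no connection\n", i);
            continue;
        }
        float weight = (float(rand()) / float(RAND_MAX)) * maxWeight;
        if (RandPercent() < pctInhibitory) {
            weight = -1.0f * weight;    // inhibitory source: flip the sign
        }
        printf("pair %d: weight %+.4f\n", i, weight);
    }
    return 0;
}

Each run wires roughly half of the ten pairs, with about a fifth of the resulting weights negative.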
