simplenet.cpp
/***************************************************************************
                          simplenet.cpp  -  description
                             -------------------
    begin                : Fri Nov 21 2003
    copyright            : (C) 2003 by Rudiger Koch
    email                : rkoch@rkoch.org
 ***************************************************************************/
/***************************************************************************
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify *
 *   it under the terms of the GNU General Public License as published by *
 *   the Free Software Foundation; either version 2 of the License, or    *
 *   (at your option) any later version.                                  *
 *                                                                         *
 ***************************************************************************/

#include <amygdala/simplenet.h>
#include <amygdala/netloader.h>
#include <amygdala/genome.h>
#include <amygdala/neuron.h>
#include <amygdala/network.h>
#include <amygdala/logging.h>
#include <amygdala/physicalproperties.h>
#include <amygdala/neuronproperties.h>
#include <amygdala/basicneuron.h>
#include <amygdala/flatlayer.h>
#include <amygdala/nconnector.h>
#include <amygdala/synapse.h>
#include <amygdala/axon.h>
#include <amygdala/axonnode.h>
#include <amygdala/inputneuron.h>
#include <amygdala/utilities.h>

#include <vector>
#include <map>
#include <string>
#include <cmath>
#include <iostream>   // std::cout / std::cerr used below
#include <stdexcept>  // std::runtime_error

// Euclidean length of the vector (X, Y, Z)
#define CDIAG(X,Y,Z) sqrt((float(X))*(float(X)) + (float(Y))*(float(Y)) + (float(Z))*(float(Z)))

using namespace Amygdala;
using namespace std;

enum neurontype { INPUTNEURON, OUTPUTNEURON, HIDDENNEURON };

bool SimpleNet::cube_iterator::end = true;

// Pick the axon type: for every emitter the neuron is sensitive to, compute an
// inverse-square strength at the soma position and keep the strongest type
// that is at or above the sensitivity threshold.
void SimpleNet::GrowNeuron::CalculateType(const std::vector<Emitter> & sensitivities)
{
    float threshold = sensitivityThreshold;
    axonType = 0;
    vector<float> strengths(8, 0.0f);
    for (vector<Emitter>::const_iterator it = sensitivities.begin(); it != sensitivities.end(); it++) {
        Emitter emitter = *it;
        if(emitter.type > 7)
            throw runtime_error("emitter type larger than 7 is not allowed");
        PhysicalProperties * pProps = neuron->Properties()->GetPhysicalProps();
        float distance = CDIAG(emitter.x - pProps->GetX(),
                               emitter.y - pProps->GetY(),
                               emitter.z - pProps->GetZ());
        strengths[emitter.type] = emitter.strength / (distance*distance);
    }
    for (unsigned char type = 0; type < 8; type++) {
        if(strengths[type] >= threshold) {
            axonType = type;
            threshold = strengths[type];
        }
    }
    LOGGER(5, "Neuron " << neuron->GetId() << " has type " << (unsigned int) axonType);
}

// Advance the axon tip one step of length 0.1 towards the attractors of this
// axon's type. Growth stops when the summed attraction no longer increases,
// or when no attractor of the axon's type exists.
bool SimpleNet::GrowNeuron::GrowAxon(const std::vector<Emitter> & attractors)
{
    if(!growing) return growing;

    float strength1 = 0, strength2 = 0;
    float growvector[] = {0., 0., 0.};
    bool haveType = false;
    LOGGER(6, "Growing axon id = " << neuron->GetId() << " type " << (unsigned int)axonType);

    for(std::vector<Emitter>::const_iterator attrIt = attractors.begin(); attrIt != attractors.end(); attrIt++) {
        Emitter attractor = *attrIt;
        if(attractor.type == axonType) {
            haveType = true;
            // add to the grow vector
            float distance = CDIAG(attractor.x - axonTip.x,
                                   attractor.y - axonTip.y,
                                   attractor.z - axonTip.z);
            growvector[0] += attractor.strength * (attractor.x - axonTip.x) / distance;
            growvector[1] += attractor.strength * (attractor.y - axonTip.y) / distance;
            growvector[2] += attractor.strength * (attractor.z - axonTip.z) / distance;
            strength1 += attractor.strength / distance;
        }
    }
    if(!haveType) {
        growing = false;
        return growing;
    }

    // scale the growth vector to a length of 0.1 and move the axon tip
    float len = CDIAG(growvector[0], growvector[1], growvector[2]);
    for (unsigned int i = 0; i < 3; i++) {
        growvector[i] *= (0.1 / len);
    }
    axonTip.x += growvector[0];
    axonTip.y += growvector[1];
    axonTip.z += growvector[2];
    axon.push_back(axonTip);

    // now figure out if growth is finished
    for(std::vector<Emitter>::const_iterator attrIt = attractors.begin(); attrIt != attractors.end(); attrIt++) {
        Emitter attractor = *attrIt;
        if(attractor.type == axonType) {
            float distance = CDIAG(attractor.x - axonTip.x,
                                   attractor.y - axonTip.y,
                                   attractor.z - axonTip.z);
            strength2 += attractor.strength / distance;
        }
    }
    if (strength1 >= strength2) growing = false;

    TryMkSynapse();
    if(dynamic_cast<InputNeuron*>(neuron) == NULL) {
        TryMkOutputSynapses();
    }
    return growing;
}

// Connect the axon tip to every output neuron within a distance of 2.0 that
// is not yet connected to this neuron.
void SimpleNet::GrowNeuron::TryMkOutputSynapses()
{
    for(neuron_iterator it = simplenet->outputNeurons.begin(); it != simplenet->outputNeurons.end(); it++) {
        GrowNeuron *postGrowNeuron = *it;
        Neuron *postNeuron = postGrowNeuron->neuron;
        if(AlreadyConnected(postNeuron)) continue;
        PhysicalProperties * pProps = postNeuron->Properties()->GetPhysicalProps();
        float distance = CDIAG(pProps->GetX() - axonTip.x,
                               pProps->GetY() - axonTip.y,
                               pProps->GetZ() - axonTip.z);
        if(distance < 2.0) {
            SynLoc synloc = { { axonTip.x, axonTip.y, axonTip.z }, postNeuron };
            synLocs.push_back(synloc);
            LOGGER(4, "Adding synapse " << neuron->GetId() << " --> " << postNeuron->GetId());
            ConnectorRegistry & cr = ConnectorRegistry::GetRegistry();
            //FIXME: StaticSynapse hardcoded
            NConnector * connector = cr.GetConnector("StaticSynapse");
            StaticSynapseProperties * sProps =
                dynamic_cast<StaticSynapseProperties *>(connector->GetDefaultProperties());
            //FIXME: values hardcoded
            sProps->SetDelay(1000);
            sProps->SetWeight(0.5);
            connector->Connect(neuron, static_cast<SpikingNeuron*>(postNeuron), *sProps);
        }
    }
}

// True if a synapse from this neuron to postNeuron has already been recorded.
bool SimpleNet::GrowNeuron::AlreadyConnected(Neuron * postNeuron)
{
    LOGGER(5, "Checking for synapse " << neuron->GetId() << " --> " << postNeuron->GetId());
    for (synloc_iterator sIt = synLocs.begin(); sIt != synLocs.end(); sIt++) {
        if(postNeuron->GetId() == (*sIt).neuron->GetId()) {
            LOGGER(5, "Already connected: " << neuron->GetId() << " --> " << postNeuron->GetId());
            return true;
        }
    }
    return false;
}

// Look up the cube cell under the axon tip and, if it holds another neuron
// that is not yet connected, create a synapse to it.
void SimpleNet::GrowNeuron::TryMkSynapse()
{
    // map the axon tip position into cube indices (the cube spans [-radius, radius])
    int index[3];
    index[0] = (int) (axonTip.x + radius + 0.5);
    index[1] = (int) (axonTip.y + radius + 0.5);
    index[2] = (int) (axonTip.z + radius + 0.5);
    for (int i = 0; i < 3; i++) {
        if(index[i] < 0 || index[i] > 2*radius) {
            cerr << "Warning: Index out of range: " << __FILE__ << ":" << __LINE__ << endl;
            return;
        }
    }
    GrowNeuron *postGrowNeuron = simplenet->cube[index[0]][index[1]][index[2]];
    if(postGrowNeuron == NULL) return;
    Neuron *postNeuron = postGrowNeuron->neuron;
    if(postNeuron == neuron) return;        // no synapses to myself
    if(AlreadyConnected(postNeuron)) return;

    SynLoc synloc = { { axonTip.x, axonTip.y, axonTip.z }, postNeuron };
    synLocs.push_back(synloc);
    LOGGER(4, "Adding synapse " << neuron->GetId()
              << " Axonpos " << axonTip.x << "," << axonTip.y << "," << axonTip.z
              << " --> " << postNeuron->GetId()
              << " at " << postNeuron->Properties()->GetPhysicalProps()->GetX()
              << "," << postNeuron->Properties()->GetPhysicalProps()->GetY()
              << "," << postNeuron->Properties()->GetPhysicalProps()->GetZ());
    LOGGER(5, "Adding synapse at index " << index[0] << ", " << index[1] << ", " << index[2]);

    ConnectorRegistry & cr = ConnectorRegistry::GetRegistry();
    //FIXME: StaticSynapse hardcoded
    NConnector * connector = cr.GetConnector("StaticSynapse");
    StaticSynapseProperties * sProps =
        dynamic_cast<StaticSynapseProperties *>(connector->GetDefaultProperties());
    //FIXME:
    sProps->SetDelay(2000);
    sProps->SetWeight(0.5);
    connector->Connect(neuron, static_cast<SpikingNeuron*>(postNeuron), *sProps);
}

// Place the axon tip at the soma, determine the axon type and decide whether
// this axon grows at all.
void SimpleNet::GrowNeuron::InitAxon(const std::vector<Emitter> & sensitivities, Neuron * n)
{
    neuron = n;
    CalculateType(sensitivities);
    PhysicalProperties * pProps = n->Properties()->GetPhysicalProps();
    axonTip.x = pProps->GetX();
    axonTip.y = pProps->GetY();
    axonTip.z = pProps->GetZ();
    axon.push_back(axonTip);
    if(axonType != 0) {
        growing = true;
    } else {
        growing = false;
    }
}

SimpleNet::GrowNeuron::GrowNeuron(SimpleNet *sn, const unsigned int _radius, Network * _net)
    : sensitivityThreshold(0.0), radius(_radius), simplenet(sn), net(_net)
{
}

SimpleNet::GrowNeuron::~GrowNeuron()
{
}

SimpleNet::SimpleNet(string nType, float _oR, float _iR)
    : FACTOR(180./127.)
{
    cube = NULL;
    maxId = 0;
    outerRadius = _oR;
    innerRadius = _iR;
    oR = (int)ceil(outerRadius);    // make an integer cube around the sphere
    chromosome = 0;
    net = Network::GetNetworkRef();
    TopologyFactory<FlatLayer> makeFlatLayer;
    top = makeFlatLayer("single layer");
    neuronFactory = dynamic_cast<NFactory*>(FactoryBase::GetRegistry().GetFactory(nType));
}

SimpleNet::~SimpleNet()
{
    std::cout << "Destroying SimpleNet object" << std::endl;
    for(neuron_iterator it = inputNeurons.begin(); it != inputNeurons.end(); it++) {
        delete *it;
    }
    for(neuron_iterator it = outputNeurons.begin(); it != outputNeurons.end(); it++) {
        delete *it;
    }
    if(cube) DestroyCube();
}

void SimpleNet::DestroyCube()
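For orientation, a minimal usage sketch follows. It is not part of the original listing: the factory name "BasicNeuron" and both radii are illustrative assumptions, chosen only because basicneuron.h is included above; the actual registry key and sensible radii depend on the rest of the Amygdala setup.

// --- Usage sketch (illustrative, not from the original source) --------------
// Assumes a neuron factory registered as "BasicNeuron"; radii are arbitrary.
#include <amygdala/simplenet.h>

int main()
{
    // outer radius 10, inner radius 2 (example values only)
    SimpleNet net("BasicNeuron", 10.0f, 2.0f);
    // growth and wiring would then be driven through the SimpleNet interface
    // declared in simplenet.h
    return 0;
}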