/* ann.cc */
/* YAKS, Optann, a Khepera simulator including a separate GA and ANN
   (Genetic Algorithm, Artificial Neural Net).
   Copyright (C) 2000 Johan Carlsson (johanc@ida.his.se)

   This program is free software; you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 2 of the License, or
   any later version.

   This program is distributed in the hope that it will be useful, but
   WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software Foundation,
   Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */

#include "ann.h"

/**
 * Get the last activation value from all nodes with priority pri.
 * @attention The caller is responsible for free()ing both list->values and the returned structure.
 * @param pri The priority of the nodes to look for
 * @return A structure with (struct)->nodes and (struct)->values, or NULL if no nodes were found.
 */
valuesS *Ann::getValuesFromNodesWithPriority(int pri){
  int nrOfSuchNodes;
  int indexB,indexE,indexL;
  valuesS* list;

  nrOfSuchNodes=indexB=indexE=indexL=-1;
  /* Find the first (indexB) and one-past-last (indexE) activation-order index with this priority */
  for(int i=0; i < nrOfNeurons && indexB==-1; i++){
    if(actOrder[i]->getPriority()==pri && indexB==-1){
      indexB=i;
      for(i=indexB; i < nrOfNeurons && indexE==-1; i++){
        if(actOrder[i]->getPriority()!=pri){
          indexE=i;
        }
      }
    }
  }
  /* If the matching block runs to the end of actOrder, close it there */
  if(indexB!=-1 && indexE==-1){
    indexE=nrOfNeurons;
  }
  indexL=indexE-indexB;
  if(indexL>0){
    list = (valuesS*) malloc(sizeof(valuesS));
    list->nodes=indexL;
    list->values = (double*)malloc(indexL*sizeof(double));
    for(int i=0; i < indexL; i++){
      list->values[i]=actOrder[indexB+i]->getActivationValue();
    }
    return list;
  }
  else{
    return NULL;
  }
}

/**
 * Save all network weights to file.
 * @see loadWeights()
 * @param fp A file pointer open for writing.
 */
void Ann::saveWeights(FILE *fp) const{
  for(int i=0; i < nrOfNeurons; i++){
    fprintf(fp,"Neuron %d Links %d ",i,actOrder[i]->getNrOfLinks());
    for(int w=0; w < actOrder[i]->getNrOfLinks(); w++){
      fprintf(fp,"%hd ",(short)actOrder[i]->getLinkWeight(w));
    }
    fprintf(fp,"\n");
  }
}

/**
 * Load network weights from file.
 * @see saveWeights()
 * @param fp A file pointer open for reading.
 */
void Ann::loadWeights(FILE *fp){
  int bogusI=0,bogusL=0;
  int scanned=0;
  int8_t weight=0;
  short weights=0;

  for(int i=0; i < nrOfNeurons; i++){
    scanned=fscanf(fp,"Neuron %d Links %d ",&bogusI,&bogusL);
    if(bogusL != actOrder[i]->getNrOfLinks() || scanned!=2){
      fprintf(stderr,"Ann::loadWeights Wohaaaa something is wrong !!!\n");
      exit(-1);
    }
    for(int w=0; w < actOrder[i]->getNrOfLinks(); w++){
      fscanf(fp,"%hd ",&weights);
      weight = (int8_t)weights;
      actOrder[i]->setLinkWeight(w,weight);
      weight=0;
    }
    fscanf(fp,"\n");
  }
}
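/*
 * Illustrative usage sketch (an assumption, not part of the original YAKS
 * sources): one way the saveWeights()/loadWeights() pair above can be driven.
 * The function name exampleWeightRoundTrip and the path "weights.txt" are
 * made up for the example; <cstdio> is assumed to be reachable through
 * ann.h, since this file already uses FILE, fprintf() and fscanf().
 */
static void exampleWeightRoundTrip(Ann *net){
  FILE *fp = fopen("weights.txt","w");
  if(fp!=NULL){
    net->saveWeights(fp);   /* writes one "Neuron i Links n w0 w1 ..." line per neuron */
    fclose(fp);
  }
  fp = fopen("weights.txt","r");
  if(fp!=NULL){
    net->loadWeights(fp);   /* reads the same format back and restores the int8_t link weights */
    fclose(fp);
  }
}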
/**
 * Print the network architecture with weights and activation values to stdout.
 */
void Ann::printNet() const{
  cout << "Total number of neurons " << nrOfNeurons << endl;
  cout << " - Inputs : " << nrOfInputs << endl;
  cout << " - Hidden + Bias : " << nrOfNeurons-nrOfInputs-nrOfOutputs << endl;
  cout << " - Outputs : " << nrOfOutputs << endl;
  if(actOrder!=NULL){
    for(int i=0; i < nrOfNeurons; i++){
      cout << "Neuron " << i << " priority " << actOrder[i]->getPriority()
           << " links " << actOrder[i]->getNrOfLinks();
      for(int z=0; z < actOrder[i]->getNrOfLinks(); z++){
        cout << " " << ((double)actOrder[i]->getLinkWeight(z)/12.8);
      }
      cout << " type " << actOrder[i]->getNodeType();
      cout << " activation " << actOrder[i]->getActivationValue() << endl;
    }
  }
}

/**
 * Mutate all neurons' link weights in the network.
 * @param bitMutateProbability The chance that a bit in a weight gets flipped/mutated
 */
void Ann::mutateNeurons(int bitMutateProbability){
  for(int i=0; i < nrOfNeurons; i++){
    neurons[i]->mutateWeights(bitMutateProbability);
  }
}

/**
 * Mutate the link weights of all neurons in the network that have priority pri.
 * @param bitMutateProbability The chance that a bit in a weight gets flipped/mutated
 * @param pri The priority of the nodes to look for
 */
void Ann::mutateNeuronsWithPri(int bitMutateProbability, int pri){
  for(int i=0; i < nrOfNeurons; i++){
    neurons[i]->mutateWeightsWithPri(bitMutateProbability, pri);
  }
}

/**
 * Get the number of links belonging to a neuron.
 * @param index The internal index of the neuron.
 * @return The number of links, or 0 if the neuron does not exist.
 */
int Ann::getNrOfLinks(int index) const{
  if(index < 0 || index >= nrOfNeurons){ /* index == nrOfNeurons is out of range */
    return 0;
  }
  else{
    return actOrder[index]->getNrOfLinks();
  }
}

/**
 * Get the number of input neurons in the network.
 * @return The number of input neurons.
 */
int Ann::getNrOfInputNodes() const{
#ifdef ANN_DEBUG
  cout << "Ann::getNrOfInputNodes returns " << nrOfInputs << endl;
#endif
  return nrOfInputs;
}

/**
 * Get the number of neurons in the network.
 * @return The number of neurons.
 */
int Ann::getNrOfNodes() const{
#ifdef ANN_DEBUG
  cout << "Ann::getNrOfNodes returns " << nrOfNeurons << endl;
#endif
  return nrOfNeurons;
}

/**
 * Get the activation pointer from a neuron.
 * @param index The internal index of the neuron
 * @return The pointer to the activation value, or NULL if index is out of range.
 */
double *Ann::getActivationPointer(int index){
  if(index < 0 || index >= nrOfNeurons){ /* index == nrOfNeurons is out of range */
    return NULL;
  }
  else{
    return neurons[index]->activationPointer();
  }
}

/**
 * Get the number of output neurons in the network.
 * @return The number of output neurons.
 */
int Ann::getNrOfOutputNodes() const{
#ifdef ANN_DEBUG
  cout << "Ann::getNrOfOutputNodes returns " << nrOfOutputs << endl;
#endif
  return nrOfOutputs;
}

/**
 * Replace the weights with the weights from another ANN.
 * @attention The ANN must have the same architecture.
 * @param from The ANN to copy the weights from.
 */
void Ann::copyWeightsFrom(class Ann *from){
  for(int i=0; i < nrOfNeurons; i++){
    for(int u=0; u < actOrder[i]->getNrOfLinks(); u++){
      actOrder[i]->setLinkWeight(u,from->getLinkWeight(i,u));
    }
  }
}

/**
 * Get the weight of a neuron's link.
 * @param neuron The internal index of the neuron
 * @param link The internal index of the link in the neuron
 */
int8_t Ann::getLinkWeight(int neuron,int link) const{
  return(actOrder[neuron]->getLinkWeight(link));
}
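/*
 * Illustrative usage sketch (an assumption, not part of the original YAKS
 * sources): a GA-style reproduction step built on copyWeightsFrom() and
 * mutateNeurons() above.  exampleReproduce, parent and child are made-up
 * names; both nets must share the same architecture, and 5 is only a
 * placeholder for whatever bit-mutation probability Neuron::mutateWeights()
 * expects.
 */
static void exampleReproduce(Ann *parent, Ann *child){
  child->copyWeightsFrom(parent);   /* overwrite the child's link weights with the parent's */
  child->mutateNeurons(5);          /* randomly flip bits in the copied weights */
#ifdef ANN_DEBUG
  child->printNet();                /* dump the mutated architecture and weights to stdout */
#endif
}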
/**
 * Get a pointer to the weight of a neuron's link.
 * @param neuron The internal index of the neuron
 * @param link The internal index of the link in the neuron
 */
int8_t *Ann::getLinkWeightPointer(int neuron,int link){
  return(actOrder[neuron]->getLinkWeightPointer(link));
}

/**
 * Make an exact copy of the ANN.
 * @return An identical copy of the ANN.
 */
class Ann* Ann::duplicate() const{
  Ann *copy;
  long int linkTo,SCNto;
  int i,l,q,z;

  /* Calls Ann::Ann(int dontCare) instead of Ann::Ann(), */
  /* which means that the net doesn't get a bias node by */
  /* default.                                            */
  copy = new Ann(1);

  /* Copy all neurons */
  for(i=0; i < nrOfNeurons; i++){
    copy->addNeuron(neurons[i]->getPriority(),
                    neurons[i]->getNodeType(),
                    neurons[i]->getActFunc(),
                    neurons[i]->getId());
  }
  /* Recreate the links between the copied neurons */
  for(i=0; i < nrOfNeurons; i++){
    for(l=0; l < neurons[i]->getNrOfLinks(); l++){
      linkTo=neurons[i]->getLinkToId(l);
      for(q=0; q < nrOfNeurons; q++){
        if(linkTo==copy->neurons[q]->getId()){
          if(neurons[i]->getLinkType(l)==SCN_L){
            SCNto=neurons[i]->getLinkSCNid(l);
            /* Find the copied neuron with the SCN target id */
            for(z=0; z < nrOfNeurons && copy->neurons[z]->getId()!=SCNto; z++);
            copy->neurons[i]->createSCNLink(copy->neurons[q],copy->neurons[z]);
          }
          else{
            copy->neurons[i]->createLink(copy->neurons[q],
                                         neurons[i]->getLinkWeight(l),
                                         neurons[i]->getLinkType(l));
          }
          break;
        }
      }
    }
  }
  return copy;
}

/**
 * Compare the ANN with another ANN and print the result to stdout.
 * @attention This method does not perform any thorough validity tests.
 * @param net The ANN to compare with
 */
void Ann::compareAnn(class Ann *net) const{
  int error=0;

  if(nrOfInputs!=net->getNrOfInputNodes()){
    printf("Ann::compareAnn In node count mismatch\n");
    error++;
  }
  if(nrOfOutputs!=net->getNrOfOutputNodes()){
    printf("Ann::compareAnn Out node count mismatch\n");
    error++;
  }
  if(error){
    printf("This net looks like this\n");
    printNet();
    printf("Compared net looked like this\n");
    net->printNet();
  }
}

/**
 * Constructor - initializes an ANN and adds a bias node with priority -32768.
 */
Ann::Ann(){
  nrOfNeurons=0;
  nrOfSCNs=0;
  nrOfInputs=0;
  nrOfOutputs=0;
  actOrder=NULL;
  inNodes=NULL;
  outNodes=NULL;
  SCNNodes=NULL;
  addNeuron(-32768,BIAS,NONE);
  neurons[0]->setActivationValue(1.0); /* Bias activation value is always 1.0 */
}

/**
 * Constructor - initializes an ANN without adding a bias node.
 * @param dontCare A dummy variable.
 */
Ann::Ann(int dontCare){
  nrOfNeurons=0;
  nrOfInputs=0;
  nrOfOutputs=0;
  nrOfSCNs=0;
  actOrder=NULL;
  inNodes=NULL;
  outNodes=NULL;
  SCNNodes=NULL;
}

/**
 * Destructor - cleans up and deallocates all allocated memory.
 */
Ann::~Ann(){
  for(int i=0; i < nrOfNeurons; i++){
    delete(neurons[i]);
  }
  free(neurons);
  free(actOrder);
  free(inNodes);
  free(outNodes);
  free(SCNNodes);
}

/**
 * Adds a unique neuron to the network.
 * @param iLayerPriority The priority of the new neuron
 * @param inType The type of the new neuron
 * @param iFuncT The activation function of the new neuron
 */
void Ann::addNeuron(int iLayerPriority, NODE_T inType, ACTIVATION_T iFuncT){
  if(nrOfNeurons==0){
    neurons = (class Neuron**) malloc(sizeof(class Neuron*));
    neurons[0]= new Neuron(iLayerPriority, inType, iFuncT);
    nrOfNeurons++;