📄 buildlayers.cpp
/***************************************************************************
                         buildlayers.cpp  -  description
                            -------------------
    copyright            : (C) 2001 by Matt Grover
    email                : mpgrover@sourceforge.net
 ***************************************************************************/

/***************************************************************************
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify *
 *   it under the terms of the GNU General Public License as published by *
 *   the Free Software Foundation; either version 2 of the License, or    *
 *   (at your option) any later version.                                   *
 *                                                                         *
 ***************************************************************************/

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif

#include <iostream>
#include <cstdlib>
#include <cstring>
#include <string>

#include <amygdala/types.h>
#include <amygdala/network.h>
#include <amygdala/layer.h>
#include <amygdala/basicneuron.h>
#include <amygdala/alphaneuron.h>
#include <amygdala/fastneuron.h>
#include <amygdala/netloader.h>
#include <amygdala/simplespikeinput.h>

using namespace std;

Network* BuildNet();

string dataDir;

int main(int argc, char *argv[])
{
    dataDir = "/../../data";

    // Tell libxerces where it will find the root data directory
    //AmEntityResolver::SetDataDir(dataDir.c_str());
    //dataDir = dataDir + "/data";

    cout << "Building the network... \n";
    Network* testNet = BuildNet();

    // Uncomment to run the network with some input
    //Neuron::CaptureOutput( OUTPUT_LAYERS );
    Neuron::CaptureOutput( OFF );
    SimpleSpikeInput spikeIn(testNet);
    string inputFile = dataDir + "/buildlayers_input.xml";
    if (spikeIn.ReadSpikeDef(inputFile.c_str())) {
        cout << "Results for buildlayers network:\n";
        testNet->Run(350000);
    } else {
        cout << "Could not load input!\n";
    }

    // Uncomment to enable saving
    // cout << "Saving the network... \n";
    // NetLoader* netSave = new NetLoader(testNet);
    // netSave->SaveXML("BuildLayerSave.xml");
    // delete netSave;

    delete testNet;
    cout << "Exiting." << endl;
    return EXIT_SUCCESS;
}

Network* BuildNet()
{
    // Build network with 10 input neurons, two hidden layers with
    // 200 neurons each, and an output layer with 10 neurons.
    // TODO: This must be modified to match the above description
    const unsigned int INPUTSIZE = 10;
    const unsigned int HIDDENSIZE = 200;
    const unsigned int OUTPUTSIZE = 10;
    unsigned int startId = 1;
    LayerConstants layerConst;
    Network* testNet = new Network();
    Layer* inputLayer;
    Layer* hiddenLayer1;
    Layer* hiddenLayer2;
    Layer* outputLayer;
    GaussConnectType conParms;
    NetLoader* netLoad = new NetLoader(testNet);

    testNet->SetTrainingMode(false);

    // Set up the constants struct
    layerConst.type = INPUTLAYER;
    layerConst.layerId = 1;
    layerConst.learningConst = 1e-3;
    layerConst.membraneTimeConst = 10.0;
    layerConst.synapticTimeConst = 2.0;
    layerConst.restPtnl = 0.0;
    layerConst.thresholdPtnl = 5.0;

    // Build the layers with NetLoader. Layers are automatically
    // added to testNet
    inputLayer = netLoad->BuildLayer(INPUTSIZE, startId, layerConst, "FastNeuron");
    inputLayer->LayerName("Input Layer");

    layerConst.type = HIDDENLAYER;
    layerConst.layerId = 2;
    startId = testNet->MaxNeuronId() + 1;
    hiddenLayer1 = netLoad->BuildLayer(HIDDENSIZE, startId, layerConst, "FastNeuron");
    hiddenLayer1->LayerName("Hidden Layer 1");

    layerConst.type = HIDDENLAYER;
    layerConst.layerId = 3;
    startId = testNet->MaxNeuronId() + 1;
    hiddenLayer2 = netLoad->BuildLayer(HIDDENSIZE, startId, layerConst, "FastNeuron");
    hiddenLayer2->LayerName("Hidden Layer 2");

    layerConst.type = OUTPUTLAYER;
    layerConst.layerId = 4;
    startId = testNet->MaxNeuronId() + 1;
    outputLayer = netLoad->BuildLayer(OUTPUTSIZE, startId, layerConst, "FastNeuron");
    outputLayer->LayerName("Output Layer");

    // Set up the connection parameters
    conParms.meanWeight = 0.2;     // Mean weight is 20% of maximum
    conParms.stdDev = 0.15;
    conParms.pctConnect = 100.0;   // Fully connected

    // Connect the input layer to the first hidden layer with
    // no inhibitory connections
    inputLayer->ConnectLayers(hiddenLayer1, conParms);

    // Connect the first hidden layer to the second hidden layer.
    // Use a connection rate of 60% with 30% inhibitory.
    conParms.pctConnect = 60.0;
    hiddenLayer1->ConnectLayers(hiddenLayer2, conParms, 30.0);

    // Connect hiddenLayer2 to the output layer.
    // Connection rate is 60% and 30% are inhibitory.
    hiddenLayer2->ConnectLayers(outputLayer, conParms, 30.0);

    delete netLoad;
    return testNet;
}
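
For reference, the layer-construction pattern above can be exercised on its own. The sketch below is not part of the original example: it is a hypothetical BuildTinyNet() helper that assembles a minimal two-layer network, reusing only calls that already appear in this file (NetLoader::BuildLayer, Layer::ConnectLayers, Layer::LayerName, and the LayerConstants / GaussConnectType fields); the layer sizes and names are illustrative assumptions.

// Hypothetical helper, not in the original buildlayers.cpp. A minimal
// two-layer network assembled with the same Amygdala calls used above.
Network* BuildTinyNet()
{
    const unsigned int INPUTSIZE = 4;    // illustrative size, not from the source
    const unsigned int OUTPUTSIZE = 2;   // illustrative size, not from the source
    unsigned int startId = 1;

    Network* net = new Network();
    NetLoader* loader = new NetLoader(net);
    net->SetTrainingMode(false);

    // Same LayerConstants fields as in BuildNet(); values copied from above.
    LayerConstants lc;
    lc.type = INPUTLAYER;
    lc.layerId = 1;
    lc.learningConst = 1e-3;
    lc.membraneTimeConst = 10.0;
    lc.synapticTimeConst = 2.0;
    lc.restPtnl = 0.0;
    lc.thresholdPtnl = 5.0;

    Layer* in = loader->BuildLayer(INPUTSIZE, startId, lc, "FastNeuron");
    in->LayerName("Tiny Input");

    lc.type = OUTPUTLAYER;
    lc.layerId = 2;
    startId = net->MaxNeuronId() + 1;
    Layer* out = loader->BuildLayer(OUTPUTSIZE, startId, lc, "FastNeuron");
    out->LayerName("Tiny Output");

    // Fully connected, excitatory only, as in the input->hidden connection above.
    GaussConnectType cp;
    cp.meanWeight = 0.2;
    cp.stdDev = 0.15;
    cp.pctConnect = 100.0;
    in->ConnectLayers(out, cp);

    delete loader;
    return net;
}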