
📄 simulate.h

📁 A program for an artificial neural network. For documentation and further information see http://aureservoir.sourceforge.net/
📖 Page 1 of 2
/***************************************************************************/
/*!
 *  \file   simulate.h
 *
 *  \brief  simulation algorithms for Echo State Networks
 *
 *  \author Georg Holzmann, grh _at_ mur _dot_ at
 *  \date   Sept 2007
 *
 *   ::::_aureservoir_::::
 *   C++ library for analog reservoir computing neural networks
 *
 *   This library is free software; you can redistribute it and/or
 *   modify it under the terms of the GNU Lesser General Public
 *   License as published by the Free Software Foundation; either
 *   version 2.1 of the License, or (at your option) any later version.
 *
 ***************************************************************************/

#ifndef AURESERVOIR_SIMULATE_H__
#define AURESERVOIR_SIMULATE_H__

#include "utilities.h"
#include "filter.h"
#include "delaysum.h"
#include <vector>

namespace aureservoir
{

/*!
 * \enum SimAlgorithm
 *
 * all possible simulation algorithms
 * \sa class SimStd
 */
enum SimAlgorithm
{
  SIM_STD,       //!< standard simulation \sa class SimStd
  SIM_SQUARE,    //!< additional squared state updates \sa class SimSquare
  SIM_LI,        //!< simulation with leaky integrator neurons \sa class SimLI
  SIM_BP,        //!< simulation with bandpass neurons \sa class SimBP
  SIM_FILTER,    //!< simulation with IIR-Filter neurons \sa class SimFilter
  SIM_FILTER2,   //!< IIR-Filter before nonlinearity \sa class SimFilter2
  SIM_FILTER_DS  //!< with Delay&Sum Readout \sa class SimFilterDS
};

template <typename T> class ESN;

/*!
 * \class SimBase
 *
 * \brief abstract base class for simulation algorithms
 *
 * This class is an abstract base class for all different kinds of
 * simulation algorithms.
 * The idea behind this system is that the algorithms can be exchanged
 * at runtime (strategy pattern).
 * \note I did quite some research and benchmarking on whether a strategy
 *       design pattern should be used here (due to the virtual function
 *       overhead). In the end the virtual function call really did not
 *       matter, because there is quite a lot of computation inside these
 *       methods.
 *
 * Simply derive from this class if you want to add a new algorithm.
 */
template <typename T>
class SimBase
{
 public:

  /// Constructor
  SimBase(ESN<T> *esn);

  /// Destructor
  virtual ~SimBase() {}

  /// Clones the simulation algorithm with all its internal data
  /// (virtual constructor idiom)
  virtual SimBase<T> *clone(ESN<T> *esn) const = 0;

  /*!
   * simulation algorithm
   *
   * @param in matrix of input values (inputs x timesteps)
   * @param out matrix for output values (outputs x timesteps)
   */
  virtual void simulate(const typename ESN<T>::DEMatrix &in,
                        typename ESN<T>::DEMatrix &out) = 0;

  /// reallocates data buffers
  virtual void reallocate();

  //! @name additional interface for filter neurons and delay&sum readout
  //@{
  virtual void setBPCutoffConst(T f1, T f2) throw(AUExcept);
  virtual void setBPCutoff(const typename ESN<T>::DEVector &f1,
                           const typename ESN<T>::DEVector &f2)
                           throw(AUExcept);
  virtual void setIIRCoeff(const typename DEMatrix<T>::Type &B,
                           const typename DEMatrix<T>::Type &A,
                           int series = 1) throw(AUExcept);
  virtual void initDelayLine(int index,
                             const typename DEVector<T>::Type &initbuf)
                             throw(AUExcept);
  virtual typename DEMatrix<T>::Type getDelays() throw(AUExcept);
  virtual typename DEVector<T>::Type &getDelayBuffer(int output, int nr)
    throw(AUExcept);
  //@}

  /// output from last simulation
  typename ESN<T>::DEMatrix last_out_;

  /// temporary object needed for algorithm calculation
  typename ESN<T>::DEVector t_;

 protected:

  /// reference to the data of the network
  ESN<T> *esn_;
};

/*!
 * \class SimStd
 *
 * \brief standard simulation algorithm as in Jaeger's initial paper
 *
 * simulates an ESN with one activation function in the reservoir
 * and one activation function for the readout neurons, as described in
 * Jaeger's "Tutorial on training recurrent neural networks"
 * \sa http://www.faculty.iu-bremen.de/hjaeger/pubs/ESNTutorial.pdf
 *
 * \example "slow_sine.py"
 * \example "narma10.py"
 */
template <typename T>
class SimStd : public SimBase<T>
{
  using SimBase<T>::esn_;
  using SimBase<T>::last_out_;
  using SimBase<T>::t_;

 public:
  SimStd(ESN<T> *esn) : SimBase<T>(esn) {}
  virtual ~SimStd() {}

  /// virtual constructor idiom
  virtual SimStd<T> *clone(ESN<T> *esn) const
  {
    SimStd<T> *new_obj = new SimStd<T>(esn);
    new_obj->t_ = t_;
    new_obj->last_out_ = last_out_;
    return new_obj;
  }

  /// implementation of the algorithm
  /// \sa class SimBase::simulate
  virtual void simulate(const typename ESN<T>::DEMatrix &in,
                        typename ESN<T>::DEMatrix &out);
};

/*!
 * \class SimLI
 *
 * \brief algorithm with leaky integrator neurons
 *
 * ESN with leaky integrator reservoir units, which are useful for
 * learning slow dynamical systems (amongst others).
 * \attention For stability reasons the spectral radius should not be
 *            bigger than the leaking rate!
 *            If leaking rate = spectral radius the resulting system
 *            will have unit spectral radius.
 *
 * This implementation is done according to:
 * \sa Optimization and applications of echo state networks with leaky
 *     integrator neurons. Neural Networks, 20(3)
 */
template <typename T>
class SimLI : public SimBase<T>
{
  using SimBase<T>::esn_;
  using SimBase<T>::last_out_;
  using SimBase<T>::t_;

 public:
  SimLI(ESN<T> *esn) : SimBase<T>(esn) {}
  virtual ~SimLI() {}

  /// virtual constructor idiom
  virtual SimLI<T> *clone(ESN<T> *esn) const
  {
    SimLI<T> *new_obj = new SimLI<T>(esn);
    new_obj->t_ = t_;
    new_obj->last_out_ = last_out_;
    return new_obj;
  }

  /// implementation of the algorithm
  /// \sa class SimBase::simulate
  virtual void simulate(const typename ESN<T>::DEMatrix &in,
                        typename ESN<T>::DEMatrix &out);
};

/*!
 * \class SimBP
 *
 * \brief algorithm with bandpass style neurons as in Wustlich and Siewert
 *
 * It can be shown that leaky integrator neurons effectively perform a
 * lowpass filtering of the reservoir states.
 * This is an extension of that interpretation, using a lowpass and a
 * highpass filter to get a bandpass style neuron.
 * One reservoir can have neurons with different cutoff frequencies
 * to get richer activations on different timescales.
 * \sa class SimLI
 *
 * The new activations can be calculated like this:
 * ema1 = ema1 + f1 * (activation - ema1);
 * ema2 = ema2 + f2 * (ema1 - ema2);
 * new_activation = ema1 - ema2;
 *
 * ema = exponential moving average filter; corresponds to a lowpass (LOP)
 * f1 = lowpass cutoff frequency
 * f2 = highpass cutoff frequency
 * 0 \<= f2 \< f1 \<= 1
 * f1=1 -> no lowpass filtering, f2=0 -> no highpass filtering
 * f1=1 and f2=0 -> standard ESN
 * f2=0 -> leaky integrator ESN
 *
 * Finally the output needs to be scaled with
 * new_activation = new_activation * (1 + f2/f1)
 * to keep the current spectral radius.
 *
 * \example "slow_sine.py"
 */
template <typename T>
class SimBP : public SimBase<T>
{
  using SimBase<T>::esn_;
  using SimBase<T>::last_out_;
  using SimBase<T>::t_;

 public:
  SimBP(ESN<T> *esn) : SimBase<T>(esn) {}
  virtual ~SimBP() {}

  /// virtual constructor idiom
  virtual SimBP<T> *clone(ESN<T> *esn) const
  {
    SimBP<T> *new_obj = new SimBP<T>(esn);
    new_obj->t_ = t_;
    new_obj->last_out_ = last_out_;
    new_obj->filter_ = filter_;
    return new_obj;
  }

  /// needed for bandpass style neurons
  virtual void setBPCutoffConst(T f1, T f2) throw(AUExcept);

  /// set all LOP and HIP cutoff frequencies
  virtual void setBPCutoff(const typename ESN<T>::DEVector &f1,
                           const typename ESN<T>::DEVector &f2)
                           throw(AUExcept);

  /// implementation of the algorithm
  /// \sa class SimBase::simulate
  virtual void simulate(const typename ESN<T>::DEMatrix &in,
                        typename ESN<T>::DEMatrix &out);
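
As a concrete reading of the SimBP update rule quoted in the comments above, here is a minimal single-neuron sketch of the bandpass filter step. It is illustrative only: the BPNeuron struct and its members are hypothetical and are not part of aureservoir, which works on whole state vectors of type T rather than a single double.

#include <cassert>
#include <iostream>

// Hypothetical single-neuron model of the bandpass update described in
// the SimBP documentation; not aureservoir's actual implementation.
struct BPNeuron
{
  double f1;          // lowpass cutoff frequency,  0 <  f1 <= 1
  double f2;          // highpass cutoff frequency, 0 <= f2 <  f1
  double ema1 = 0.0;  // state of the first exponential moving average
  double ema2 = 0.0;  // state of the second exponential moving average

  // One timestep: bandpass-filter the raw activation and rescale by
  // (1 + f2/f1) to keep the current spectral radius, as in the comment.
  double update(double activation)
  {
    ema1 += f1 * (activation - ema1);  // lowpass stage
    ema2 += f2 * (ema1 - ema2);        // lowpass of the lowpass
    double bp = ema1 - ema2;           // difference gives a bandpass
    return bp * (1.0 + f2 / f1);       // spectral radius correction
  }
};

int main()
{
  // f1 = 1 and f2 = 0 must reduce to a standard ESN neuron (identity).
  BPNeuron standard{1.0, 0.0};
  assert(standard.update(0.5) == 0.5);

  // f2 = 0 with f1 < 1 behaves like a leaky integrator neuron.
  BPNeuron leaky{0.2, 0.0};
  std::cout << leaky.update(1.0) << "\n";  // ~0.2
  std::cout << leaky.update(1.0) << "\n";  // ~0.36
  return 0;
}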

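The strategy pattern and the virtual constructor idiom noted in the SimBase documentation can also be illustrated standalone. Below is a minimal sketch under hypothetical names (Algorithm, StdAlgorithm, Network) that do not appear in aureservoir:

#include <iostream>
#include <memory>

// Hypothetical stand-in for the SimBase hierarchy: an abstract strategy
// with a clone() method (virtual constructor idiom).
struct Algorithm
{
  virtual ~Algorithm() {}
  virtual Algorithm *clone() const = 0;  // virtual constructor idiom
  virtual void run() = 0;
};

// Concrete strategy; clone() copies all internal data, analogous to how
// SimStd::clone() copies t_ and last_out_ in the header above.
struct StdAlgorithm : Algorithm
{
  int state_ = 0;
  virtual StdAlgorithm *clone() const { return new StdAlgorithm(*this); }
  virtual void run() { std::cout << "std step " << ++state_ << "\n"; }
};

// Hypothetical stand-in for ESN: holds a strategy that can be
// exchanged at runtime without changing the host class.
struct Network
{
  std::unique_ptr<Algorithm> sim_;

  void setAlgorithm(Algorithm *a) { sim_.reset(a); }
  void simulate() { sim_->run(); }

  // Copying a Network deep-copies the current strategy via clone(),
  // preserving its internal state even though only a base pointer is held.
  Network copy() const
  {
    Network n;
    n.sim_.reset(sim_->clone());
    return n;
  }
};

int main()
{
  Network net;
  net.setAlgorithm(new StdAlgorithm);
  net.simulate();              // prints "std step 1"

  Network twin = net.copy();   // independent copy of the algorithm state
  twin.simulate();             // prints "std step 2"
  net.simulate();              // also "std step 2": the copies diverge
  return 0;
}

The benchmark remark in the SimBase \note applies here as well: the virtual call overhead of run()/simulate() is negligible as long as each call performs substantial computation.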