// annealing.cpp -- simulated-annealing example program for the nn-utility library
/*
nn-utility (Provides neural networking utilities for c++ programmers)
Copyright (C) 2003 Panayiotis Thomakos
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
*/
//To contact the author send an email to panthomakos@users.sourceforge.net
#include <nn-utility.h>
using namespace nn_utility;
//Instantiate the float specialization of "nn_utility_functions".
//This single global object provides the helper routines used below
//(LoadVectorf, train, PrintVector).
nn_utility_functions<float> derived;
int main(){
//create a widrow hoff network layer named "hidden1"
WIDROW_HOFF<float> *hidden1 = new WIDROW_HOFF<float>( );
//define it's size 2x2 and it's elements
hidden1->definef( 2, 2, 2.0, 2.0, -1.0, -1.0 );
//create an initial input vector called "IN"
nn_utility_functions<float>::VECTOR IN;
derived.LoadVectorf( IN, 2, 1.0, 1.0 );
//define "hidden1" as an annealing network (see MANUAL for details)
hidden1->set_annealing( IN, 2, 1.0, 0.8, 5 );
//create a buffer for "hidden1", name the buffer "ppHidden1"
layer<float> *ppHidden1 = hidden1;
//train the network for 100 interations as 0.1 learning rate, and present output
derived.train( &ppHidden1, 100, 0.1, true );
//Present output:
cout << "Simulated Annealing found minima to be ";
derived.PrintVector( ppHidden1->annealing_input, 2 );
cout << "Notice in the output that the network has clearly converged on the\n";
cout << "minima of the network function. The minima should be [ 0 1 ].\n";
//attempts to find value of f(x) : f(x) < than all others by localizing minima
cout << '\n';
return 0;
}