//
// silly.cpp
// Neural Network Utility example
// One can see the learning process in detail (under/overlearning)
//
// Created by quantuumdot in Sep 2003
// Copyright (c) 2003 Tomasz Mloduchowski
//
/*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
// To contact author of example email quantuumdot (at) users.sourceforge.net
#include <nn-utility.h>
#include <unistd.h>
#include <fstream>
#include <iostream>
#include <stdio.h>
using std::cout;
using std::ios;
using std::ofstream;
using std::string;
#define ON_LINUX //Comment this out on non-Linux platforms
NEURAL NET; //Global nn-utility object providing the activation routines used below
#define NTRAINDIFF 20 //How many different training points
#define NCHECKDIFF 50 //How many different checking points
#define TRSESSLEN 100 //How long is single training session
#define NTRSESS 250 //How many training sessions perform
float function( VECTOR input, VECTOR weight, float bias, int length, bool output ){
return NET.sigmoid( input, weight, bias, length );
} //Sigmoidal function, see MANUAL for details
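// For orientation, a minimal sketch of what a sigmoid unit is assumed to
// compute here (the real nn-utility routine may differ; plain float arrays
// stand in for the library's VECTOR type):
#include <math.h> //for expf(), used only by the sketch below
static float sigmoid_sketch( const float *in, const float *w, float bias, int length ){
float sum = bias; //weighted sum of inputs plus bias
for (int k = 0; k < length; k++)
sum += in[k] * w[k];
return 1.0f / (1.0f + expf(-sum)); //logistic activation, output in (0,1)
}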
void GetInput( int iteration, VECTOR &send, VECTOR &target){
LoadVector (send, 2, (((float)(rand()%25))/25.0), ((float)(iteration%(NTRAINDIFF/2)))/NTRAINDIFF);
LoadVector (target, 1 , ((float)(iteration%(NTRAINDIFF/2))/NTRAINDIFF));
} //The net is trained on two columns of data, of which it should ignore the first.
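// With NTRAINDIFF = 20, iteration % (NTRAINDIFF/2) cycles through 0..9, so
// the generated training pairs look like:
// iteration 0: send = (rand, 0.00), target = (0.00)
// iteration 3: send = (rand, 0.15), target = (0.15)
// iteration 9: send = (rand, 0.45), target = (0.45)
// where rand is a random value in [0, 0.96].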
bool CheckTrain( VECTOR output, VECTOR target, int length ){
return true;
} //Always train network.
void UpdateLayer( VECTOR target, VECTOR &new_target, int length, float learning_rate, VECTOR &biases, VECTOR input, MATRIX &weight, VECTOR result, int rows, int cols, bool output ){
NET.sigmoid( target, new_target, length, learning_rate, biases, input, weight, result, rows, cols, output );
} //Sigmoidal backpropagation, see MANUAL for details.
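// For reference, the classic per-unit delta rule that sigmoid
// backpropagation is assumed to apply (names illustrative, not the
// library's API):
// delta = out * (1 - out) * error //sigmoid derivative times error term
// w[k] += learning_rate * delta * in[k]
// bias += learning_rate * delta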
#ifdef ON_LINUX
static inline unsigned long long int rdtsc( void )
{
unsigned long long int retval;
__asm __volatile ("rdtsc":"=A"(retval)::"memory");
return retval;
} //Return the number of CPU cycles since the reset signal (normally since the machine started).
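// Note: the "=A" constraint pairs edx:eax only on 32-bit x86. On x86-64 an
// assumed-equivalent alternative is the GCC/Clang intrinsic:
// #include <x86intrin.h>
// static inline unsigned long long rdtsc( void ){ return __rdtsc(); }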
#define TIMING_OFF ctime+=rdtsc(); ctime-=calibration; //Stops the time measurement (used around I/O streams, whose cost is not correlated with the speed of the library)
#define TIMING_ON ctime-=rdtsc(); //Restarts it
#define TIMING_PREP long long int ctime=0, calibration=0;\
cout << "Calibrating time measurement\n";\
sleep(3);\
calibration -= rdtsc();\
calibration += rdtsc();\
cout << "Ok, calibration is " << calibration << " cycles\n"; //Prepare the timing routines, calibrate the asm functions. Sleep is needed to get kernel some time to clean data after process started.
#define TIMING_REP cout << "Time used :" << ctime << " cycles\n";
//Report the timing.
#endif
#ifndef ON_LINUX
#define TIMING_OFF
#define TIMING_ON
#define TIMING_PREP
#define TIMING_REP
//All empty, since this is not Linux.
#endif
int main(){
TIMING_PREP
TIMING_ON
layer *hidden1 = new layer( 2,2 );
layer *hidden2 = new layer( 2,2 );
layer *output = new layer( 2,1 );
VECTOR input, FINAL, target;
int i,j;
ofstream grif,grid;
char str[1024];
//Variables creation
Insert (&hidden1, &hidden2);
Insert (&hidden2, &output);
hidden1->Set( -2.0, 3.0, -2.0, 3.0);
hidden2->Set( -2.0, 2.0, -4.0, 2.0);
output->Set ( 3.0, 1.0);
LoadVector (hidden1->weight, 2, 2.0, 2.0);
LoadVector (hidden2->weight, 2, 3.0, -2.0);
LoadVector (output->weight, 1, -2.0);
//Initial values for network.
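// Assuming the layer(inputs, neurons) constructor, the topology is:
// input(2) -> hidden1(2 neurons) -> hidden2(2 neurons) -> output(1 neuron)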
for (i=0; i<20; i++){
LoadVector (input, 2, 1.0, (((float)i)/20.0)); //First column fixed at 1.0 (pass a floating-point value, not int, through the varargs)
hidden1->FeedForward (input, FINAL);
TIMING_OFF
PrintVector(input,2);
PrintVector(FINAL,1);
TIMING_ON
} //Check the behaviour of the network right after creation for different input values.
for (j=0; j<NTRSESS; j++){
train (&hidden1, &output, TRSESSLEN, 0.7, false); //One training session, so we can watch how the network learns.
sprintf(str,"silly.%.3d.gri",j);
grif.open(str,ios::out);
sprintf(str,"silly.%.3d.dta",j);
grid.open(str,ios::out);
grif << "open" << " \"" << str << "\"\n";
grif << "read columns x y\n";
grif << "set x axis 0 1 0.1\n";
grif << "set y axis -1 1 0.1\n";
grif << "draw symbol bullet\n";
grif << "\\function = \"0\"\n";
grif << "create columns from function\n";
grif << "draw curve\n";
grif << "set font size 16\n";
grif << "draw title \"" << j*TRSESSLEN << " iterations\"\n";
grif.close();
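// The .gri file written above is a script for the Gri plotting language;
// running it (e.g. "gri silly.000.gri", invocation assumed) should plot the
// error column of the matching .dta file against a zero reference curve.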
for (i=0; i<NCHECKDIFF; i++){
LoadVector (input, 2, 1.0, (((float)i)/NCHECKDIFF));
hidden1->FeedForward (input, FINAL);
TIMING_OFF
cout << "LEARNING STEP: " << j*TRSESSLEN << "\n";
PrintVector(input,2);
PrintVector(FINAL,1);
grid << input[1] << " " << input[1]-FINAL[0] << "\n";
TIMING_ON
} //Report how the network handled the data: interpolation between the learning points, plus extrapolation well beyond them, to see whether the network behaves the same on completely untrained data.
grid.close();
/*
* Here comes some explanation:
* The network is trained on numbers like: rand(0,1) 0.05; rand(0,1) 0.10; etc. up to rand(0,1) 0.45
* It is checked with 1 0.00; 1 0.02 etc. up to 1 1
* At the beginning we get random answers; after 20*100 learning steps the network manages to do almost what we need, and it extrapolates up to 0.64 with good characteristics.
* As learning continues, the behaviour of the net in the trained range does not change, but it extrapolates worse. This bad feature is called overlearning: the network keeps doing well on the training data, but no longer "catches" the idea.
*/
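/*
* A common guard against overlearning (not used in this example, sketch
* only) would be early stopping: measure the error on held-out checking
* points after each session and stop once it starts rising, e.g.:
* float check_err = fabsf(input[1] - FINAL[0]);
* if (check_err > last_check_err) break; //stop before overlearning sets in
*/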
}
TIMING_OFF
TIMING_REP
//Final stage: report the efficiency of the library (to check whether a new version gained some speed).
return 0;
}