cmlp.cpp
//Compute the outputs of the network in response to the supplied inputs. The returned
//array points to storage allocated with new and must be deallocated by the caller.
double *CMLP::pdGetOutputs(
const double * const pdInputs
)
{
unsigned long i,j;
//Declare storage for the activities of the hidden and output neurons
double *pdah=new double[ulNumberOfHiddenNodes];
double *pdao=new double[ulNumberOfOutputs];
//Declare storage for the amount of stimulation coming onto a neuron
double dStimulus;
//Calculate the activity of the network's hidden neurons.
for(i=0;i<ulNumberOfHiddenNodes;i++)
{
//Each hidden neuron receives internal stimulation:
dStimulus=ppdwih[i][0];
//And stimulation from input neurons (the activities of which are just the inputs to the
//network, pdInputs) via the weights connecting it to each input (ppdwih). Remember
//that pdInputs contains scaled versions of x-displacement, y-displacement and wind
//speed.
for(j=1;j<ulNumberOfInputs+1;j++)
{
dStimulus+=ppdwih[i][j]*pdInputs[j-1];
}
//The stimulation that a hidden neuron receives is translated into its level of activity
//by an "activation function":
pdah[i]=1.0/(1.0+exp(-dStimulus));
//The logistic function (used in the line above) is by far the most common, though almost
//any function can be used. In fact, each hidden neuron can use a different function
//(though, strictly speaking, such a network can no longer be considered neural). Of
//course, the weights learnt during training are specific to the activation functions
//used, so weights learnt by a network with logistic activation functions won't work
//well in one with sine functions, for example.
}
//The activity of the output neuron of the network is computed in
//essentially the same way as the activities of the hidden neurons, except that the
//output receives stimulation from the hidden neurons (pdah) via the hidden to output
//layer weights (ppdwho). Note that a network may have several outputs but this application
//requires only one - to represent the angle of the AI tank's barrel.
for(i=0;i<ulNumberOfOutputs;i++)
{
//Account for the neuron's internal stimulation.
dStimulus=ppdwho[i][0];
//And that coming from the hidden neurons
for(j=1;j<ulNumberOfHiddenNodes+1;j++)
{
dStimulus+=ppdwho[i][j]*pdah[j-1];
}
//Translate this stimulation into the activity of the output neuron using the activation
//function:
pdao[i]=dStimulus;
//In this case, the activation function of the output neuron is just the identity:
//the output neuron's activity equals the amount of stimulation it receives. This
//is the function normally used when a network is estimating a continuous variable
//(like the angle of a tank's barrel). As with hidden neurons, other activation
//functions can be used, though care should be taken to make sure that it is
//possible for the network output to reach the desired value. For example, the angle of
//the AI tank's barrel that hits the player is always negative, so there'd be no
//point in using the logistic activation function for the network outputs (because
//the output of the logistic function is always positive).
}
//Deallocate the temporary storage that was used for the hidden neuron activities
delete []pdah;
//Remember that we're returning a pointer to "new"ed storage, so the calling function
//must deallocate it to avoid memory leaks.
return pdao;
}
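//A minimal usage sketch added for illustration (not part of the original class): it
//shows the calling convention for pdGetOutputs. The caller owns the array the function
//returns and must deallocate it. The helper's name and arguments are assumed for the
//example.
static double dEstimateBarrelAngle(CMLP &Network,const double * const pdScaledInputs)
{
double *pdOutputs=Network.pdGetOutputs(pdScaledInputs);
//This application uses a single output: the scaled estimate of the barrel angle.
double dAngle=pdOutputs[0];
//Deallocate the storage that was "new"ed inside pdGetOutputs to avoid a memory leak.
delete []pdOutputs;
return dAngle;
}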
double CMLP::dGetPerformance(void)
{
//dBestError is computed in dTrainingStep and indicates the current performance of the
//network on the exemplar data. The function below need only be called from outside the class
//if the network's performance on some other data needs to be measured.
return dBestError;
}
double CMLP::dGetPerformance(
const unsigned long ulNumberOfPatterns,
double ** const ppdInputs,
double ** const ppdTargets
)
{
double dError=0.0;
double *pdOutputs;
unsigned long i,j;
//Go through each pattern (example) in the data set and
for(i=0;i<ulNumberOfPatterns;i++)
{
//Compute the output (estimated barrel angle) produced by the network in response
//to its inputs (x-displacement, y-displacement and wind speed)
pdOutputs=pdGetOutputs(ppdInputs[i]);
for(j=0;j<ulNumberOfOutputs;j++)
{
//Compute the squared error between the output produced by the network
//(the barrel angle it estimated as being correct) and the target output
//contained in the sample (the barrel angle that actually scored the hit)
dError+=0.5*pow(fabs(ppdTargets[i][j]-pdOutputs[j]),2.0);
//Again, multiple outputs are supported, but only one is used in this
//application. Occasionally, different error measures are employed. Using a number
//greater than 2.0 in the above equation tends to result in much less variation
//in the performance of the network across samples in the data set, but slows
//training down, whereas values less than 2.0 speed training up but allow the
//network to show greater variation in performance across the data set - almost
//ignoring some of the more difficult to learn examples. A value of 2.0 should be
//sufficient for virtually all applications. Note that if a value other than 2.0
//is used, the termination error specified in TanksDoc must be revised accordingly.
}
//Deallocate the memory used to store the network outputs.
delete []pdOutputs;
}
//Divide the error by the number of patterns to give the average error per sample - makes
//the error measure independent of the number of samples and hence a little more
//interpretable.
dError/=double(ulNumberOfPatterns);
//Return the computed error
return dError;
}
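//A sketch of the generalised error measure discussed in the comments above (added for
//illustration, not part of the original class). Replacing the fixed exponent 2.0 with
//a parameter gives the trade-off described there: larger exponents weight the worst
//samples more heavily, evening out performance across the data set but slowing
//training; smaller exponents speed training but let the network neglect the hardest
//examples. The function name and signature are assumed for the example.
static double dGetSampleError(const double dTarget,const double dOutput,const double dExponent)
{
return 0.5*pow(fabs(dTarget-dOutput),dExponent);
}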
int CMLP::Save(const char * const pFileName)
{
unsigned long i,j;
assert(pFileName);
//Create the output stream to save the network to.
ofstream *pOut=new ofstream(pFileName);
//Make sure it was created and opened successfully.
if(!pOut)
{
assert(false);
return 0;
}
if(!pOut->is_open())
{
assert(false);
delete pOut;
return 0;
}
//Make sure we don't lose information.
pOut->precision(15);
//Save all the network info:
//Its structure...
*pOut<<ulNumberOfInputs;
*pOut<<"\n";
*pOut<<ulNumberOfHiddenNodes;
*pOut<<"\n";
*pOut<<ulNumberOfOutputs;
*pOut<<"\n";
//Its weights
for(i=0;i<ulNumberOfHiddenNodes;i++)
{
for(j=0;j<ulNumberOfInputs+1;j++)
{
*pOut<<ppdwih[i][j];
*pOut<<"\t";
}
*pOut<<"\n";
}
for(i=0;i<ulNumberOfOutputs;i++)
{
for(j=0;j<ulNumberOfHiddenNodes+1;j++)
{
*pOut<<ppdwho[i][j];
*pOut<<"\t";
}
*pOut<<"\n";
}
//When training started...
*pOut<<"Training started\t";
*pOut<<pTrainingStartTime;
*pOut<<"\t";
*pOut<<pTrainingStartDate;
*pOut<<"\n";
//the current date and time...
char *pTime=new char[256];
*pOut<<"Current time\t\t";
_strtime(pTime);
*pOut<<pTime;
*pOut<<"\t";
_strdate(pTime);
*pOut<<pTime;
*pOut<<"\n";
delete []pTime;
//And how well the network currently performs.
*pOut<<"Performance\t\t";
*pOut<<dBestError;
*pOut<<"\n";
//Close the file and delete the stream.
pOut->close();
delete pOut;
//Return that the save was successful.
return 1;
}
int CMLP::Load(const char * const pFileName)
{
unsigned long i,j;
assert(pFileName);
//Create a stream to load the network from
ifstream *pIn=new ifstream(pFileName,ios::nocreate);
//Check to make sure that it was created and could be opened.
if(!pIn)
{
assert(false);
return 0;
}
if(!pIn->is_open())
{
assert(false);
delete pIn;
return 0;
}
//Since we're about to load a new network, we should delete the storage used by the
//current one to prevent memory leaks.
DeallocateMemory();
//Load the structure of the new network
*pIn>>ulNumberOfInputs;
*pIn>>ulNumberOfHiddenNodes;
*pIn>>ulNumberOfOutputs;
//Allocate memory to store its weights
AllocateMemory();
//Reset its status so that it can be trained if necessary
Reset();
//Load in its weights
for(i=0;i<ulNumberOfHiddenNodes;i++)
{
for(j=0;j<ulNumberOfInputs+1;j++)
{
*pIn>>ppdwih[i][j];
ppdBestwih[i][j]=ppdwih[i][j];
}
}
for(i=0;i<ulNumberOfOutputs;i++)
{
for(j=0;j<ulNumberOfHiddenNodes+1;j++)
{
*pIn>>ppdwho[i][j];
ppdBestwho[i][j]=ppdwho[i][j];
}
}
//Close and delete the stream.
pIn->close();
delete pIn;
//Indicate that we've been successful.
return 1;
}
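//An illustrative save/load round trip (added as an example, not part of the original
//class). Save writes the network's structure and weights as plain text with 15 digits
//of precision, and Load restores them (ignoring the trailing training metadata), so
//the two calls below should leave the network essentially unchanged. The file name is
//an arbitrary example.
static void RoundTripExample(CMLP &Network)
{
//Write the current structure, weights and training metadata to disk...
if(Network.Save("mlp.txt"))
{
//...then read them straight back, replacing the in-memory network with the saved one.
Network.Load("mlp.txt");
}
}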