backprop.cpp
#include "backprop.h"
#include <time.h>
#include <stdlib.h>
#include <string>
#include <sstream>
#include <fstream>
using namespace std;
#define filesize 500
exfile::exfile(double iv,double nx,double ny,double nz,
               double lx,double ly,double lz,double ht,
               double cpx,double cpy)
{
    interval=iv;                    // sampling interval between tracked points
    nfx=nx; nfy=ny; nfz=nz;         // per-axis normalization factors
    losex=lx; losey=ly; losez=lz;   // per-axis velocity-loss factors at the bounce
    height=ht;                      // z at which the catch point is predicted
    compx=cpx; compy=cpy;           // x/y compensation added to the prediction
    incount=0;                      // number of points read so far
    gotit=0;                        // set once a catch point has been predicted
    prepoints=0;                    // number of predicted points stored
    t=new double[filesize];
    x=new double[filesize];
    y=new double[filesize];
    z=new double[filesize];
    datax=new double[filesize];
    datay=new double[filesize];
    dataz=new double[filesize];
    traindatax=new double[filesize];
    traindatay=new double[filesize];
    traindataz=new double[filesize];
    predatax=new double[filesize];
    predatay=new double[filesize];
    predataz=new double[filesize];
    wantedx=new double[filesize];
    wantedy=new double[filesize];
    wantedz=new double[filesize];
}
int exfile::opendata(char *path)
{
    infile.open(path);
    return infile.is_open() ? 1 : 0;
}
void exfile::readdata(char *pr)
{
    char nouse[20];
    char result[filesize];
    // Each input line is expected to look like "<label>: t,x,y,z"; the shared
    // stream converts each field, and clear() resets its eof flag so the
    // stream can be reused for the next field.
    infile.getline(result,filesize,':');
    stream<<result; stream>>nouse; stream.clear();       // discard the label
    infile.getline(result,filesize,',');
    stream<<result; stream>>t[incount]; stream.clear();  // timestamp
    infile.getline(result,filesize,',');
    stream<<result; stream>>x[incount]; stream.clear();
    infile.getline(result,filesize,',');
    stream<<result; stream>>y[incount]; stream.clear();
    infile.getline(result,filesize);
    stream<<result; stream>>z[incount]; stream.clear();
    // Store the normalized displacement between consecutive points.
    if(incount>=1)
    {
        datax[incount-1]=(x[incount]-x[incount-1])/nfx;
        datay[incount-1]=(y[incount]-y[incount-1])/nfy;
        dataz[incount-1]=(z[incount]-z[incount-1])/nfz;
    }
    incount++;
    // Once five points (four displacements) are available, build the
    // training and prediction windows.
    if(incount>=5)
    {
        exfile::maketd_pd(incount);
    }
}
void exfile::maketd_pd(int inc)
{
    // Slide a 4-sample window over the displacement series: the four most
    // recent displacements become the training inputs, and the last three
    // of them become the prediction inputs.
    for(int j=0;j<=3;j++)
    {
        traindatax[j]=datax[inc-5+j];
        traindatay[j]=datay[inc-5+j];
        traindataz[j]=dataz[inc-5+j];
        if(j>=1)
        {
            predatax[j-1]=traindatax[j];
            predatay[j-1]=traindatay[j];
            predataz[j-1]=traindataz[j];
        }
    }
}
void exfile::writedata(char *pw)
{
    outfile.open(pw);
    if(outfile.is_open())
    {
        outfile<<"x "<<"y "<<"z "<<endl;
        for(int j=0;j<prepoints;j++)
        {
            cout<<wantedx[j]<<" "<<wantedy[j]<<" "<<wantedz[j]<<endl;
            outfile<<wantedx[j]<<" "<<wantedy[j]<<" "<<wantedz[j]<<endl;
        }
        outfile.close();
    }
    else
        cout<<"outfile open error"<<endl;
}
void exfile::collide(int tp)
{
    double vx,vy,vz,dt1,dt2;
    // Estimate the velocity from the last two tracked points.
    vx=(x[tp-1]-x[tp-2])/interval;
    vy=(y[tp-1]-y[tp-2])/interval;
    vz=(z[tp-1]-z[tp-2])/interval;
    // Project forward to the moment the trajectory reaches the ground (z=0).
    dt1=z[tp-1]/fabs(vz);
    z[tp]=0.0;
    x[tp]=x[tp-1]+vx*dt1;
    y[tp]=y[tp-1]+vy*dt1;
    // Apply the per-axis velocity-loss factors for the bounce.
    vx=vx*losex;
    vy=vy*losey;
    vz=vz*losez;
    // Continue until the rebound reaches the catch height, then record the
    // predicted catch point with the x/y compensation applied.
    wantedz[prepoints]=height;
    dt2=wantedz[prepoints]/fabs(vz);
    wantedx[prepoints]=x[tp]+vx*dt2+compx;
    wantedy[prepoints]=y[tp]+vy*dt2+compy;
}
void exfile::update(double nextdatax,double nextdatay,double nextdataz,int tp)
{
    // Shift each 4-sample training window left by one and append the next
    // displacement; the prediction window is the last three samples.
    traindatax[0]=traindatax[1]; traindatax[1]=traindatax[2]; traindatax[2]=traindatax[3];
    traindatax[3]=nextdatax;
    predatax[0]=traindatax[1]; predatax[1]=traindatax[2]; predatax[2]=traindatax[3];
    traindatay[0]=traindatay[1]; traindatay[1]=traindatay[2]; traindatay[2]=traindatay[3];
    traindatay[3]=nextdatay;
    predatay[0]=traindatay[1]; predatay[1]=traindatay[2]; predatay[2]=traindatay[3];
    traindataz[0]=traindataz[1]; traindataz[1]=traindataz[2]; traindataz[2]=traindataz[3];
    traindataz[3]=nextdataz;
    predataz[0]=traindataz[1]; predataz[1]=traindataz[2]; predataz[2]=traindataz[3];
    // De-normalize the displacement to extend the trajectory by one point.
    x[tp]=x[tp-1]+nfx*nextdatax;
    y[tp]=y[tp-1]+nfy*nextdatay;
    z[tp]=z[tp-1]+nfz*nextdataz;
    // If the trajectory has crossed the ground, compute the bounce and the
    // predicted catch point.
    if(z[tp]<=0)
    {
        collide(tp);
        prepoints++;
        gotit=1;
    }
}
exfile::~exfile()
{
    delete [] t;
    delete [] x;
    delete [] y;
    delete [] z;
    delete [] datax;
    delete [] datay;
    delete [] dataz;
    delete [] predatax;
    delete [] predatay;
    delete [] predataz;
    delete [] traindatax;
    delete [] traindatay;
    delete [] traindataz;
    delete [] wantedx;
    delete [] wantedy;
    delete [] wantedz;
}
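// A minimal sketch of driving the exfile pipeline. The constructor arguments
// and file names below are hypothetical placeholders, not values taken from
// this project; compile with -DEXFILE_DEMO to try it. In the full program, a
// CBackProp net would be trained on the windows built here and
// exfile::update() called with its predictions until gotit is set.
#ifdef EXFILE_DEMO
int main()
{
    exfile ef(0.04, 1.0, 1.0, 1.0,   // sampling interval, normalization factors
              0.8, 0.8, 0.6,         // per-axis velocity-loss factors for the bounce
              0.2, 0.0, 0.0);        // catch height, x/y compensation
    if(!ef.opendata((char*)"trajectory.txt"))    // hypothetical input file
        return 1;
    for(int n=0;n<20;n++)            // read a fixed number of tracked points
        ef.readdata(NULL);
    ef.writedata((char*)"predicted.txt");        // hypothetical output file
    return 0;
}
#endif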
// initializes and allocates memory on heap
CBackProp::CBackProp(int nl,int *sz,double b,double a):beta(b),alpha(a)
{
    // set number of layers and their sizes
    numl=nl;
    lsize=new int[numl];
    for(int i=0;i<numl;i++){
        lsize[i]=sz[i];
    }
    // allocate memory for output of each neuron
    out = new double*[numl];
    for(int i=0;i<numl;i++){
        out[i]=new double[lsize[i]];
    }
    // allocate memory for delta
    delta = new double*[numl];
    for(int i=1;i<numl;i++){
        delta[i]=new double[lsize[i]];
    }
    // allocate memory for weights (one extra slot per neuron for the bias)
    weight = new double**[numl];
    for(int i=1;i<numl;i++){
        weight[i]=new double*[lsize[i]];
    }
    for(int i=1;i<numl;i++){
        for(int j=0;j<lsize[i];j++){
            weight[i][j]=new double[lsize[i-1]+1];
        }
    }
    // allocate memory for previous weight changes (used for momentum)
    prevDwt = new double**[numl];
    for(int i=1;i<numl;i++){
        prevDwt[i]=new double*[lsize[i]];
    }
    for(int i=1;i<numl;i++){
        for(int j=0;j<lsize[i];j++){
            prevDwt[i][j]=new double[lsize[i-1]+1];
        }
    }
    // seed and assign random weights, uniform in [-1, 1]
    srand((unsigned)(time(NULL)));
    for(int i=1;i<numl;i++)
        for(int j=0;j<lsize[i];j++)
            for(int k=0;k<lsize[i-1]+1;k++)
                weight[i][j][k]=(double)(rand())/(RAND_MAX/2) - 1;  // RAND_MAX is 32767 on some platforms
    // initialize previous weight changes to 0 for the first iteration
    for(int i=1;i<numl;i++)
        for(int j=0;j<lsize[i];j++)
            for(int k=0;k<lsize[i-1]+1;k++)
                prevDwt[i][j][k]=(double)0.0;
    // Note that the following variables are unused:
    //
    //     delta[0]
    //     weight[0]
    //     prevDwt[0]
    //
    // This is intentional, to keep the layer numbering consistent: for a net
    // with n layers, the input layer is referred to as the 0th layer, the
    // first hidden layer as the 1st layer, and the nth layer as the output
    // layer. The 0th layer just stores the inputs, so there are no delta or
    // weight values corresponding to it.
}
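// For illustration, a net with 4 inputs, one hidden layer of 8 neurons and 1
// output (the sizes, learning rate and momentum here are illustrative, not
// values prescribed by this project) would be created as:
//
//     int sz[]={4,8,1};
//     CBackProp bp(3,sz,0.3,0.1);   // beta=0.3 (learning rate), alpha=0.1 (momentum)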
CBackProp::~CBackProp()
{
    // free out
    for(int i=0;i<numl;i++)
        delete[] out[i];
    delete[] out;
    // free delta
    for(int i=1;i<numl;i++)
        delete[] delta[i];
    delete[] delta;
    // free weight
    for(int i=1;i<numl;i++)
        for(int j=0;j<lsize[i];j++)
            delete[] weight[i][j];
    for(int i=1;i<numl;i++)
        delete[] weight[i];
    delete[] weight;
    // free prevDwt
    for(int i=1;i<numl;i++)
        for(int j=0;j<lsize[i];j++)
            delete[] prevDwt[i][j];
    for(int i=1;i<numl;i++)
        delete[] prevDwt[i];
    delete[] prevDwt;
    // free layer info
    delete[] lsize;
}
// sigmoid function
double CBackProp::sigmoid(double in)
{
    return (double)((1-exp(-in))/(1+exp(-in))); // in can be at most ~709; exp overflows at in=710
}
// If the asymmetric sigmoid 1/(1+exp(-in)) is used, the y component cannot be
// predicted; with this symmetric sigmoid (1-exp(-in))/(1+exp(-in)), the
// Thresh values for x and z are constrained instead.
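// Note: (1-exp(-in))/(1+exp(-in)) is tanh(in/2), whose derivative is
// (1 - f(in)^2)/2 -- that is where the factor 0.5 in the delta terms of
// bpgt() below comes from.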
// mean square error (actually half the sum of squared errors over the output layer)
double CBackProp::mse(double *tgt) const
{
    double mse=0;
    for(int i=0;i<lsize[numl-1];i++){
        mse+=(tgt[i]-out[numl-1][i])*(tgt[i]-out[numl-1][i]);
    }
    return mse/2;
}
// returns the i'th output of the net
double CBackProp::Out(int i) const
{
    return out[numl-1][i];
}
// feed forward one set of inputs
void CBackProp::ffwd(double *in)
{
    double sum;
    // assign contents to the input layer
    for(int i=0;i<lsize[0];i++)
        out[0][i]=in[i];                        // out[i][j]: output of the jth neuron in the ith layer
    // assign the output (activation) value of each neuron using the sigmoid
    for(int i=1;i<numl;i++){                    // for each layer
        for(int j=0;j<lsize[i];j++){            // for each neuron in the current layer
            sum=0.0;
            for(int k=0;k<lsize[i-1];k++){      // for input from each neuron in the preceding layer
                sum+=out[i-1][k]*weight[i][j][k];   // apply weights to inputs and accumulate
            }
            sum+=weight[i][j][lsize[i-1]];      // apply bias
            out[i][j]=sigmoid(sum);             // apply sigmoid activation
        }
    }
}
// backpropagate errors from the output
// layer back to the first hidden layer
void CBackProp::bpgt(double *in,double *tgt)
{
    double sum;
    // update output values for each neuron
    ffwd(in);
    // find delta for the output layer; 0.5*(1-out^2) is the derivative of the
    // symmetric sigmoid above
    for(int i=0;i<lsize[numl-1];i++){
        delta[numl-1][i]=0.5*(1-out[numl-1][i]*out[numl-1][i])*(tgt[i]-out[numl-1][i]);
    }
    // find delta for the hidden layers
    for(int i=numl-2;i>0;i--){
        for(int j=0;j<lsize[i];j++){
            sum=0.0;
            for(int k=0;k<lsize[i+1];k++){
                sum+=delta[i+1][k]*weight[i+1][k][j];
            }
            delta[i][j]=0.5*(1-out[i][j]*out[i][j])*sum;
        }
    }
    // apply momentum (does nothing if alpha=0)
    for(int i=1;i<numl;i++){
        for(int j=0;j<lsize[i];j++){
            for(int k=0;k<lsize[i-1];k++){
                weight[i][j][k]+=alpha*prevDwt[i][j][k];
            }
            weight[i][j][lsize[i-1]]+=alpha*prevDwt[i][j][lsize[i-1]];
        }
    }
    // adjust weights using steepest descent
    for(int i=1;i<numl;i++){
        for(int j=0;j<lsize[i];j++){
            for(int k=0;k<lsize[i-1];k++){
                prevDwt[i][j][k]=beta*delta[i][j]*out[i-1][k];
                weight[i][j][k]+=prevDwt[i][j][k];
            }
            prevDwt[i][j][lsize[i-1]]=beta*delta[i][j];
            weight[i][j][lsize[i-1]]+=prevDwt[i][j][lsize[i-1]];
        }
    }
}
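// A minimal training-loop sketch for CBackProp. The topology, sample data,
// learning parameters and stopping threshold are hypothetical, not values
// from this project; compile with -DCBACKPROP_DEMO to try it.
#ifdef CBACKPROP_DEMO
int main()
{
    int sz[]={3,6,3};                       // illustrative 3-6-3 topology
    CBackProp bp(3,sz,0.3,0.1);             // beta=0.3, alpha=0.1
    double in[]={0.1,0.2,0.3};              // hypothetical training sample
    double tgt[]={0.2,0.3,0.4};             // targets within the sigmoid's (-1,1) range
    for(int epoch=0;epoch<10000;epoch++){
        bp.bpgt(in,tgt);                    // one forward + backward pass
        if(bp.mse(tgt)<1e-4)                // stop once the error is small enough
            break;
    }
    cout<<"output: "<<bp.Out(0)<<endl;
    return 0;
}
#endif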