⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 netlayer.cpp

📁 人工神经网络的c++实现源码,含实现神经网络层的类,神经网络的类,中文详细注释, 并含有使用示例,学习神经网络的好源码
💻 CPP
字号:
#include "Net.h"
#include <stdlib.h>
#include <math.h>
#include <string.h>

using namespace NeuralNetwork;

// Constant output of the bias unit (slot 0 of every layer's output array).
static const float BIAS = 1.0f;

// Return a uniformly distributed random value in the range [Low, High].
// Uses rand()/RAND_MAX; callers are responsible for seeding via srand().
static real RandomEqualReal(real Low, real High)
{
    return ((real) rand() / RAND_MAX) * (High-Low) + Low;
}

// Constructor: build a layer with `initUnits` neurons on top of
// `initPrevLayer` (pass null for the input layer).
// Unit indices run 1..units; index 0 is the bias unit (output fixed to BIAS).
// NOTE(review): the class manages five raw arrays but (from what is visible
// here) no copy constructor / assignment operator — copying a NetLayer would
// double-free. Confirm against Net.h.
NetLayer::NetLayer(const int initUnits,
		   NetLayer* initPrevLayer)
    : units(initUnits), prevLayer(initPrevLayer)
{
    // Allocate storage for the output values and the error values.
    // Size is units+1 because slot 0 is reserved for the bias unit.
    output = new real[units+1];
    error  = new real[units+1];
    if (prevLayer)
    {
        // A previous layer exists, so allocate storage for the weights.
        // Each unit has one weight per previous-layer unit plus one for
        // the previous layer's bias unit.
	weightsPerUnit = prevLayer->getUnits()+1;
	int weightArraySize = (units+1)*weightsPerUnit;
	weight     = new real[weightArraySize];   // current weights
	weightSave = new real[weightArraySize];   // snapshot buffer (used elsewhere; only deleted here)
	dWeight    = new real[weightArraySize];   // last weight deltas, for momentum
    }
    else
    {
	// Input layer: no incoming weights.
	weightsPerUnit = 0;
	weight = 0;
	weightSave = 0;
	dWeight = 0;
    }
    // Scratch buffer reused by adjustWeights() (indices 1..weightsPerUnit).
    weightIntermediate = new real[weightsPerUnit+1];
    output[0] = BIAS;
}

// Constructor: restore a layer from a stream previously written by save().
// Reads the unit count, then (for non-input layers) the raw weight array.
// readRaw/readRawArray are presumably declared in Net.h — confirm format.
NetLayer::NetLayer(std::istream& in, NetLayer* initPrevLayer)
    : prevLayer(initPrevLayer)
{
    readRaw(in, units);
    output = new real[units+1];
    error  = new real[units+1];
    if (prevLayer)
    {
	weightsPerUnit = prevLayer->getUnits()+1;
	int weightArraySize = (units+1)*weightsPerUnit;
	weight     = new real[weightArraySize];
	weightSave = new real[weightArraySize];
	dWeight    = new real[weightArraySize];
	// Zero the deltas (and weights) before overwriting the weights
	// from the stream, so dWeight starts clean for training.
	clearWeights();
	readRawArray(in, weight, weightArraySize);
    }
    else
    {
	weightsPerUnit = 0;
	weight = 0;
	weightSave = 0;
	dWeight = 0;
    }
    weightIntermediate = new real[weightsPerUnit+1];
    output[0] = BIAS;
}

// Serialize the layer: unit count, then the raw weight array (skipped for
// the input layer). Mirrors the stream constructor above.
void NetLayer::save(std::ostream& out)
{
    writeRaw(out, units);
    if (prevLayer)
    {
	writeRawArray(out, weight, (units+1)*weightsPerUnit);
    }
}

// Destructor: release all owned arrays.
// Safe after doneTraining(): that method nulls error/dWeight/weightSave,
// and delete[] on a null pointer is a no-op.
NetLayer::~NetLayer()
{
    delete[] output;
    delete[] error;
    if (weight)
    {
	delete[] weight;
	delete[] weightSave;
	delete[] dWeight;
    }
    delete [] weightIntermediate;
}

// Initialize all weights uniformly in [-0.5, 0.5] and zero the momentum
// deltas. On the input layer the loop bound is 0, so this is a no-op even
// though weight is null.
void NetLayer::randomizeWeights()
{
    for (int i=0; i < (units+1)*weightsPerUnit; i++)
    {
	weight[i]  = RandomEqualReal(-0.5, 0.5);
	dWeight[i] = 0;
    }
}

// Zero all weights and momentum deltas (no-op on the input layer, as above).
void NetLayer::clearWeights()
{
    for (int i=0; i < (units+1)*weightsPerUnit; i++)
    {
	weight[i]  = 0;
	dWeight[i] = 0;
    }
}

// Forward pass: for each unit, compute the weighted sum of the previous
// layer's outputs (including its bias output at index 0) and squash it
// with the logistic sigmoid 1/(1+e^(-gain*sum)).
// The walk `currentWeight++` from &getWeight(1,0) assumes getWeight(u,w)
// addresses weight[u*weightsPerUnit + w] (row-major, row 0 unused for the
// bias unit) — inferred from this code; confirm against Net.h.
void NetLayer::propagate(real gain)
{
    real* currentWeight = &getWeight (1,0);
    for (int unitNum=1; unitNum <= units; unitNum++)
    {
	real sum = 0;
	for (int outputNum=0; outputNum < weightsPerUnit; outputNum++) {
	    sum += *currentWeight * prevLayer->getOutput(outputNum);
	    currentWeight++;
	}
	output[unitNum] = 1 / (1 + exp(-gain * sum));
    }
}

// Backward pass: propagate this layer's error terms to the previous layer.
// For each previous-layer unit, the back-propagated error is the weighted
// sum of this layer's errors, scaled by the sigmoid derivative
// gain * out * (1 - out) of that previous unit's output.
// The bias unit (index 0) is deliberately skipped — it has no error.
void NetLayer::backpropagate(real gain)
{
    int prevUnits = prevLayer->getUnits();
    for (int prevUnitNum=prevUnits; prevUnitNum != 0; prevUnitNum--) {
	real out = prevLayer->getOutput(prevUnitNum);
	real err = 0;
	for (int unitNum=1; unitNum <= units; unitNum++) {
	    err += getWeight(unitNum, prevUnitNum) * error[unitNum];
	}
	prevLayer->setError(prevUnitNum, gain * out * (1-out) * err);
    }
}

// Compute the output layer's error terms against `target` and return the
// total squared error 0.5 * sum((target - output)^2).
// target[] is indexed from 0 while output/error are indexed from 1 (slot 0
// is the bias), hence the +0 / +1 pointer offsets.
// Each error term is scaled by the sigmoid derivative gain*out*(1-out).
real NetLayer::computeError(real gain, real target[])
{
    real out, err;
    real totalError = 0;
    real* currentTarget = target + 0;
    real* currentOutput = output + 1;
    real* currentError  = error  + 1;
    for(int unitNum=units; unitNum != 0; unitNum--)
    {
	out = *currentOutput;
	err = *currentTarget - out;
	*currentError = gain * out * (1-out) * err;
	totalError += err*err;
	currentOutput++;
	currentTarget++;
	currentError++;
    }
    return 0.5*totalError;
}

// Gradient-descent weight update with momentum:
//   dW_new = learningRate * prevOutput * error;  W += dW_new + momentum * dW_old.
// weightIntermediate[w+1] caches learningRate * prevOutput(w) once, so the
// inner loop avoids recomputing it per unit. Both weight pointers start at
// the last element — &getWeight(units, prevUnits) — and are walked backwards
// in lockstep with the (unitNum, prevUnitNum) loops; this again assumes the
// row-major weight[u*weightsPerUnit + w] layout noted at propagate().
void NetLayer::adjustWeights(real momentum, real learningRate)
{
    int localWeightsPerUnit = weightsPerUnit;
    real* currentWeight  = &getWeight (units,prevLayer->getUnits());
    real* currentDWeight = &getDWeight (units,prevLayer->getUnits());
    // Precompute learningRate * prevOutput for every previous-layer unit
    // (shifted by one so index 1..weightsPerUnit matches the reverse walk).
    for (int prevUnitNum=0; prevUnitNum < weightsPerUnit; prevUnitNum++) {
	weightIntermediate[prevUnitNum+1] = learningRate * prevLayer->getOutput(prevUnitNum);
    }
    for (int unitNum=units; unitNum != 0; unitNum--) {
	for (int prevUnitNum=localWeightsPerUnit; prevUnitNum != 0; prevUnitNum--) {
	    real newDeltaWeight = weightIntermediate[prevUnitNum] * error[unitNum];
	    real oldDeltaWeight = *currentDWeight;
	    *currentDWeight  = newDeltaWeight;
	    *currentWeight  += newDeltaWeight + momentum*oldDeltaWeight;
	    currentDWeight--;
	    currentWeight--;
	}
    }
}

// Free the training-only buffers once training is finished, keeping only
// what inference (propagate) needs. Pointers are nulled so the destructor's
// later delete[] calls are harmless no-ops.
void NetLayer::doneTraining()
{
    delete[] error;
    error = 0;
    delete[] dWeight;
    dWeight = 0;
    delete[] weightSave;
    weightSave = 0;
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -