bpn.c
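/* Excerpt from bpn.c, a feed-forward backpropagation network applied to
   sunspot prediction. The types (NET, LAYER, REAL, INT, BOOL), constants
   (NUM_LAYERS, N, M, TRAIN_LWB, TRAIN_UPB, ...), globals (Sunspots,
   Sunspots_, TrainError, TestError, the log file f, ...) and helpers
   (RandomEqualREAL, GenerateNetwork, InitializeApplication, ...) are
   assumed to be declared earlier in the file; they are not shown in this
   excerpt. The file is also assumed to include <stdio.h>, <stdlib.h> and
   <math.h> for fprintf, rand and exp. */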
/* Initialize all weights to uniform random values in [-0.5, 0.5]. */
void RandomWeights(NET* Net)
{
  INT l,i,j;

  for (l=1; l<NUM_LAYERS; l++) {
    for (i=1; i<=Net->Layer[l]->Units; i++) {
      for (j=0; j<=Net->Layer[l-1]->Units; j++) {
        Net->Layer[l]->Weight[i][j] = RandomEqualREAL(-0.5, 0.5);
      }
    }
  }
}
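/* RandomEqualREAL() is one of the helpers defined outside this excerpt.
   A minimal sketch of a uniform generator with that signature, assuming
   the C library's rand()/RAND_MAX (from <stdlib.h>) is acceptable as a
   source of randomness: */

REAL RandomEqualREAL(REAL Low, REAL High)
{
  /* Scale rand() into [0,1], then map that onto [Low, High]. */
  return Low + ((REAL) rand() / RAND_MAX) * (High - Low);
}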
/* Copy an external input vector into the input layer's units. */
void SetInput(NET* Net, REAL* Input)
{
  INT i;

  for (i=1; i<=Net->InputLayer->Units; i++) {
    Net->InputLayer->Output[i] = Input[i-1];
  }
}
/* Copy the output layer's units into an external output vector. */
void GetOutput(NET* Net, REAL* Output)
{
  INT i;

  for (i=1; i<=Net->OutputLayer->Units; i++) {
    Output[i-1] = Net->OutputLayer->Output[i];
  }
}
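/* Note the indexing convention: units are numbered 1..Units, while the
   external arrays are 0-based (hence Input[i-1] and Output[i-1]). Slot 0
   of each layer's Output array is reserved for a constant bias input,
   presumably set up in GenerateNetwork(); that is why the weight loops
   in this file run from j = 0 rather than j = 1. */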
/******************************************************************************
        S U P P O R T   F O R   S T O P P E D   T R A I N I N G
 ******************************************************************************/
/* Snapshot the current weights (used for stopped training, see main). */
void SaveWeights(NET* Net)
{
  INT l,i,j;

  for (l=1; l<NUM_LAYERS; l++) {
    for (i=1; i<=Net->Layer[l]->Units; i++) {
      for (j=0; j<=Net->Layer[l-1]->Units; j++) {
        Net->Layer[l]->WeightSave[i][j] = Net->Layer[l]->Weight[i][j];
      }
    }
  }
}
/* Restore the most recently saved weight snapshot. */
void RestoreWeights(NET* Net)
{
  INT l,i,j;

  for (l=1; l<NUM_LAYERS; l++) {
    for (i=1; i<=Net->Layer[l]->Units; i++) {
      for (j=0; j<=Net->Layer[l-1]->Units; j++) {
        Net->Layer[l]->Weight[i][j] = Net->Layer[l]->WeightSave[i][j];
      }
    }
  }
}
/******************************************************************************
              P R O P A G A T I N G   S I G N A L S
 ******************************************************************************/
/* Forward-propagate one layer: weighted sum of the lower layer's outputs
   (including the bias at index 0), squashed through a logistic sigmoid. */
void PropagateLayer(NET* Net, LAYER* Lower, LAYER* Upper)
{
  INT i,j;
  REAL Sum;

  for (i=1; i<=Upper->Units; i++) {
    Sum = 0;
    for (j=0; j<=Lower->Units; j++) {
      Sum += Upper->Weight[i][j] * Lower->Output[j];
    }
    Upper->Output[i] = 1 / (1 + exp(-Net->Gain * Sum));
  }
}
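/* In equation form, each upper-layer unit i computes

     o_i = 1 / (1 + exp(-Gain * sum_j w_ij * o_j)),

   a logistic sigmoid whose steepness is controlled by Net->Gain. */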
/* Forward-propagate through all layers, input to output. */
void PropagateNet(NET* Net)
{
  INT l;

  for (l=0; l<NUM_LAYERS-1; l++) {
    PropagateLayer(Net, Net->Layer[l], Net->Layer[l+1]);
  }
}
/******************************************************************************
          B A C K P R O P A G A T I N G   E R R O R S
 ******************************************************************************/
/* Compute the output layer's error terms (deltas) against a target vector
   and accumulate the sum-of-squares error in Net->Error. */
void ComputeOutputError(NET* Net, REAL* Target)
{
  INT i;
  REAL Out, Err;

  Net->Error = 0;
  for (i=1; i<=Net->OutputLayer->Units; i++) {
    Out = Net->OutputLayer->Output[i];
    Err = Target[i-1]-Out;
    Net->OutputLayer->Error[i] = Net->Gain * Out * (1-Out) * Err;
    Net->Error += 0.5 * sqr(Err);
  }
}
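/* The factor Gain * Out * (1-Out) is the derivative of the logistic
   activation used in PropagateLayer(), so Error[i] is the standard
   backpropagation delta for an output unit. sqr() is assumed to be a
   squaring macro defined elsewhere in the file, e.g.:

     #define sqr(x) ((x)*(x))
*/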
/* Back-propagate error terms from an upper layer to the layer below it. */
void BackpropagateLayer(NET* Net, LAYER* Upper, LAYER* Lower)
{
  INT i,j;
  REAL Out, Err;

  for (i=1; i<=Lower->Units; i++) {
    Out = Lower->Output[i];
    Err = 0;
    for (j=1; j<=Upper->Units; j++) {
      Err += Upper->Weight[j][i] * Upper->Error[j];
    }
    Lower->Error[i] = Net->Gain * Out * (1-Out) * Err;
  }
}
/* Back-propagate errors from the output layer down to the first hidden
   layer. The loop stops at l > 1 because the input layer (layer 0) has
   no incoming weights and therefore needs no error terms. */
void BackpropagateNet(NET* Net)
{
  INT l;

  for (l=NUM_LAYERS-1; l>1; l--) {
    BackpropagateLayer(Net, Net->Layer[l], Net->Layer[l-1]);
  }
}
/* Update all weights by gradient descent with a momentum term: the new
   step is Eta * Err * Out plus Alpha times the previous step. */
void AdjustWeights(NET* Net)
{
  INT l,i,j;
  REAL Out, Err, dWeight;

  for (l=1; l<NUM_LAYERS; l++) {
    for (i=1; i<=Net->Layer[l]->Units; i++) {
      for (j=0; j<=Net->Layer[l-1]->Units; j++) {
        Out = Net->Layer[l-1]->Output[j];
        Err = Net->Layer[l]->Error[i];
        dWeight = Net->Layer[l]->dWeight[i][j];
        Net->Layer[l]->Weight[i][j] += Net->Eta * Err * Out + Net->Alpha * dWeight;
        Net->Layer[l]->dWeight[i][j] = Net->Eta * Err * Out;
      }
    }
  }
}
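/* In the usual notation this is

     dw_ij(t) = eta * delta_i * o_j + alpha * dw_ij(t-1),

   with learning rate Eta and momentum Alpha. Note that dWeight stores only
   the gradient part of the step, not the momentum contribution, so this is
   a slight variant of classical momentum. */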
/******************************************************************************
            S I M U L A T I N G   T H E   N E T
 ******************************************************************************/
/* Run one forward pass and compute the error; if Training is set,
   also run one backward pass and update the weights. */
void SimulateNet(NET* Net, REAL* Input, REAL* Output, REAL* Target, BOOL Training)
{
  SetInput(Net, Input);
  PropagateNet(Net);
  GetOutput(Net, Output);

  ComputeOutputError(Net, Target);
  if (Training) {
    BackpropagateNet(Net);
    AdjustWeights(Net);
  }
}
/* Train for Epochs passes over the training range, sampling years at
   random with replacement. */
void TrainNet(NET* Net, INT Epochs)
{
  INT Year, n;
  REAL Output[M];

  for (n=0; n<Epochs*TRAIN_YEARS; n++) {
    Year = RandomEqualINT(TRAIN_LWB, TRAIN_UPB);
    SimulateNet(Net, &(Sunspots[Year-N]), Output, &(Sunspots[Year]), TRUE);
  }
}
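/* Each training example is a sliding window over the Sunspots series: the
   N values preceding a year serve as input and the M values starting at
   that year serve as target. N, M, TRAIN_YEARS, TRAIN_LWB, TRAIN_UPB and
   RandomEqualINT are assumed to be defined elsewhere in the file. */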
/* Accumulate the error over the training and test ranges with the weights
   frozen, and report both as normalized mean squared errors. */
void TestNet(NET* Net)
{
  INT Year;
  REAL Output[M];

  TrainError = 0;
  for (Year=TRAIN_LWB; Year<=TRAIN_UPB; Year++) {
    SimulateNet(Net, &(Sunspots[Year-N]), Output, &(Sunspots[Year]), FALSE);
    TrainError += Net->Error;
  }
  TestError = 0;
  for (Year=TEST_LWB; Year<=TEST_UPB; Year++) {
    SimulateNet(Net, &(Sunspots[Year-N]), Output, &(Sunspots[Year]), FALSE);
    TestError += Net->Error;
  }
  fprintf(f, "\nNMSE is %0.3f on Training Set and %0.3f on Test Set",
             TrainError / TrainErrorPredictingMean,
             TestError / TestErrorPredictingMean);
}
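/* NMSE here means the error normalized by the error of always predicting
   the series mean (TrainErrorPredictingMean and TestErrorPredictingMean,
   presumably computed during initialization), so values below 1 indicate
   the net beats that trivial baseline. */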
/* Print actual sunspot levels next to open-loop predictions (true history
   as input) and closed-loop predictions (own predictions fed back). */
void EvaluateNet(NET* Net)
{
  INT Year;
  REAL Output [M];
  REAL Output_[M];

  fprintf(f, "\n\n\n");
  fprintf(f, "Year    Sunspots    Open-Loop Prediction    Closed-Loop Prediction\n");
  fprintf(f, "\n");
  for (Year=EVAL_LWB; Year<=EVAL_UPB; Year++) {
    SimulateNet(Net, &(Sunspots [Year-N]), Output,  &(Sunspots [Year]), FALSE);
    SimulateNet(Net, &(Sunspots_[Year-N]), Output_, &(Sunspots_[Year]), FALSE);
    Sunspots_[Year] = Output_[0];
    fprintf(f, "%d       %0.3f                   %0.3f                   %0.3f\n",
               FIRST_YEAR + Year,
               Sunspots[Year],
               Output [0],
               Output_[0]);
  }
}
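/* Sunspots_ presumably starts as a copy of Sunspots (set up during
   initialization); overwriting Sunspots_[Year] with Output_[0] makes later
   input windows contain the net's own predictions, which is what turns the
   second SimulateNet call into a closed-loop, multi-step-ahead forecast. */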
/******************************************************************************
                        M A I N
 ******************************************************************************/
/* Train with early stopping: keep the weights that minimize the test error
   and stop once the test error drifts 20% above that minimum. */
int main()
{
  NET  Net;
  BOOL Stop;
  REAL MinTestError;

  InitializeRandoms();
  GenerateNetwork(&Net);
  RandomWeights(&Net);
  InitializeApplication(&Net);

  Stop = FALSE;
  MinTestError = MAX_REAL;
  do {
    TrainNet(&Net, 10);
    TestNet(&Net);
    if (TestError < MinTestError) {
      fprintf(f, " - saving Weights ...");
      MinTestError = TestError;
      SaveWeights(&Net);
    }
    else if (TestError > 1.2 * MinTestError) {
      fprintf(f, " - stopping Training and restoring Weights ...");
      Stop = TRUE;
      RestoreWeights(&Net);
    }
  } while (NOT Stop);

  TestNet(&Net);
  EvaluateNet(&Net);
  FinalizeApplication(&Net);
  return 0;
}
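/* NOT, TRUE, FALSE and MAX_REAL are presumably simple macros from the
   surrounding file, e.g.:

     #define FALSE    0
     #define TRUE     1
     #define NOT      !
     #define MAX_REAL +HUGE_VAL

   The do-while loop implements the "stopped training" of the section
   above: TestNet() tracks the held-out error, SaveWeights() snapshots the
   best weights seen so far, and training halts once the test error exceeds
   that best value by more than 20%, restoring the snapshot so the final
   network is the least-overfit one. */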