📄 rnn.cc
}

/* Function: copy
   Purpose:  copy an RNN passed to the function
   Arguments: R: an RNN to copy */
void RNN::copy(RNN R)
{
    int i, j;

    if (wp != (float **) NULL) free_fmat(wp, N, N);
    if (wm != (float **) NULL) free_fmat(wm, N, N);
    if (w != (float **) NULL) free_fmat(w, N, N);
    if (C != (u_char **) NULL) free_cmat(C, N, (N + 7) / 8);
    delete GOP;
    delete typ;
    delete q;
    delete den;
    delete r;
    delete lp;
    delete lm;
    new_rnn(R.IP, R.HN, R.OP, R.eta, R.rate, R.RNNTYPE);
    for (i = 0; i < N; i++)
        for (j = 0; j < N; j++) {
            wp[i][j] = R.wp[i][j];
            wm[i][j] = R.wm[i][j];
        }
    for (i = 0; i < N; i++)
        for (j = 0; j < (N + 7) / 8; j++)
            C[i][j] = R.C[i][j];
}

/* Function: comp
   Purpose:  compute the sum-squared difference between the current RNN
             and RNN R; each network's net weights (wp - wm) are scaled
             by its largest absolute net weight before comparison
   Arguments: R: an RNN to compare to the current RNN */
float RNN::comp(RNN R)
{
    int i, j;
    float mx1, mx2;
    float err = 0, errtmp;

    mx1 = fabs(wp[0][0] - wm[0][0]);
    mx2 = fabs(R.wp[0][0] - R.wm[0][0]);
    if (N != R.N)
        return 0;
    for (i = 0; i < N; i++)
        for (j = 0; j < N; j++) {
            if (fabs(wp[i][j] - wm[i][j]) > mx1)
                mx1 = fabs(wp[i][j] - wm[i][j]);
            if (fabs(R.wp[i][j] - R.wm[i][j]) > mx2)
                mx2 = fabs(R.wp[i][j] - R.wm[i][j]);
        }
    for (i = 0; i < N; i++)
        for (j = 0; j < N; j++) {
            errtmp = (wp[i][j] - wm[i][j]) / mx1
                   - (R.wp[i][j] - R.wm[i][j]) / mx2;
            err += errtmp * errtmp;
        }
    return err;
}

/* Function: init
   Purpose:  initialize a previously undefined RNN
   Arguments: ip: number of input neurons
              hn: number of hidden neurons
              op: number of output neurons
              ETA: learning rate
              RATE: output rate for output neurons
              rnntype: architecture type for the RNN */
void RNN::init(int ip, int hn, int op, float ETA, float RATE, u_char rnntype)
{
    new_rnn(ip, hn, op, ETA, RATE, rnntype);
}

/* Function: setETA
   Purpose:  set the value of the learning rate
   Arguments: e: new learning rate */
void RNN::setETA(float e)
{
    eta = e;
}

/* Function: SNR
   Purpose:  compute the PSNR of the difference between the current
             network o/p and the defined "good" output (the network's GOP)
   Arguments: none; assumes the RNN output has been computed and the
              desired o/p has been set */
float RNN::SNR()
{
    int k;
    float err = 0;

    for (k = 0; k < OP; k++)
        err += (GOP[k] - q[N - OP + k]) * (GOP[k] - q[N - OP + k]);
    err = 10.0 * log10(OP / err);
    return err;
}

/* Function: MSE
   Purpose:  compute the MSE of the difference between the current
             network o/p and the defined "good" output (the network's GOP)
   Arguments: none; assumes the RNN output has been computed and the
              desired o/p has been set */
float RNN::MSE()
{
    int k;
    float err = 0;

    for (k = 0; k < OP; k++)
        err += (GOP[k] - q[N - OP + k]) * (GOP[k] - q[N - OP + k]);
    return err / (float)OP;
}

/* Function: display
   Purpose:  display an RNN (weights and all)
   Arguments: none */
void RNN::display()
{
    int i, j;

    printf("Input: %3i\n", IP);
    printf("Hidden: %3i\n", HN);
    printf("Output: %3i\n", OP);
    printf("Total: %3i\n", N);
    printf("Eta: %8.5f\n", eta);
    printf("Rate: %8.5f\n", rate);
    printf("WP:\n");
    for (i = 0; i < N; i++) {
        for (j = 0; j < N; j++)
            printf("%8.5f ", wp[i][j]);
        printf("\n");
    }
    printf("\n\n");
    printf("WM:\n");
    for (i = 0; i < N; i++) {
        for (j = 0; j < N; j++)
            printf("%8.5f ", wm[i][j]);
        printf("\n");
    }
    printf("\n\n");
    printf("Connections:\n");
    for (i = 0; i < N; i++) {
        for (j = 0; j < N; j++)
            printf("%1i ", (C[i][j / 8] & (1 << (j % 8))) >> (j % 8));
        printf("\n");
    }
    printf("\n\n");
}

/* Function: train
   Purpose:  adjust the weights so that the network o/p resembles GOP
   Arguments: none; assumes that the input values and desired o/p
              values have been set */
void RNN::train()
{
    computeop();
    if (RNNTYPE == 0) {         /* feed-forward architecture */
        ff_inv();
        ff_computews();
    } else {                    /* general (recurrent) architecture */
        gen_inv();
        gen_computews();
    }
}
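/* Usage sketch (not part of the original source): a minimal training
   loop built from the public API in this file. The architecture sizes,
   learning rate, iteration cap and error threshold are illustrative
   assumptions, as is the use of RNNTYPE 0 for a feed-forward net. */
void example_train(float *LP, float *LM, float *target)
{
    RNN net;
    int e;

    net.init(4, 8, 2, 0.1, 1.0, 0);   /* 4 i/p, 8 hidden, 2 o/p, feed-forward */
    net.setinputvector(4, LP, LM);    /* external excitation/inhibition inputs */
    net.setGOPvector(2, target);      /* desired outputs */
    for (e = 0; e < 1000; e++) {
        net.train();                  /* computeop() followed by a weight update */
        if (net.MSE() < 1e-4)
            break;                    /* stop once the output is close enough */
    }
}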
/* Function: computeop
   Purpose:  compute the network's o/p based on the current i/p
   Arguments: none; assumes the input values have been set */
void RNN::computeop()
{
    if (RNNTYPE == 0)
        ffop();
    else
        genop();
}

/* Function: computeop
   Purpose:  compute the network's o/p quantized to log2(quant + 1) bits,
             i.e. a quant of 255 gives 8-bit quantization
   Arguments: quant: amount of quantization */
void RNN::computeop(int quant)
{
    if (RNNTYPE == 0)
        ffop(quant);
    else
        genop(quant);
}

/* Function: randomweights
   Purpose:  randomize the weights; useful for networks defining their
             own connections
   Arguments: none; assumes the network architecture is defined */
void RNN::randomweights()
{
    int i, j;

    for (i = 0; i < N; i++)
        for (j = 0; j < N; j++)
            if (C[i][j / 8] & (1 << (j % 8))) {   /* only weights of existing connections */
                wm[i][j] = (float)(rand() % 5000) / 50000.;
                wp[i][j] = (float)(rand() % 5000) / 50000.;
            }
}

/* Function: setinput
   Purpose:  define the inputs for a specific neuron
   Arguments: i:  neuron number
              LP: (lambda plus) external excitation input
              LM: (lambda minus) external inhibition input */
void RNN::setinput(int i, float LP, float LM)
{
    lp[i] = LP;
    lm[i] = LM;
}

/* Function: setinputvector
   Purpose:  set a vector of inputs at a time
   Arguments: n:  number of elements in the vector
              LP: vector of excitation inputs
              LM: vector of inhibition inputs */
void RNN::setinputvector(int n, float *LP, float *LM)
{
    memcpy(lp, LP, n * sizeof(float));
    memcpy(lm, LM, n * sizeof(float));
}

/* Function: getop
   Purpose:  get the output value of the i-th output neuron
   Arguments: i: which output neuron to "probe" (starts at 0)
   Output:   value of the i-th output neuron */
float RNN::getop(int i)
{
    return q[i + IP + HN];
}

/* Function: getop2
   Purpose:  get the output value of the i-th neuron; neurons are stored
             in [i/p hidden o/p] order
   Arguments: i: which neuron to probe (between 0 and the total number of
              i/p + hidden + o/p neurons)
   Output:   value of the i-th neuron */
float RNN::getop2(int i)
{
    return q[i];
}

/* Function: setGOP
   Purpose:  set the desired output of the i-th output neuron
   Arguments: i:   which o/p neuron to change the desired o/p of
              val: desired value for the i-th neuron */
void RNN::setGOP(int i, float val)
{
    GOP[i] = val;
}

/* Function: setGOPvector
   Purpose:  set the desired output of the first n output neurons
   Arguments: n:   number of output neurons to change the value of
              val: vector of neuron values */
void RNN::setGOPvector(int n, float *val)
{
    memcpy(GOP, val, n * sizeof(float));
}

/* Function: connect
   Purpose:  make a connection between two neurons
   Arguments: i: "from" neuron
              j: "to" neuron
   Returns:  0 on success, -1 for a self-connection, -2 if an index is
             out of range */
int RNN::connect(int i, int j)
{
    if ((i < N) && (j < N) && (i != j)) {
        C[i][j / 8] |= (1 << (j % 8));
        return 0;
    }
    if (i == j)
        return -1;
    return -2;
}

/* Function: save
   Purpose:  save the RNN to the file named fn
   Arguments: fn: file name to save the RNN to */
void RNN::save(char *fn)
{
    FILE *f;
    int k, j;

    if ((f = fopen(fn, "wb")) == NULL) {
        printf("Cannot open output file!\n");
        exit(-1);
    }
    fwrite(&RNNTYPE, sizeof(u_char), 1, f);
    fwrite(&IP, sizeof(int), 1, f);
    fwrite(&HN, sizeof(int), 1, f);
    fwrite(&OP, sizeof(int), 1, f);
    fwrite(&eta, sizeof(float), 1, f);
    fwrite(&rate, sizeof(float), 1, f);
    for (k = 0; k < N; k++)
        fwrite(C[k], sizeof(u_char), (N + 7) / 8, f);
    for (k = 0; k < N; k++)
        for (j = 0; j < N; j++)
            if (C[k][j / 8] & (1 << (j % 8))) {   /* store only connected weights */
                fwrite(&(wp[k][j]), sizeof(float), 1, f);
                fwrite(&(wm[k][j]), sizeof(float), 1, f);
            }
    fclose(f);
}

/* Internal function: reverse the byte order of a 4-byte float */
float strange_float(float x)
{
    char *y, *z;
    float t;

    y = (char *)(&x);
    z = (char *)(&t);
    z[0] = y[3];
    z[1] = y[2];
    z[2] = y[1];
    z[3] = y[0];
    return t;
}

/* Internal function: reverse the byte order of a 4-byte int */
int strange_int(int x)
{
    char *y, *z;
    int t;

    y = (char *)(&x);
    z = (char *)(&t);
    z[0] = y[3];
    z[1] = y[2];
    z[2] = y[1];
    z[3] = y[0];
    return t;
}
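/* Self-check sketch (not part of the original source): strange_int()
   and strange_float() simply reverse the byte order of a 4-byte value,
   so a double swap is the identity, and the reversed constant below
   holds regardless of the host's byte order. */
static void check_byteswap(void)
{
    if (strange_int(0x01020304) != 0x04030201 ||
        strange_int(strange_int(12345)) != 12345)
        printf("byte-swap self-check failed\n");
}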
/* Function: load_strange
   Purpose:  load an RNN from the file named fn, reversing the endianness
             of the stored values; useful for conversion programs between
             PCs and workstations
   Arguments: fn: file name to load from
   Notes:    the I/O really needs to be rewritten to be generic across
             all machines */
int RNN::load_strange(char *fn)
{
    FILE *f;
    int i, j, k;
    float l, m;
    u_char rnntype;

    if ((f = fopen(fn, "rb")) == NULL)
        return 0;
    fread(&rnntype, sizeof(rnntype), 1, f);
    fread(&i, sizeof(i), 1, f); i = strange_int(i);
    fread(&j, sizeof(j), 1, f); j = strange_int(j);
    fread(&k, sizeof(k), 1, f); k = strange_int(k);
    fread(&l, sizeof(float), 1, f); l = strange_float(l);
    fread(&m, sizeof(float), 1, f); m = strange_float(m);
    new_rnn(i, j, k, l, m, rnntype);
    for (k = 0; k < N; k++)
        fread(C[k], sizeof(u_char), (N + 7) / 8, f);
    for (k = 0; k < N; k++)
        for (j = 0; j < N; j++)
            if (C[k][j / 8] & (1 << (j % 8))) {
                fread(&(wp[k][j]), sizeof(float), 1, f);
                fread(&(wm[k][j]), sizeof(float), 1, f);
                wp[k][j] = strange_float(wp[k][j]);
                wm[k][j] = strange_float(wm[k][j]);
            }
    fclose(f);
    return 1;
}

/* Function: load
   Purpose:  load an RNN from the file named fn
   Arguments: fn: file name to load from */
int RNN::load(char *fn)
{
    FILE *f;
    int i, j, k;
    float l, m;
    u_char rnntype;

    if ((f = fopen(fn, "rb")) == NULL)
        return 0;
    fread(&rnntype, sizeof(rnntype), 1, f);
    fread(&i, sizeof(i), 1, f);
    fread(&j, sizeof(j), 1, f);
    fread(&k, sizeof(k), 1, f);
    fread(&l, sizeof(float), 1, f);
    fread(&m, sizeof(float), 1, f);
    new_rnn(i, j, k, l, m, rnntype);
    for (k = 0; k < N; k++)
        fread(C[k], sizeof(u_char), (N + 7) / 8, f);
    for (k = 0; k < N; k++)
        for (j = 0; j < N; j++)
            if (C[k][j / 8] & (1 << (j % 8))) {
                fread(&(wp[k][j]), sizeof(float), 1, f);
                fread(&(wm[k][j]), sizeof(float), 1, f);
            }
    fclose(f);
    return 1;
}

/* Function: dispop
   Purpose:  display the desired and current o/p of the output neurons
   Arguments: none */
void RNN::dispop()
{
    int i;

    for (i = 0; i < OP; i++)
        printf("%8.5f ", GOP[i]);
    printf(": ");
    for (i = 0; i < OP; i++)
        printf("%8.5f ", q[i + IP + HN]);
    printf("\n");
}

/* Function: dispop2
   Purpose:  display the excitation inputs of the i/p neurons and the
             o/p of every neuron in the network
   Arguments: none */
void RNN::dispop2()
{
    int i;

    for (i = 0; i < IP; i++)
        printf("%8.5f ", lp[i]);
    printf(": ");
    for (i = 0; i < N; i++)
        printf("%8.5f ", q[i]);
    printf("\n");
}

/* Function: decompress
   Purpose:  compute the o/p of the output neurons when the hidden
             neurons are set to the vector h
   Arguments: h: floating-point vector of values for the hidden neurons */
void RNN::decompress(float *h)
{
    int i, j;

    for (i = 0; i < HN; i++)
        q[IP + i] = h[i];
    for (i = IP + HN; i < N; i++) {
        r[i] = rate;
        den[i] = r[i];
        num[i] = 0;
        for (j = IP; j < IP + HN; j++) {
            num[i] += (float)q[j] * (float)wp[j][i];
            den[i] += (float)q[j] * (float)wm[j][i];
        }
        q[i] = num[i] / den[i];   /* steady-state firing probability */
        if (q[i] > 1)
            q[i] = 1;             /* clamp to a valid probability */
    }
}

/* Function: getArch
   Purpose:  return the architecture of the RNN
   Arguments: s: integer vector to store the number of i/p, hidden and
              o/p neurons in */
void RNN::getArch(int *s)
{
    s[0] = IP;
    s[1] = HN;
    s[2] = OP;
}
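/* Usage sketch (not part of the original source): persisting a trained
   network and reading it back; the file name is illustrative. comp()
   then reports the normalized sum-squared weight difference between
   the saved and reloaded networks. */
void example_saveload(RNN &net)
{
    RNN net2;

    net.save((char *)"net.rnn");      /* writes type, sizes, rates, C matrix and weights */
    if (net2.load((char *)"net.rnn"))
        printf("round-trip difference: %f\n", net2.comp(net));
    else
        printf("load failed\n");
}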