io.c
if (ch2 != '\r') ungetc(ch2,wtfileptr);
};
}
#ifdef SYMMETRIC
*(w->weight) = wvalue;
if (fileformat == 'R')
{
*(w->olddw) = dvalue;
*(w->eta) = evalue;
}
else *(w->olddw) = 0;
#else
w->weight = wvalue;
w->inuse = wtinuse;
if (fileformat == 'R')
{
w->olddw = dvalue;
w->eta = evalue;
w->slope = svalue;
}
else w->olddw = 0;
#endif
w = w->next;
};
u = u->next;
};
layer = layer->next;
};
fclose(wtfileptr);
lastsave = totaliter;
return;
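/* error exits for the weight-file reader above */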
badread: pg(stdout,"\n>>>>> scanf read error <<<<<\n\n");
fclose(wtfileptr);
return;
unexpectedeof: pg(stdout,"\n>>>>> ran out of weights <<<<<\n\n");
fclose(wtfileptr);
}
void printweights(u,layerno) /* print the weights leading into unit u */
UNIT *u;
int layerno;
{
WTNODE *w;
UNIT *bunit;
WTTYPE value;
LAYER *layer;
int i;
short wtinuse, unitinuse;
#ifdef INTEGER
INT32 sum, input;
#else
REAL sum, input;
#endif
w = (WTNODE *) u->wtlist;
sum = 0;
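/* sum accumulates the net input into u as the weight list is printed */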
pg(stdout,"layer unit inuse unit value weight inuse input from unit\n");
while (w != NULL)
{
bunit = (UNIT *) w->backunit;
unitinuse = bunit->inuse;
#ifdef SYMMETRIC
value = *(w->weight);
wtinuse = 1;
#else
value = w->weight;
wtinuse = w->inuse;
#endif
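/* INTEGER mode: values appear to be fixed point with 1024 == 1.0, so the
   32-bit product of two scaled quantities is divided by 1024 to restore
   the scale */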
#ifdef INTEGER
if (wtinuse && unitinuse)
{
input = (INT32) value * bunit->oj;
input = input / 1024;
}
else input = 0;
#else
if (wtinuse && unitinuse) input = value * bunit->oj; else input = 0;
#endif
sum = sum + input;
sprintf(outstr,"%3d ",bunit->layernumber); pg(stdout,outstr);
if (bunit->unitnumber == 32767) pg(stdout," b ");
else {sprintf(outstr,"%3d ",bunit->unitnumber); pg(stdout,outstr);};
sprintf(outstr,"%4d ",unitinuse); pg(stdout,outstr);
sprintf(outstr,"%10.5f %10.5f ",unscale(bunit->oj),unscale(value));
pg(stdout,outstr);
sprintf(outstr," %3d ",wtinuse); pg(stdout,outstr);
sprintf(outstr,"%14.5f\n",unscaleint(input));
if (pg(stdout,outstr)) return;
w = w->next;
};
pg(stdout," ");
sprintf(outstr,"sum = %9.5f\n",unscaleint(sum));
if (pg(stdout,outstr)) return;
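/* find layer layerno (start is layer 1); D seems to be a per-layer
   scale factor applied to the net input */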
layer = start;
for (i=2;i<=layerno;i++) layer = layer->next;
if (layer->D != scale(1.0))
{
#ifdef INTEGER
sum = sum * layer->D / 1024;
#else
sum = sum * layer->D;
#endif
pg(stdout," ");
sprintf(outstr,"sum = %9.5f with D = %7.3f\n",unscaleint(sum),unscale(layer->D));
if (pg(stdout,outstr)) return;
};
pg(stdout,"\n");
}
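/* print the current parameter settings to f; the lines are written in
   command form, so it appears the output can be read back as a script */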
void parameters(f)
FILE *f;
{int i;
printnetsize(f);
sprintf(outstr," * %1d weights; ",wttotal); pg(f,outstr);
sprintf(outstr,"data file = %s",datafile); pg(f,outstr);
if (zeroderiv) pg(f,"*>>>>> 0 deriv <<<<<*\n"); else pg(f,"\n");
if (toosmall != 0)
{
sprintf(outstr,"pw %4.2f",unscale(toosmall)); pg(f,outstr);
sprintf(outstr," * weights in use: %1d\n",wtsinuse); pg(f,outstr);
};
sprintf(outstr,"sb %f\n", unscale(stdthresh)); pg(f,outstr);
#ifdef INTEGER
if (wtlimithit) pg(f,"*>>>>> WEIGHT LIMIT HIT <<<<<*\n");
#endif
listseeds(f); pg(f,"\n");
sprintf(outstr,"r %1d %1d ",maxiter,printrate); pg(f,outstr);
if (f != stdout) pg(f,"\"\n"); else pg(f,"\n");
sprintf(outstr,"f b%c c%c e%c i%c",ringbell,copyflag,echo,informat);
pg(f,outstr);
if (offby1 == '+') pg(f," O+"); else pg(f," O-");
sprintf(outstr," o%c P %d p%c ",outformat,pagesize,probtype);
pg(f,outstr);
sprintf(outstr,"R%c ",runningflag); pg(f,outstr);
sprintf(outstr," s%c ",summary); pg(f,outstr);
sprintf(outstr,"u%c ",up_to_date_stats); pg(f,outstr);
sprintf(outstr,"x% f\n",unscale(unknown)); pg(f,outstr);
pg(f,"f B ");
for (i=1;i<=MAXFORMAT-1;i++)
{sprintf(outstr," %1d",format[i]); pg(f,outstr);};
pg(f,"\n");
sprintf(outstr,"! %s ",sysstr); pg(f,outstr);
if (f != stdout) pg(f,"\"\n"); else pg(f,"\n");
pg(f,"a ");
if (nlayers > 2)
{sprintf(outstr,"ah %c ",ah); pg(f,outstr);};
sprintf(outstr,"ao %c ",ao); pg(f,outstr);
sprintf(outstr,"d%c ",deriv); pg(f,outstr);
sprintf(outstr,"u%c\n",update); pg(f,outstr);
sprintf(outstr,"e %7.5f %7.5f\n",unscale(eta),unscale(eta2));pg(f,outstr);
sprintf(outstr,"a %7.5f\n",unscale(alpha)); pg(f,outstr);
#ifndef SYMMETRIC
sprintf(outstr,"d d %8.5f e %8.5f ",unscale(decay),unscale(dbdeta));
pg(f,outstr);
sprintf(outstr,"k %8.5f m %8.5f ",unscale(kappa),unscale(etamax));
pg(f,outstr);
sprintf(outstr,"t %8.5f\n",unscale(theta1));
pg(f,outstr);
sprintf(outstr,"qp dh %8.6f do %8.6f e %8.5f ",qpdecayh,qpdecayo,unscale(qpeta));
pg(f,outstr);
sprintf(outstr,"m %8.5f n %8.5f s%c\n",unscale(mu),unscale(qpnoise),qpslope);
pg(f,outstr);
#endif
sprintf(outstr,"* last save at: %d\n",lastsave); pg(f,outstr);
sprintf(outstr,"rw %s\n",wtfilename); pg(f,outstr);
sprintf(outstr,"t %4.2f\n",unscale(toler)); pg(f,outstr);
printstats(f,TRAIN,1,s);
if (s[TOL][TEST].npats > 0) printstats(f,TEST,1,s);
if (f != stdout)
{
sprintf(outstr,"z %e",minimumsofar); pg(f,outstr);
};
pg(f,"\n");
}
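/* print the help menu selected by the command character ch */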
void menus(ch)
char ch;
{
switch (ch) {
case '?': parameters(stdout); return;
case 'A':
pg(stdout,"\nAlgorithm Parameters\n\n");
pg(stdout,"a a <char> sets all act. functions to <char>; {ls} h aa\n");
if (nlayers > 2)
{
sprintf(outstr,"a ah %c ",ah); pg(stdout,outstr);
pg(stdout,"hidden layer(s) act. function; {ls} h aa\n");
};
sprintf(outstr,"a ao %c ",ao); pg(stdout,outstr);
pg(stdout,"output layer act. function; {ls} h aa\n");
sprintf(outstr,"a d %c ",deriv); pg(stdout,outstr);
pg(stdout,"the output layer derivative term; {cdf} h ad\n");
sprintf(outstr,"a u %c ",update); pg(stdout,outstr);
pg(stdout,"the weight update algorithm; {Ccdpq} h au\n");
sprintf(outstr,"t %4.3f ",unscale(toler)); pg(stdout,outstr);
pg(stdout,"tolerance/unit for successful learning; (0..1)\n");
sprintf(outstr,"\nf O %c ",offby1); pg(stdout,outstr);
pg(stdout,"allows out-of-date statistics to print; {+-}\n");
sprintf(outstr,"f u %c ",up_to_date_stats); pg(stdout,outstr);
pg(stdout,"compute up-to-date statistics; {+-}\n");
pg(stdout,"\n");
return;
case 'C':
pg(stdout,"\nScreen Includes Information and Parameters on:\n\n");
pg(stdout," A algorithm parameters and tolerance\n");
pg(stdout," C this listing of major command groups\n");
#ifndef SYMMETRIC
pg(stdout," D delta-bar-delta parameters\n");
#endif
pg(stdout," F formats: patterns, output, paging, copying screen i/o\n");
pg(stdout," G gradient descent (plain backpropagation)\n");
pg(stdout," M miscellaneous commands: shell escape, seed values, clear,\n");
pg(stdout," clear and initialize, quit, save almost everything\n");
pg(stdout," N making a network, listing network unit values\n");
pg(stdout," P pattern commands: reading patterns, testing patterns,\n");
#ifndef SYMMETRIC
pg(stdout," Q quickprop parameters\n");
#endif
pg(stdout," T a short tutorial\n");
pg(stdout," W weight commands: listing, saving, restoring, set bias unit weights\n");
pg(stdout," ? a compact listing of everything\n");
pg(stdout,"\n");
return;
#ifndef SYMMETRIC
case 'D':
pg(stdout,"\nThe Delta-Bar-Delta Paramters (d)\n\n");
sprintf(outstr,"d d %4.2f ",unscale(decay)); pg(stdout,outstr);
pg(stdout,"decay rate for each eta; (0..1)\n");
sprintf(outstr,"d e %5.3f ",unscale(dbdeta)); pg(stdout,outstr);
pg(stdout,"initial eta for each weight; (0..inf)\n");
sprintf(outstr,"d k %6.4f ",unscale(kappa)); pg(stdout,outstr);
pg(stdout,"kappa, the eta increment; (0..inf)\n");
sprintf(outstr,"d m %6.3f ",unscale(etamax)); pg(stdout,outstr);
pg(stdout,"maximum value for eta; (0..inf)\n");
sprintf(outstr,"d t %3.1f ",unscale(theta1)); pg(stdout,outstr);
pg(stdout,"the theta parameter; (0..1)\n");
pg(stdout,"\na u d use this to get the delta-bar-delta update method\n");
pg(stdout,"\n");
return;
#endif
case 'F':
pg(stdout,"\nThe Format Command (f)\n\n");
sprintf(outstr,"f b %c ",ringbell); pg(stdout,outstr);
pg(stdout,"ring the bell when training is finished; {+-}\n");
sprintf(outstr,"f c %c ",copyflag); pg(stdout,outstr);
pg(stdout,"make a copy of screen i/o on the file copy; {+-}\n");
sprintf(outstr,"f e %c ",echo); pg(stdout,outstr);
pg(stdout,"echo input; {+-}\n");
sprintf(outstr,"f i %c ",informat); pg(stdout,outstr);
pg(stdout,"format for reading input and output patterns; {cr} h fi\n");
sprintf(outstr,"f O %c ",offby1); pg(stdout,outstr);
pg(stdout,"give off by 1 statistics for training set; {+-}\n");
sprintf(outstr,"f o %c ",outformat); pg(stdout,outstr);
pg(stdout,"format for outputing network's unit values; {acer} h fo\n");
sprintf(outstr,"f P %2d ",pagesize); pg(stdout,outstr);
pg(stdout,"lines per page; 0 = no paging; [0..inf]\n");
sprintf(outstr,"f p %c ",probtype); pg(stdout,outstr);
pg(stdout,"problem type; {cg} h fp\n");
sprintf(outstr,"f R %c ",runningflag); pg(stdout,outstr);
pg(stdout,"print the \"running . . .\" message; {+-}\n");
sprintf(outstr,"f s %c ",summary); pg(stdout,outstr);
pg(stdout,"summarize learning status while running; {+-}\n");
sprintf(outstr,"f u %c ",up_to_date_stats); pg(stdout,outstr);
pg(stdout,"get up-to-date status of training patterns; {+-}\n");
sprintf(outstr,"f x %5.2f ",unknown); pg(stdout,outstr);
pg(stdout,"the value of x in patterns\n");
pg(stdout,"f B ");
{int i;
for (i=1;i<=MAXFORMAT-1;i++)
{sprintf(outstr," %1d",format[i]); pg(stdout,outstr);};};
pg(stdout,"\n ");
pg(stdout," h fB\n");
pg(stdout,"\n");
return;
case 'G':
pg(stdout,"\nGradient Descent (Plain Backpropagation)\n\n");
sprintf(outstr,"e %8.5f %8.5f ",unscale(eta),unscale(eta2));pg(stdout,outstr);
pg(stdout,"eta for weights into the output layer is");
sprintf(outstr,"%8.5f\n",unscale(eta)); pg(stdout,outstr);
pg(stdout," eta for weights into the hidden layer(s) is");
sprintf(outstr,"%8.5f\n",unscale(eta2)); pg(stdout,outstr);
pg(stdout,"e <real> sets both etas to <real>\n\n");
sprintf(outstr,"a %4.2f ",unscale(alpha)); pg(stdout,outstr);
pg(stdout,"the momentum parameter, alpha, for all layers; [0..1)\n\n");
pg(stdout,"a u C use this for the right continuous update method\n");
pg(stdout,"a u c use this for the wrong continuous update method\n");
pg(stdout,"a u p use this for the periodic update method\n");
pg(stdout,"\n");
return;
case 'M':
pg(stdout,"\nMiscellaneous Commands\n\n");
if (sysstr[0] != '\0')
{pg(stdout,"! "); pg(stdout,sysstr); pg(stdout,"\n");};
pg(stdout,"! <string> pass <string> to the operating system\n\n");
pg(stdout,"q q by itself quits the program\n\n");
pg(stdout,"c clear the network weights\n");
pg(stdout,"ci same as c except the weights are initialized to\n");
sprintf(outstr," between -%4.2f ",unscale(initrange));
pg(stdout,outstr);
sprintf(outstr,"and %4.2f\n",unscale(initrange)); pg(stdout,outstr);
pg(stdout,"ci <real> same as ci except the weights are initialized to\n");
pg(stdout," between - <real> and + <real>\n\n");
sprintf(outstr,"r run %d iterations",maxiter); pg(stdout,outstr);
pg(stdout," and print a summary every\n");
sprintf(outstr," %d iterations\n",printrate); pg(stdout,outstr);
pg(stdout,"^R same as r\n");
pg(stdout,"r <int1> <int2> run <int1> iterations and print a summary every\n");
pg(stdout," <int2> iterations\n");
sprintf(outstr,"s %5d",seed); pg(stdout,outstr);
sprintf(outstr," random number seed: %1d\n",seed); pg(stdout,outstr);
pg(stdout,"\nse <filename> save everything to <filename>\n");
pg(stdout,"\n");
return;
case 'N':
pg(stdout,"\nNetwork Information\n\n");
printnetsize(stdout);
pg(stdout,"\n");
sprintf(outstr," total weights: %1d\n",wttotal);
pg(stdout,outstr);
pg(stdout,"l <layer> print the unit values in layer <layer>\n\n");
return;
case 'P':
pg(stdout,"\nPattern Related Commands\n\n");
pg(stdout,"f p g the problem type; {gc} h fp\n");
pg(stdout,"f i r format to input patterns; {cr} h fi\n");
pg(stdout,"f o r format to output network values; {acer} h fo\n\n");
pg(stdout,"rt {patterns} reads the patterns between { and }\n");
pg(stdout,"rt <filename> read training patterns from <filename>\n");
pg(stdout,"rx <filename> read more training patterns from <filename>\n");
pg(stdout,"tf <filename> test file is <filename>\n\n");
pg(stdout,"p list all training set pattern values\n");
pg(stdout,"pa same as p\n");
pg(stdout,"p <pat. no.> evaluate the training pattern\n");
pg(stdout,"p0 summarize training pattern learning\n\n");
pg(stdout,"t list all test set pattern values\n");
pg(stdout,"ta same as t\n");
pg(stdout,"tr <int> test recurrent patterns h tr\n");
pg(stdout,"trp <int> test recurrent patterns and print results h trp\n");
pg(stdout,"t <pat. no.> evaluate the test pattern\n");
pg(stdout,"t0 summarize test pattern learning\n\n");
pg(stdout,"o <int> prints the target for training pattern <int>\n\n");
printstats(stdout,TRAIN,1,s);
if (s[TOL][TEST].npats > 0) printstats(stdout,TEST,1,s);
pg(stdout,"\n");
return;
case 'Q':
pg(stdout,"\nThe Quickprop Parameters (qp)\n\n");
pg(stdout,"qp d <real> sets quickprop weight decay to <real> in all layers\n");
sprintf(outstr,"qp dh %8.6f ",unscale(qpdecayh)); pg(stdout,outstr);
pg(stdout,"quickprop decay for the hidden layer weights; [0..1)\n");
sprintf(outstr,"qp do %8.6f ",unscale(qpdecayo)); pg(stdout,outstr);
pg(stdout,"quickprop decay for the output layer weights; [0..1)\n");
sprintf(outstr,"qp e %6.4f ",unscale(qpeta)); pg(stdout,outstr);
pg(stdout,"quickprop eta; (0..inf)\n");