/*
** optmum.src -- OPTMUM optimization library source (part 1 of 2 of
** the original 388-line file; the documentation header below is
** truncated at the top).
** _opusrch - scalar, if nonzero and if all other line search methods fail
** OPTMUM will enter an interactive mode in which the
** user can select a line search parameter.
**
** _opdelta - scalar, used in NEWTON. The eigenvalues of the Hessian
** will be constrained to be greater than this value.
** If set to zero the constraint will be disabled.
**
** _opstmth - String, contains starting methods for algorithm, step
** length, and Hessian. For example,
**
** _opstmth = "NEWTON BRENT";
**
** will cause OPTMUM to start with the NEWTON algorithm and
** the BRENT step length method.
**
** _opmdmth - String, contains "middle" methods for algorithm, step
** length, and Hessian. OPTMUM will switch to the methods
** in _opmdmth either when the function fails to change by
** _opdfct, or when _opditer iterations have occurred.
**
** _opkey - scalar, flag controlling the keyboard capture feature. During
** the iterations OPTMUM will respond to the keyboard in order to
** allow the user to modify global variables on the fly. If
** OPTMUM is being run recursively, however, (i.e., OPTMUM is
** being called inside of the user procedure) then the keyboard
** capture feature should be turned off for the version of
** OPTMUM being called inside the user procedure which will
** permit the outer version of OPTMUM to retain keyboard
** control.
**
** _opgrdh - scalar, increment size for computing gradient.
**
**
**-------------------**------------------**-------------------**-----------**
**-------------------**------------------**-------------------**-----------**
**
** Calling OPTMUM recursively
**
** The procedure provided by the user for computing the function
** to be minimized can itself call OPTMUM. In fact the number of nested
** levels is limited only by the amount of workspace memory. Each level
** will also contain its own set of global variables. Thus nested versions
** can have their own set of attributes and optimization methods.
** It will be important to call OPTSET for all nested versions, and
** generally if you wish the outer version of OPTMUM to retain control
** over the keyboard you will need to set _opkey to zero for all the
** nested versions.
**
*/
/*-------------------**------------------**-------------------**-----------**
**-------------------**------------------**-------------------**-----------*/
/* SOURCE CODE */
#include optmum.ext
#include gauss.ext
/*
** optmum
**
** Purpose:  front end for the OPTMUM minimizer.  Packs the _op*
**           control globals into a single positional call to the
**           worker procedure _optmum, then copies the returned final
**           Hessian and iteration data back into globals.
**
** Format:   { x,f0,g,retcode } = optmum(&fnct,x0);
**
** Input:    fnct -- pointer to the procedure computing the function
**                   to be minimized
**           x0   -- vector of starting parameter values
**
** Output:   x       -- parameter values at the minimum found
**           f0      -- function value at x
**           g       -- gradient at x
**           retcode -- return code (see OPTPRT for meanings)
**
** Globals:  reads the _op* control variables documented in the file
**           header; writes _opfhess (final Hessian) and _opitdta
**           (iteration statistics) on return.
*/
proc (4) = optmum(fnct,x0);
local x,f0,g,retcode;
local Lopfhess,Lopitdta,LLoutput;
#ifUNIX
/* Under Unix, collapse __output to a 0/1 flag ("/=" is GAUSS's
** not-equal operator); other platforms pass __output through. */
LLoutput = __output /= 0;
#else
LLoutput = __output;
#endif
/* NOTE: the argument order below is the fixed positional contract of
** _optmum -- do not reorder these lines. */
{ x,f0,g,retcode,Lopfhess,Lopitdta } = _optmum(fnct,x0,
_opalgr,
_opdelta,
_opdfct,
_opditer,
_opgdmd,
_opgdprc,
_opgrdh,
_opgtol,
_ophsprc,
_opkey,
_opmbkst,
_opmdmth,
_opmiter,
_opmtime,
_opmxtry,
_opparnm,
_oprteps,
_opshess,
_opstep,
_opstmth,
_opusrch,
_opusrgd,
_opusrhs,
LLoutput,
__title
);
/* copy worker results back into the globals callers inspect */
_opfhess = Lopfhess;
_opitdta = Lopitdta;
retp(x,f0,g,retcode);
endp;
/*
** optset
**
** Purpose:  resets every OPTMUM control global to its default value.
**           Call it before each independent use of OPTMUM, and once
**           for every nested level when OPTMUM is called recursively.
**
** Format:   optset;
**
** NOTE(review): _opmtime is passed to _optmum by OPTMUM but is not
** reset here -- presumably initialized in optmum.ext; verify.
*/
proc(0) = optset;
gausset;

/* --- algorithm selection and switching ------------------------- */
_opalgr = 2; /* optimization algorithm */
_opstmth = ""; /* starting methods */
_opmdmth = ""; /* "middle" methods */
_opditer = 20; /* iterations before switching to _opmdmth */
_opdfct = 0.001; /* % function change before switching to _opmdmth */

/* --- step length ----------------------------------------------- */
_opstep = 2; /* selects type of step length */
_opmbkst = 10; /* # of backsteps in computing step length */
_oprteps = .01;
_opusrch = 0; /* interactive line search disabled */
_opmxtry = 100;

/* --- gradient and Hessian -------------------------------------- */
_opgdprc = 0; /* procedure to compute gradient */
_ophsprc = 0; /* procedure to compute Hessian */
_opgdmd = 0; /* numerical gradient method */
_opgrdh = 0; /* increment size for computing gradient */
_opusrgd = 0;
_opusrhs = 0;
_opshess = 0; /* selects starting Hessian */
_opdelta = .1; /* lower bound on Hessian eigenvalues (NEWTON) */

/* --- convergence and limits ------------------------------------ */
_opgtol = 1e-5; /* convergence tolerance for gradient */
_opmiter = 1e+5; /* maximum number of iterations */

/* --- bookkeeping ----------------------------------------------- */
_opparnm = 0; /* parameter names */
_opitdta = { 0,0,0 }; /* iteration statistics */
_opfhess = 0; /* final Hessian */
_opkey = 1; /* keyboard capture enabled */
endp;
/*
** optprt
**
** Purpose:  prints OPTMUM results -- a return-code message, the
**           objective function value, and a table of parameter
**           estimates with their gradient elements -- then returns
**           its four arguments unchanged so it can be chained:
**
**               { x,f,g,ret } = optprt(optmum(&fnct,x0));
**
** Input:    x   -- parameter estimates
**           f   -- function value at x
**           g   -- gradient at x
**           ret -- return code from OPTMUM
**
** Output:   the four inputs, passed through unchanged.
**
** Globals:  _op_ver (version, declared elsewhere), _opparnm
**           (optional parameter names), _opitdta (elements 1 and 2:
**           iteration count and minutes to convergence).
*/
proc(4) = optprt(x,f,g,ret);
local lbl,mask,fmt;
print;
call header("OPTMUM","",_op_ver);
print;
print "return code = " ftos(ret,"%*.*lf",4,0);
/* map the return code to a message; codes 10 and 12-19 print nothing
** here -- NOTE(review): presumably not emitted by _optmum, confirm
** against the worker procedure on page 2. */
if ret == 0;
print "normal convergence";
elseif ret == 1;
print "forced termination";
elseif ret == 2;
print "maximum number of iterations exceeded";
elseif ret == 3;
print "function calculation failed";
elseif ret == 4;
print "gradient calculation failed";
elseif ret == 5;
print "Hessian calculation failed";
elseif ret == 6;
print "step length calculation failed";
elseif ret == 7;
print "function cannot be evaluated at initial parameter values";
elseif ret == 8;
print "number of elements in the gradient vector inconsistent";
print "with number of starting values";
elseif ret == 9;
print "gradient function returned a column vector rather than";
print "the required row vector";
elseif ret == 11;
print "maximum time exceeded";
elseif ret == 20;
print "Hessian failed to invert";
endif;
print;
print "Value of objective function " ftos(f,"%*.*lf",15,6);
print;
print "Parameters Estimates Gradient";
print "-----------------------------------------";
/* if g is not conformable with x, show missing values in its place */
if rows(g) /= rows(x);
g = miss(zeros(rows(x),1),0);
endif;
/* default labels P01, P02, ... when no parameter names were supplied */
if _opparnm $== 0;
lbl = 0 $+ "P" $+ ftocv(seqa(1,1,rows(x)),2,0);
else;
lbl = _opparnm;
endif;
/* printfm mask: column 1 character (0), columns 2-3 numeric (1) */
mask = 0~1~1;
let fmt[3,3] = "-*.*s" 9 8 "*.*lf" 14 4 "*.*lf" 14 4;
call printfm(lbl~x~g,mask,fmt);
print;
print "Number of iterations " ftos(_opitdta[1],"%*.*lf",5,0);
print "Minutes to convergence " ftos(_opitdta[2],"%*.*lf",10,5);
retp(x,f,g,ret);
endp;
/* [extraction note: non-code viewer residue removed here; the
** remaining lines of the 388-line file continue on page 2, which is
** not part of this chunk] */