{ regress.pas }
ErrCode := 0;
case RegAlgo of
GAUSS_JORDAN : ErrCode := Gauss_GenMulFit(Mode, X, Y, W, N, Nvar,
ConsTerm, B, V);
SVD : ErrCode := SVD_GenMulFit(Mode, X, Y, W, N, Nvar,
ConsTerm, B, V);
end;
GenMulFit := ErrCode;
end;
function MulFit(X : TMatrix;
Y : TVector;
N, Nvar : Integer;
ConsTerm : Boolean;
B : TVector;
V : TMatrix) : Integer;
var
W : TVector;
begin
W := nil;
MulFit := GenMulFit(UNWEIGHTED, X, Y, W, N, Nvar, ConsTerm, B, V);
end;
function WMulFit(X : TMatrix;
Y, W : TVector;
N, Nvar : Integer;
ConsTerm : Boolean;
B : TVector;
V : TMatrix) : Integer;
begin
WMulFit := GenMulFit(WEIGHTED, X, Y, W, N, Nvar, ConsTerm, B, V);
end;
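{ Usage sketch (an illustration under assumptions, not part of the library):
  for an unweighted multilinear fit of Y on Nvar variables, with B and V
  dimensioned beforehand via DimVector / DimMatrix as elsewhere in this unit,

    ErrCode := MulFit(X, Y, N, Nvar, True, B, V);

  would return the fitted coefficients in B and the matrix filled by the
  underlying solver in V; WMulFit is the same call with an extra weight
  vector W. Both funnel into GenMulFit above, which dispatches on RegAlgo
  to Gauss_GenMulFit or SVD_GenMulFit. }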
procedure PowMat(X : TVector; N, Deg : Integer; U : TMatrix);
{ ----------------------------------------------------------------------
Computes matrix of increasing powers of X for polynomial regression
---------------------------------------------------------------------- }
var
I, K : Integer;
begin
for K := FirstPoint to N do
begin
U[1,K] := X[K];
for I := 2 to Deg do
U[I,K] := U[I - 1,K] * X[K];
end;
end;
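{ PowMat fills the design matrix

    U[I,K] = X[K]^I,   I = 1..Deg,  K = FirstPoint..N,

  i.e. one row per power of X and one column per observation. The constant
  (power 0) term is not stored here; GenPolFit below passes ConsTerm = True
  so that the intercept is handled by GenMulFit. }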
function GenPolFit(Mode : TRegMode;
X, Y, W : TVector;
N, Deg : Integer;
B : TVector;
V : TMatrix) : Integer;
{ ----------------------------------------------------------------------
General polynomial regression routine
---------------------------------------------------------------------- }
var
U : TMatrix;
begin
DimMatrix(U, Deg, N);
PowMat(X, N, Deg, U);
GenPolFit := GenMulFit(Mode, U, Y, W, N, Deg, True, B, V);
end;
function PolFit(X, Y : TVector;
N, Deg : Integer;
B : TVector;
V : TMatrix) : Integer;
var
W : TVector;
begin
W := nil;
PolFit := GenPolFit(UNWEIGHTED, X, Y, W, N, Deg, B, V);
end;
function WPolFit(X, Y, W : TVector;
N, Deg : Integer;
B : TVector;
V : TMatrix) : Integer;
begin
WPolFit := GenPolFit(WEIGHTED, X, Y, W, N, Deg, B, V);
end;
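{ Usage sketch (hedged): a quadratic fit, assuming B and V have been
  dimensioned large enough for the constant term plus Deg coefficients:

    ErrCode := PolFit(X, Y, N, 2, B, V);

  Because GenPolFit calls GenMulFit with ConsTerm = True, the intercept is
  part of the returned coefficient vector B. }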
procedure SetGlobalVar(RegFunc : TRegFunc;
DerivProc : TDerivProc;
Mode : TRegMode;
X, Y, W : TVector;
N, Lbound, Ubound : Integer;
B_min, B_max : TVector);
{ Sets the global variables used by the nonlinear regression routines }
begin
NN := N;
DimVector(XX, N);
DimVector(YY, N);
DimVector(YYcalc, N);
CopyVector(XX, X, FirstPoint, N);
CopyVector(YY, Y, FirstPoint, N);
if Mode = WEIGHTED then
begin
DimVector(WW, N);
CopyVector(WW, W, FirstPoint, N);
end;
DimVector(ParamMin, Ubound);
DimVector(ParamMax, Ubound);
CopyVector(ParamMin, B_min, Lbound, Ubound);
CopyVector(ParamMax, B_max, Lbound, Ubound);
FirstParam := Lbound;
LastParam := Ubound;
RegFunc1 := RegFunc;
DerivProc1 := DerivProc;
end;
procedure NumDeriv(RegFunc : TRegFunc;
X, Y : Float;
B, D : TVector);
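{ Numerical derivative of the regression function with respect to each
  parameter, by forward differences }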
var
I : Integer;
Eps, Temp, Y1 : Float;
begin
Eps := Sqrt(MACHEP);
for I := FirstParam to LastParam do
begin
Temp := B[I]; { Save parameter }
B[I] := B[I] + Eps * Abs(B[I]); { Modified parameter }
Y1 := RegFunc(X, B);
D[I] := (Y1 - Y) / (B[I] - Temp); { Derivative }
B[I] := Temp; { Restore parameter }
end;
end;
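{ NumDeriv uses a relative forward-difference step:

    h    = Sqrt(MACHEP) * Abs(B[I])
    D[I] ~ ( RegFunc(X, B + h*e_I) - Y ) / h

  where Y is expected to hold RegFunc(X, B) on entry (the callers pass
  YYcalc[K]), and the division uses the actually stored step B[I] - Temp
  to absorb rounding in h. Note that if B[I] = 0 the step is zero and the
  quotient is 0/0, so nonzero parameter values are presumably expected. }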
function OutOfBounds(B, B_min, B_max : TVector) : Boolean;
{ Returns True if at least one parameter lies outside the bounds [B_min, B_max] }
var
I : Integer;
OoB : Boolean;
begin
I := FirstParam;
repeat
OoB := (B[I] < B_min[I]) or (B[I] > B_max[I]);
Inc(I);
until OoB or (I > LastParam);
OutOfBounds := OoB;
end;
function OLS_ObjFunc(B : TVector) : Float;
{ Objective function for unweighted nonlinear regression }
var
K : Integer;
S : Float;
begin
if OutOfBounds(B, ParamMin, ParamMax) then
begin
OLS_ObjFunc := MAX_FUNC;
Exit;
end;
S := 0.0;
K := FirstPoint;
repeat
YYcalc[K] := RegFunc1(XX[K], B);
S := S + Sqr(YY[K] - YYcalc[K]);
Inc(K);
until (K > NN) or (S > MAX_FUNC);
if S > MAX_FUNC then S := MAX_FUNC;
OLS_ObjFunc := S;
end;
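{ The unweighted objective is the residual sum of squares

    S(B) = Sum[K] ( YY[K] - RegFunc1(XX[K], B) )^2

  clamped at MAX_FUNC; the same ceiling is returned when B lies outside
  [ParamMin, ParamMax], so out-of-bounds or diverging parameter sets all
  look equally bad to the minimizer. The fitted values are kept in YYcalc
  for reuse by the gradient and Hessian routines below. }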
procedure OLS_Gradient(Func : TFuncNVar;
B : TVector;
Lbound, Ubound : Integer;
G : TVector);
{ Gradient for unweighted nonlinear regression.
Func is a dummy parameter here. }
var
I, K : Integer; { Loop variables }
R : Float; { Residual }
D : TVector; { Derivatives of the regression function }
begin
DimVector(D, Ubound);
{ Initialization }
for I := Lbound to Ubound do
G[I] := 0.0;
{ Compute Gradient }
for K := FirstPoint to NN do
begin
R := YY[K] - YYcalc[K];
DerivProc1(RegFunc1, XX[K], YYcalc[K], B, D);
for I := Lbound to Ubound do
G[I] := G[I] - D[I] * R;
end;
for I := Lbound to Ubound do
G[I] := 2.0 * G[I];
end;
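{ Gradient of the unweighted objective,

    G[I] = dS/dB[I] = -2 * Sum[K] ( YY[K] - YYcalc[K] ) * dF/dB[I],

  with dF/dB[I] supplied by DerivProc1 and the fitted values YYcalc taken
  from the last evaluation of OLS_ObjFunc. }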
procedure OLS_HessGrad(Func : TFuncNVar;
B : TVector;
Lbound, Ubound : Integer;
G : TVector;
H : TMatrix);
{ Gradient and Hessian for unweighted nonlinear regression.
Func is a dummy parameter here. }
var
I, J, K : Integer; { Loop variables }
R : Float; { Residual }
D : TVector; { Derivatives of the regression function }
begin
DimVector(D, Ubound);
{ Initializations }
for I := Lbound to Ubound do
begin
G[I] := 0.0;
for J := I to Ubound do
H[I,J] := 0.0;
end;
{ Compute Gradient & Hessian }
for K := FirstPoint to NN do
begin
R := YY[K] - YYcalc[K];
DerivProc1(RegFunc1, XX[K], YYcalc[K], B, D);
for I := Lbound to Ubound do
begin
G[I] := G[I] - D[I] * R;
for J := I to Ubound do
H[I,J] := H[I,J] + D[I] * D[J];
end;
end;
{ Fill in symmetric matrix }
for I := Succ(Lbound) to Ubound do
for J := Lbound to Pred(I) do
H[I,J] := H[J,I];
end;
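{ OLS_HessGrad forms the Gauss-Newton approximation of the Hessian,

    H[I,J] ~ Sum[K] dF/dB[I] * dF/dB[J],

  dropping the second-derivative term of the exact Hessian. Note that,
  unlike OLS_Gradient, neither G nor H is scaled by 2 here; the common
  factor presumably cancels in the H^(-1) * G step taken by the caller. }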
function WLS_ObjFunc(B : TVector) : Float;
{ Objective function for weighted nonlinear regression }
var
K : Integer;
S : Float;
begin
if OutOfBounds(B, ParamMin, ParamMax) then
begin
WLS_ObjFunc := MAX_FUNC;
Exit;
end;
S := 0.0;
K := FirstPoint;
repeat
YYcalc[K] := RegFunc1(XX[K], B);
S := S + WW[K] * Sqr(YY[K] - YYcalc[K]);
Inc(K);
until (K > NN) or (S > MAX_FUNC);
if S > MAX_FUNC then S := MAX_FUNC;
WLS_ObjFunc := S;
end;
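{ Weighted objective: the same residual sum of squares with each term
  scaled by its weight,

    S(B) = Sum[K] WW[K] * ( YY[K] - RegFunc1(XX[K], B) )^2,

  with the same bound check and MAX_FUNC clamping as OLS_ObjFunc. }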
procedure WLS_Gradient(Func : TFuncNVar;
B : TVector;
Lbound, Ubound : Integer;
G : TVector);
{ Gradient for weighted nonlinear regression.
Func is a dummy parameter here. }
var
I, K : Integer; { Loop variables }
R : Float; { Residual }
D : TVector; { Derivatives of the regression function }
WD : Float; { Weighted derivative }
begin
DimVector(D, Ubound);
{ Initialization }
for I := Lbound to Ubound do
G[I] := 0.0;
{ Compute Gradient }
for K := FirstPoint to NN do
begin
R := YY[K] - YYcalc[K];
DerivProc1(RegFunc1, XX[K], YYcalc[K], B, D);
for I := Lbound to Ubound do
begin
WD := WW[K] * D[I];
G[I] := G[I] - WD * R;
end;
end;
for I := Lbound to Ubound do
G[I] := 2.0 * G[I];
end;
procedure WLS_HessGrad(Func : TFuncNVar;
B : TVector;
Lbound, Ubound : Integer;
G : TVector;
H : TMatrix);
{ Gradient and Hessian for weighted nonlinear regression.
Func is a dummy parameter here. }
var
I, J, K : Integer; { Loop variables }
R : Float; { Residual }
D : TVector; { Derivatives of the regression function }
WD : Float; { Weighted derivative }
begin
DimVector(D, Ubound);
{ Initialization }
for I := Lbound to Ubound do
begin
G[I] := 0.0;
for J := I to Ubound do
H[I,J] := 0.0;
end;
{ Compute Gradient & Hessian }
for K := FirstPoint to NN do
begin
R := YY[K] - YYcalc[K];
DerivProc1(RegFunc1, XX[K], YYcalc[K], B, D);
for I := Lbound to Ubound do
begin
WD := WW[K] * D[I];
G[I] := G[I] - WD * R;
for J := I to Ubound do
H[I,J] := H[I,J] + WD * D[J];
end;
end;
{ Fill in symmetric matrix }
for I := Succ(Lbound) to Ubound do
for J := Lbound to Pred(I) do
H[I,J] := H[J,I];
end;
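{ WLS_Gradient and WLS_HessGrad mirror their OLS counterparts, with every
  residual and derivative product scaled by WW[K]:

    G[I]   = -2 * Sum[K] WW[K] * ( YY[K] - YYcalc[K] ) * dF/dB[I]
    H[I,J] ~       Sum[K] WW[K] * dF/dB[I] * dF/dB[J]

  (again with no factor 2 applied inside WLS_HessGrad). }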
procedure SaveSim(B_sim : TMatrix; Lbound, Ubound : Integer);
{ Saves the simulated parameters from the last Metropolis-Hastings cycle }
var
F : Text;
I, J : Integer;
begin
Assign(F, MHFile);
Rewrite(F);
Writeln(F, 'Simulation (Metropolis-Hastings)');
Write(F, ' Iter');
for I := Lbound to Ubound do
Write(F, 'b':14, I);
Writeln(F);
for J := 1 to MH_SavedSim do
begin
Write(F, J:5);
for I := Lbound to Ubound do
Write(F, ' ', B_sim[I,J]:14:6);
Writeln(F);
end;
Close(F);
end;
function Simulate(ObjFunc : TFuncNvar;
B, B_min, B_max : TVector;
Lbound, Ubound : Integer;
var F_min : Float;
V : TMatrix) : Integer;
{ Simulation by the Metropolis-Hastings algorithm }