📄 ml_elf_bhhh.ado
字号:
*! version 1.0.0 21apr2003
program define ml_elf_bhhh
// BHHH (Berndt-Hall-Hall-Hausman) step evaluator for method-lf (linear
// form) likelihood programs.  Computes:
//   1. the linear predictor for each equation (plus offset/exposure),
//   2. the overall log likelihood (scalar $ML_f) via the user program,
//   3. a numeric (central-difference) gradient $ML_g, and
//   4. a Hessian approximation $ML_V as the outer product of the
//      observation-level gradients (via _robust with minus(0)).
// Argument:
//   calltype  -1 = memory-requirements check (nothing to do here),
//              0 = evaluate likelihood only,
//              otherwise = likelihood plus gradient/Hessian.
version 8.0
args calltype
// no need to check memory requirements
if (`calltype' == -1) exit
// --- Build the linear predictors x1..x$ML_n, one per equation ---
local i 1
while `i' <= $ML_n {
tempname x`i'
// xb for equation `i' over the estimation sample
qui mat score double `x`i'' = $ML_b if $ML_samp, eq(#`i')
// add the equation's offset (ML_xo`i') or ln(exposure) (ML_xe`i'),
// if either was declared
if "${ML_xo`i'}${ML_xe`i'}" != "" {
if "${ML_xo`i'}" != "" {
qui replace `x`i'' = `x`i'' + ${ML_xo`i'}
}
else qui replace `x`i'' = `x`i'' + ln(${ML_xe`i'})
}
local list `list' `x`i''
local i = `i' + 1
}
// --- Log likelihood ---
// Call the user's lf evaluator to fill `f' with the per-observation
// log likelihood, then aggregate into scalar $ML_f.
tempvar f
qui gen double `f' = . in 1
$ML_vers $ML_user `f' `list'
mlsum $ML_f = `f'
// Stop here if only the likelihood was requested or it is missing
// (i.e. could not be evaluated at the current parameter vector).
if (`calltype'==0 | scalar($ML_f)==.) exit
/* we now continue to make derivative
calculations
*/
tempname wrk
tempvar one x0
// Row vector of length $ML_k to receive the gradient.
mat $ML_g = J(1,$ML_k,0)
// `one' serves as the "constant" regressor when an equation has one.
qui gen byte `one' = 1 if $ML_samp
quietly {
local i 1
while `i'<=$ML_n {
// Variable list for equation `i': its regressors, plus `one'
// unless the equation was declared with nocons.
if "${ML_xc`i'}" == "nocons" {
local vl`i' ${ML_x`i'}
}
else local vl`i' ${ML_x`i'} `one'
// First/last coefficient positions of equation `i' in $ML_b.
// NOTE(review): `ee' is set but not referenced below — confirm
// whether it is vestigial.
local se ${ML_fp`i'}
local ee ${ML_lp`i'}
tempname h`i'
tempvar fph`i' fmh`i' g`i'
local glist `glist' `g`i''
// Set the current equation's predictor aside under the name
// `x0' so ml_adjs can evaluate the likelihood at x0 +/- h.
rename `x`i'' `x0'
/* calculate stepsize `h`i'',
`fph`i'', and `fmh`i'' */
noi ml_adjs elf `i' `fph`i'' `fmh`i'' `x0' `list'
scalar `h`i'' = r(step)
/* gradient calculation */
// Weighted central difference: g_i = w * (f(x+h) - f(x-h)) / (2h)
gen double `g`i''=$ML_w*(`fph`i''-`fmh`i'')/(2*`h`i'')
// Accumulate sum over observations of g_i * regressors, then
// place it in $ML_g starting at equation `i''s first column.
matrix vecaccum `wrk' = `g`i'' `vl`i'', nocons
mat subst $ML_g[1,`se'] = `wrk'
// Restore the predictor to its original name for the next pass.
drop `x`i''
rename `x0' `x`i''
local i=`i'+1
} /* i loop */
} /* quietly */
/* Estimate Hessian as outer
* product of gradients_t */
mat $ML_V = I($ML_k)
// Label $ML_V's rows/columns to match $ML_b before _robust fills it.
_cpmatnm $ML_b, square($ML_V)
qui _robust `glist' [iw=$ML_w] if $ML_samp, variance($ML_V) minus(0)
end
exit
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -