ml_max.ado
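* Excerpt from the internals of Stata's -ml- command. The descriptive
* comments added below are inferred from the code itself, not taken from
* official documentation. This first fragment begins mid-program: after
* maximization, when robust variance estimation was requested, it validates
* the score variables, computes the robust/linearized VCE via _robust (or
* _robust2 on the svy path), reposts e(b)/e(V), and fills in the saved
* results and the model Wald test. The helper programs PostDiparms, Iter0,
* SetTrace, and CheckSco follow.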
if "$ML_vce"=="robust" {
CheckSco `score'
if r(okay) {
if "$ML_svy" != "" {
tempname Vsrs ev
matrix `ev' = e(V)
local svyopt zeroweight vsrs(`Vsrs') v(`ev')
if "$ML_str" != "" {
local svyopt `svyopt' strata($ML_str)
}
if "$ML_fpc" != "" {
local svyopt `svyopt' fpc($ML_fpc)
}
if "$ML_subv" != "" {
local svyopt `svyopt' subpop($ML_subv)
local svyopt `svyopt' $ML_srsp
}
/* Replace missing scores with zeros. */
foreach sc of local score {
capture replace `sc' = 0 if `sc' >= .
if !_rc {
local sclist `sclist' `sc'
}
}
}
if "$ML_clust" != "" {
local cluster cluster($ML_clust)
}
if "${ML_wtyp}" != "" {
if "$ML_svy" != "" {
local w [iw=$ML_sw]
}
else {
local w [$ML_wtyp=$ML_w]
}
}
if "$ML_vscr" != "" {
// trick 4 rdu0: scores are complete, _robust
// doesn't have to expand using chain rule
tempname b
mat `b' = e(b)
local fullnms : colfullnames `b'
local i 1
while `i' <= $ML_k {
local newnams `newnams' eq`i':_cons
local i = `i' + 1
}
mat colnames `b' = `newnams'
est repost _b=`b', rename
}
if "$ML_svy2" == "" {
_robust `score' `w' if $ML_samp, ///
`cluster' `svyopt' nofatal
}
else {
_robust2 `score' if $ML_samp, svy `svyopt'
_r2e
est scalar N_psu = e(N_clust)
est scalar N_psu = e(N_clust)
if !missing(e(sum_wsub)) {
est scalar N_subpop = e(sum_wsub)
}
est local prefix svy
est local brrweight
est local jkrweight
est local sum_wsub
est local sum_w
est local vcetype Linearized
}
if "$ML_svy" != "" {
est repost V = `ev'
/* clear non-svy results */
est local clustvar
est local N_clust
est local rc
est local ll
est local rank
est local chi2type
est local k
est local k_dv
est local user
est local cnt0
est local cnt1
est local cnt2
est local cnt_
est local ilog
est local ic
/* (re)post svy results */
est local depvar $ML_y
if "$ML_sw" != "" {
est local wtype $ML_swty
est local wexp "= $ML_sw"
}
if "$ML_svy2" == "" {
est local vcetype
est local strata $ML_str
est local psu $ML_clust
est local fpc $ML_fpc
}
else {
est local estat_cmd svy_estat
}
est local subpop $ML_subp
est local adjust $ML_sadj
est local svyml svyml
/* population size */
if "$ML_pop" == "" {
est scalar N_pop = r(sum_w)
}
else est scalar N_pop = $ML_pop
/* subpopulation size */
if "`r(N_sub)'" != "" & "`r(N_sub)'" != "." {
/* # subpop. obs */
est scalar N_sub = r(N_sub)
/* subpop. size */
est scalar N_subpop = r(sum_wsub)
}
if "$ML_svy2" == "" {
est scalar N = $ML_N
/* number of strata */
est scalar N_strata = r(N_strata)
/* number of PSUs */
est scalar N_psu = r(N_clust)
est scalar df_r = e(N_psu)-e(N_strata)
}
if `"$ML_omit"' != "" {
est scalar N_strata_omit = $ML_omit
}
if "`e(poststrata)'" == "" {
/* needs e(V) posted and e(fpc) */
_svy_mkvsrs `Vsrs' $ML_srsp
/* needs e(V), e(V_srs) [e(V_srswr)] */
_svy_mkdeff
}
local spec
forval i = 1/$ML_wald {
local spec `"`spec' ([#`i'])"'
}
_svy_ftest "`spec'" "$ML_cns"
/* _svy_ftest did the wald test already */
global ML_wald
}
// trick 4 rdu0: restore colfullnames
if "$ML_vscr" != "" {
mat `b' = e(b)
mat colnames `b' = `fullnms'
est repost _b=`b', rename
}
}
        else {
            tempname badV
            mat `badV' = J($ML_k,$ML_k,0)
            est repost V=`badV'
            est scalar rc = 504
        }
        if "$ML_svy" == "" {
            est scalar df_m = .
            est scalar chi2 = .
            est scalar p = .
            est local vcetype2 $ML_vce2
        }
        est local vce robust
    }
    else {
        est local vce $ML_vce
        est local vcetype $ML_vce2
    }
    if "$ML_wald" != "" {
        local i 1
        local temp 0
        while `i' <= $ML_wald {
            capture test [${ML_eq`i'}], accum notest
            if _rc!=0 & _rc!=302 {
                error _rc
            }
            if "${ML_xc`i'}"=="" {
                local temp = `temp' + ${ML_k`i'} - 1
            }
            else {
                local temp = `temp' + ${ML_k`i'}
            }
            local i = `i' + 1
        }
        capture test, min
        if _rc==302 {
            est scalar df_m = 0
        }
        else est scalar df_m = r(df)
        if r(df) == `temp' | $ML_C == 1 {
            est scalar chi2 = r(chi2)
            est scalar p = r(p)
        }
        else {
            est scalar chi2 = .
            est scalar p = .
        }
        est local chi2type "Wald"
    }
    PostDiparms
    est local predict "ml_p"
    est local cmd "ml"
    if "`clear'"=="" & "$ML_pres" == "" {
        ml clear
    }
    global ML_setes "yes"
    if "`output'"=="" {
        `vv' ml_mlout, `header' `eform' level(`level')
        exit `e(rc)'
    }
end
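
* PostDiparms: reposts the display-parameter specifications stored in the
* ML_diparm* globals as e(diparm1), e(diparm2), ...
* (Description inferred from the code, not from documentation.)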
program PostDiparms, eclass
    version 9
    if ("$ML_diparms" == "") exit
    forval k = 1/$ML_diparms {
        ereturn local diparm`k' `"${ML_diparm`k'}"'
    }
end
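
* Iter0: handles iterate(0). It evaluates the likelihood once at the initial
* values, prints the iteration-0 log (with gradient and/or Hessian when
* ML_dider requests them), warns that convergence was not achieved, and posts
* V as the inverse of the negative Hessian, or as a zero matrix under -novce-.
* (Description inferred from the code.)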
program define Iter0
    syntax [, noWARNing noVCE ]
    if $ML_trace > 1 | ("`vce'"=="" & $ML_dider) {
        di _n in smcl in gr "{hline 78}" _n "Iteration 0:"
    }
    if $ML_trace == 2 | $ML_trace == 4 {
        di in gr "Coefficient vector:"
        mat list $ML_b, noheader noblank format(%9.0g)
        di  /* blank line */
    }
    if "`vce'"=="" {
        capture noisily $ML_eval 2
        if _rc == 1 {
            exit 1
        }
        if _rc {
            di in red "method $ML_meth may not " /*
                */ "support iterate(0)"
            exit 198
        }
    }
    else $ML_eval 0
    global ML_ic 1  /* this is how ml_log counts */
    if scalar($ML_f)>=. {
        di in red "initial values infeasible"
        exit 1400
    }
    if $ML_trace > 1 | ("`vce'"=="" & $ML_dider) {
        local col = 66-length("$ML_crtyp")
        di in gr _col(`col') "$ML_crtyp = " in ye %10.0g /*
            */ scalar($ML_f)
    }
    else if $ML_trace == 1 {
        di in gr "Iteration 0:" _col(16) "$ML_crtyp = " /*
            */ in ye %10.0g scalar($ML_f)
    }
    if "`vce'"=="" & ($ML_trace>2 | $ML_dider==1 | $ML_dider==3) {
        tempname lengrad
        mat `lengrad' = $ML_g * $ML_g'
        if $ML_dider==1 | $ML_dider==3 {
            _cpmatnm $ML_b, vec($ML_g)
            di in gr "Gradient vector (length =" /*
                */ in ye %9.0g sqrt(`lengrad'[1,1]) /*
                */ in gr "):"
            mat list $ML_g, noheader noblank format(%9.0g)
        }
        else di in gr "Length of gradient vector =" /*
            */ in ye %9.0g sqrt(`lengrad'[1,1])
        local newline "_n"
    }
    if "`vce'"=="" & $ML_dider > 1 {
        _cpmatnm $ML_b, square($ML_V)
        di `newline' in gr "Negative Hessian:"
        mat list $ML_V, noheader noblank format(%9.0g)
    }
    if $ML_trace > 1 | ("`vce'"=="" & $ML_dider) {
        di in smcl in gr "{hline 78}"
    }
    if "`warning'"=="" {
        di in blu "convergence not achieved"
    }
    if "`vce'"=="" {
        capture mat $ML_V = syminv($ML_V)
        if _rc {
            di in red "Hessian has become unstable or " /*
                */ "asymmetric (M2)"
            exit _rc
        }
    }
    else mat $ML_V = J($ML_k,$ML_k,0)
end
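
* SetTrace: maps the -nolog-, -trace-, and -showstep- options onto ML_trace
* (0 = nolog, 1 = log, 2 = trace, 3 = showstep, 4 = showstep + trace) and the
* -gradient-/-hessian- options onto the ML_dider encoding documented inside.
* (Description inferred from the code; the truncated option locals
* (showste, gradien) are kept exactly as in the source.)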
program define SetTrace
    syntax [, noLOg TRace GRADient HESSian SHOWSTEP ]
    if "`showste'"!="" {
        if "`trace'"!="" {
            global ML_trace 4  /* = showstep trace */
        }
        else global ML_trace 3  /* = showstep */
    }
    else if "`trace'"!="" {
        global ML_trace 2
    }
    else if "`log'"!="" {
        global ML_trace 0  /* = -nolog- */
    }
    else global ML_trace 1  /* = -log- */
    if $ML_trace {
        global ML_dider = ("`gradien'"!="") + 2*("`hessian'"!="")
        /*
            ML_dider == 0 => <nothing>
            ML_dider == 1 => gradient
            ML_dider == 2 => hessian
            ML_dider == 3 => gradient and hessian
        */
        if $ML_trace == 4 {  /* display gradient */
            if $ML_dider == 0 {
                global ML_dider 1
            }
            if $ML_dider == 2 {
                global ML_dider 3
            }
        }
    }
    else global ML_dider 0  /* -nolog- overrides -gradient- and -hessian- */
end
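
* CheckSco: returns r(okay)=1 when every score variable is nonmissing on the
* estimation sample, else r(okay)=0; when scores are incomplete, the caller
* posts a zero VCE and e(rc)=504 instead of a robust VCE.
* (Description inferred from the code.)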
program define CheckSco /* scorevars */, rclass
    local i 1
    if "$ML_vscr" == "" {
        local numscr $ML_n
    }
    else local numscr $ML_k
    while `i' <= `numscr' {
        capture assert ``i''<. if $ML_samp
        if _rc {
            return scalar okay = 0
            exit
        }
        local i = `i' + 1
    }
    return scalar okay = 1
end
exit
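
Hypothetical usage sketch, not part of the original file: a minimal
method-lf evaluator and a call that exercises the robust branch above.
The program name, equation names, and dataset are illustrative only.

    program mynormal_lf
        version 9
        args lnf mu lnsigma
        // log likelihood of a normal linear model; $ML_y1 is the depvar
        quietly replace `lnf' = ln(normalden($ML_y1, `mu', exp(`lnsigma')))
    end

    sysuse auto, clear
    ml model lf mynormal_lf (mu: mpg = weight) /lnsigma, robust
    ml maximize

With -robust-, the code above replaces the conventional e(V) with the
_robust sandwich estimate computed from the equation-level scores.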