/*
************************************************************************
* (C) Copyright 1999, Peter Lenk. All Rights Reserved.
* LOGIT2.GSS
* Select one of mvar+1 alternatives where the last alternative is
* the base brand.
*---------> Different choice matrices.
*
* P(C_{ij} = k|X_{ij}) = exp(X_{ij}[k,.]beta_i)/(1 + sum_{l=1}^{mvar} exp(X_{ij}[l,.]beta_i))
*                                                      for k = 1, ..., mvar, and
* P(C_{ij} = mvar+1|X_{ij}) = 1/(1 + sum_{l=1}^{mvar} exp(X_{ij}[l,.]beta_i)),
* for i = 1, ..., nsub
*     j = 1, ..., yrows[i]
* Alternative mvar+1 is the base alternative; its utility is normalized to zero.
*
*
* beta_i is a rankx x 1 vector of subject-level coefficients
*
* X_{ij} is mvar x rankx
* X will have brand intercepts for the first mvar brands,
* and coefficients for price and advertising.
* To identify the model, we fix the intercept for the last brand to zero.
*
*
* beta_i = Theta'Z_i + delta_i
* delta_i is N(0,Lambda)
*
* Priors:
* Theta is matrix normal(u0,v0).
* That is, vec(Theta) is N(vec(u0),v0), where vec stacks the columns of Theta.
* (The code below works with vecr(Theta), which stacks the rows; since
* u0 = 0 and v0 is proportional to the identity, the two orderings give
* the same prior.)
* Lambda is Inverted Wishart(f0, g0)
***********************************************************************
*/
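/*
* The implied log likelihood for subject i, summed over occasions, is
* l_i(beta_i) = sum_j [ x_{ij,c_ij}'beta_i - ln(1 + sum_{l=1}^{mvar} exp(x_{ij,l}'beta_i)) ],
* where c_ij is the chosen alternative and the base alternative's utility
* contribution is zero. This is the quantity evaluated by loglike and used
* in the Metropolis step for beta_i below.
*/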
new;
outfile = "results1.dat"; @ Specify output file for saving results @
@ outfile is a string variable that contains a file name @
inxy = "xpdata"; @ Name of Gauss file with X, Choice data @
inz = "zdata"; @ Name of Gauss file with Z data @
flagtrue = 1; @ 1 -> true parameters are known from a simulation @
/*
********************************************************************
* Initialize parameters for MCMC
********************************************************************
*/
smcmc = 1000; @ number of iterations to save for analysis @
skip = 1; @ Save every skip iterations @
nblow = 1000; @ Initial transition iterations @
nmcmc = nblow + skip*smcmc; @ total number of iterations @
metstd = 0.3; @ STD for random walk metropolis @
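@ Rule of thumb: tune metstd so that the Metropolis acceptance rate mixp @
@ (computed after the MCMC loop) is moderate, roughly 20% to 50%;        @
@ increasing metstd lowers the acceptance rate.                           @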
/*
********************************************************************
* Get data
********************************************************************
*/
@ Get dimensions and pointers @
load yrows = yrows; @ Number of observations per subject @
load lxy = lxy; @ xij = xpdata[lxy[i,j]:uxy[i,j],.] (occasion j of subject i) @
load uxy = uxy;
nsub = rows(yrows); @ number of subjects @
mvar = uxy[1,2] - uxy[1,1]; @ Y_{ij} is an mvar vector: each occasion occupies mvar rows @
ntot = sumc(yrows); @ total number of choice occasions @
@ Input Gauss files @
open f1 = ^inxy; @ Get Gauss file for X, Y data @
@ Opens Gauss file & assigns file handle f1 @
xpdata = readr(f1,rowsf(f1)); @ "p" for picks @
@ readr reads in Gauss file with file handle f1. @
@ rowsf(f1) returns the number of rows in the Gauss file. @
@ readr reads rowsf(f1) rows, which is the entire dataset. @
ci = close(f1);
xpnames = setvars(inxy); @ Get the variable names that accompany the X, Y data @
ynames = xpnames[1:mvar]; @ Use the intercept names for the components of Y @
rankxp = cols(xpdata);
rankx = rankxp - 1; @ # of X variables (includes the brand intercepts) @
xnames = xpnames[1:rankx];
@ The last column of xpdata is the choice vector. @
open f1 = ^inz;
zdata = readr(f1,rowsf(f1)); @ First column of zdata is a vector of ones @
ci = close(f1);
znames = setvars(inz);
rankz = cols(zdata); @ # of Z variables (includes intercept) @
thdim = rankx*rankz; @ dimension of vec(theta) @
@ Compute some sufficient statistics @
ztz = zdata'zdata;
/*
********************************************************************
* Initialize Priors
********************************************************************
*/
@ Prior for theta is N(u0,v0) @
u0 = zeros(thdim,1);
v0 = 100*eye(thdim); @ thdim = rankx*rankz @
v0i = invpd(v0); @ used in updating theta @
v0iu0 = v0i*u0; @ used in updating theta @
@ Lambda^{-1} is W_rankx(f0,g0 ) @
@ f0 = prior df, g0 = prior scale matrix @
f0 = rankx+2; f0n = f0 + nsub;
g0i = eye(rankx); @ g0^{-1} @
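/*
* Interpretation (standard Wishart parameterization, E[Lambda^{-1}] = f0*g0):
* E[Lambda] = g0^{-1}/(f0 - rankx - 1), so f0 = rankx+2 is the smallest
* integer df for which the prior mean of Lambda exists, and g0 = I makes
* that prior mean the identity matrix: a proper but diffuse prior.
*/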
/*
*******************************************************************
* Initialize MCMC
******************************************************************
*/
beta = zeros(nsub,rankx);
llbeta = loglike(beta); @ Compute log-likelihood at beta @
@ llbeta is used in metropolis @
theta = zeros(rankz,rankx);
lambda = eye(rankx);
lambdai = invpd(lambda);
@ Define data structures for saving iterates & computing posterior means & std @
betam = zeros(nsub,rankx); @ posterior mean of beta @
betas = zeros(nsub,rankx); @ posterior std of beta @
thetag = zeros(smcmc,thdim);
c = rankx*(rankx+1)/2; @ number of distinct elements of the symmetric matrix Lambda @
lambdag = zeros(smcmc,c); @ save vech(lambda) at each kept iteration @
/*
********************************************************************
* Do MCMC
********************************************************************
*/
mixp = 0; @ counts Metropolis acceptances @
@ Do the initial transition period @
for i1 (1,nblow,1); imcmc = i1;
call getlogit;
endfor;
for i1 (1,smcmc,1); imcmc = i1; @ Save smcmc iterations @
for i2 (1,skip,1); jmcmc = i2; @ Save every skip iterations @
call getlogit;
endfor;
@ Save the random deviates @
thetag[imcmc,.] = vecr(theta)';
betam = betam + beta; @ accumulate sum of draws @
betas = betas + beta^2; @ accumulate sum of squared draws (for posterior std) @
lambdag[imcmc,.]= vech(lambda)';
endfor;
/*
******************************************************************
* Compute Posterior Means and STD
******************************************************************
*/
mixp = mixp/(nmcmc*nsub); @ overall Metropolis acceptance rate @
betam = betam/smcmc; @ posterior means of beta @
thetam = reshape(meanc(thetag),rankz,rankx); @ posterior mean of Theta @
lambdam = xpnd(meanc(lambdag)); @ posterior mean of Lambda (expand vech) @
betas = sqrt( abs(betas - smcmc*betam^2)/smcmc); @ posterior stds of beta @
thetas = reshape(stdc(thetag),rankz,rankx); @ posterior stds of Theta @
lambdas = xpnd(stdc(lambdag)); @ posterior stds of Lambda @
/*
****************************************************************
* Do some output
****************************************************************
*/
call outputanal;
@ Plot the saved iterates against the iteration number @
t = seqa(nblow+skip,skip,smcmc); @ saved iteration number @
title("Theta versus Iteration");
xy(t,thetag);
title("Lambda versus Iteration");
xy(t,lambdag);
graphset;
end;
/*
****************************************************************
* GETLOGIT
* Does one MCMC iteration of the HB multinomial logit model.
* INPUT
* Global Variables
* OUTPUT
* Global Variables
****************************************************************
*/
PROC (0) = getlogit;
local thz, bnew, llbnew, i0, i, mui, testp, u;
thz = zdata*theta;
/*
***********************************************************
* Generate beta
* Use symmetric random walk metropolis
**********************************************************
*/
bnew = beta + metstd*rndn(nsub,rankx);
llbnew = loglike(bnew); @ Get log likelihood at new beta @
u = ln(rndu(nsub,1));
@ Do metropolis step @
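/*
* The random walk proposal is symmetric, so the log acceptance ratio
* reduces to the difference in log posteriors:
* [loglike(bnew_i) + log N(bnew_i; Theta'z_i, Lambda)]
*   - [loglike(beta_i) + log N(beta_i; Theta'z_i, Lambda)];
* the normal constants -(rankx/2)*ln(2*pi) - ln|Lambda|/2 cancel,
* leaving the quadratic forms in testp below.
*/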
for i0 (1,nsub, 1); i = i0;
mui = thz[i,.]';
testp = llbnew[i] - llbeta[i] @log Likelihood @
- ( bnew[i,.]' - mui )'lambdai*( bnew[i,.]' - mui )/2 @ "Priors" @
+ ( beta[i,.]' - mui )'lambdai*( beta[i,.]' - mui )/2;
if u[i] < testp; @ Accept Candidate @
mixp = mixp + 1;
beta[i,.] = bnew[i,.];
llbeta[i] = llbnew[i];
endif; @ else reject candidate & stay pat @
endfor;
/*
***********************************************************
* Generate Theta and Lambda from multivariate model:
* B = Z*Theta + N(0,Lambda)
************************************************************
*/
{theta, lambda, lambdai} =
getmulreg(beta,zdata,ztz,theta,lambda,lambdai,v0i,v0iu0,f0n,g0i);
endp;
/*
****************************************************************
* GETMULREG
* Generate multivariate regression parameters.
* Yd = Xd*parmat + epsilon
*
* INPUT
* yd = dependent variables
* xd = independent variables
* xdtxd = xd'xd
*
* parmat = current value of coefficient matrix
* var = current value of covariance matrix
* vari = its inverse
* v0i = prior precision for vecr(parmat)
* v0iu0 = prior precision*prior mean for vecr(parmat)
* f0n = posterior df for var
* g0i = inverse of the prior scale matrix for var
*
* OUTPUT
* parmat = updated coefficient matrix (same dimensions as the input)
* var = updated covariance matrix
* vari = updated inverse of var
*
* Calling Statement:
{parmat, var, vari} = getmulreg(yd,xd,xdtxd,parmat,var,vari,v0i,v0iu0,f0n,g0i);
****************************************************************
*/
PROC (3) = getmulreg(yd,xd,xdtxd,parmat,var,vari,v0i,v0iu0,f0n,g0i);
local vb12, ubn, par, pdim, resid, gni, gn, rp, cp;
rp = rows(parmat);
cp = cols(parmat);
pdim = rp*cp;
/*
***********************************************************
* Generate parmat from N_{rp x cp}(M,v)
* par = vecr(parmat)
* par is N(u,V) where u = vec(M');
* V = (Xd'Xd.*.Var^{-1} + V_0^{-1})^{-1}
* u = V*( (Xd'.*.Var^{-1})*vec(Yd') + V_0^{-1}u_0 )
*************************************************************
*/
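/*
* Implementation note: vb12 is the upper Cholesky factor of the
* posterior precision, vb12'vb12 = V^{-1}.  cholsol solves
* (vb12'vb12)*par = ubn + vb12'z with z ~ N(0,I), giving
* par = V*ubn + V*vb12'z, which has mean u and covariance
* V*(vb12'vb12)*V = V, i.e. par ~ N(u,V) as required.
*/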
vb12 = chol(xdtxd.*.vari + v0i);
ubn = ( (xd').*.vari )*vecr(yd) + v0iu0;
par = cholsol(ubn + vb12'rndn(pdim,1), vb12);
parmat = reshape(par,rp,cp);
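/*
***********************************************************
* Generate Var^{-1} from Wishart(f0n, Gn), where
* Gn = (g0i + resid'resid)^{-1}.
* Minimal sketch of this update: the draw uses the generic
* construction W = A'A with the f0n rows of A iid N(0,Gn);
* an equivalent Wishart sampler could be substituted.
***********************************************************
*/
resid = yd - xd*parmat; @ residual matrix @
gni = g0i + resid'resid; @ inverse of the posterior scale matrix @
gn = invpd(gni); @ posterior scale matrix Gn @
vari = rndn(f0n,cp)*chol(gn); @ f0n rows, each N(0,Gn) @
vari = vari'vari; @ Wishart(f0n,Gn) draw = updated Var^{-1} @
var = invpd(vari); @ updated covariance matrix @
retp(parmat,var,vari);
endp;
/*
****************************************************************
* LOGLIKE
* Minimal sketch of the log-likelihood evaluation, assuming the
* data layout described above: rows lxy[i,j]:uxy[i,j] of xpdata
* hold occasion j for subject i, columns 1:rankx hold X_{ij},
* and the last column holds 0/1 pick indicators for the mvar
* non-base alternatives (all zeros when the base is chosen).
* INPUT
* b = nsub x rankx matrix of subject-level coefficients
* OUTPUT
* nsub x 1 vector of log likelihoods
****************************************************************
*/
PROC (1) = loglike(b);
local llvec, i0, i, j0, j, xij, yij, xb;
llvec = zeros(nsub,1);
for i0 (1,nsub,1); i = i0;
    for j0 (1,yrows[i],1); j = j0;
        xij = xpdata[lxy[i,j]:uxy[i,j],1:rankx]; @ mvar x rankx design @
        yij = xpdata[lxy[i,j]:uxy[i,j],rankxp]; @ 0/1 pick indicators @
        xb = xij*b[i,.]'; @ mvar x 1 utilities @
        llvec[i] = llvec[i] + yij'xb - ln(1 + sumc(exp(xb)));
    endfor;
endfor;
retp(llvec);
endp;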