[r-cran-bayesm] 03/44: Import Upstream version 1.0-1
Andreas Tille
tille at debian.org
Thu Sep 7 11:16:19 UTC 2017
This is an automated email from the git hooks/post-receive script.
tille pushed a commit to branch master
in repository r-cran-bayesm.
commit 0643053577b25893bb8a2fd9581c00324925a864
Author: Andreas Tille <tille at debian.org>
Date: Thu Sep 7 13:08:06 2017 +0200
Import Upstream version 1.0-1
---
DESCRIPTION | 6 +-
NAMESPACE | 3 +-
R/llmnp.R | 2 +-
R/mixDen.R | 6 +-
R/momMix.R | 2 +
R/numEff.R | 4 +-
R/rhierBinLogit.R | 224 +++++++++++++++++++++++++++++++++++++
R/rhierLinearModel.R | 8 +-
R/rhierMnlRwMixture.R | 41 ++++---
R/rmixture.R | 6 +-
R/rmnlIndepMetrop.R | 18 +--
R/rmvpGibbs.R | 3 +-
R/{rScaleUsage.R => rscaleUsage.R} | 4 +-
R/simmnl.R | 24 ++--
data/Scotch.rda | Bin 0 -> 212574 bytes
data/bank.rda | Bin 0 -> 1154613 bytes
data/cheese.rda | Bin 0 -> 201974 bytes
data/customerSat.rda | Bin 0 -> 93417 bytes
data/margarine.rda | Bin 0 -> 491229 bytes
inst/doc/bayesm-manual.pdf | Bin 0 -> 369507 bytes
man/Scotch.Rd | 86 ++++++++++++++
man/bank.Rd | 113 +++++++++++++++++++
man/cgetC.Rd | 2 +-
man/cheese.Rd | 97 ++++++++++++++++
man/condMom.Rd | 4 +-
man/createX.Rd | 9 +-
man/customerSat.Rd | 39 +++++++
man/llmnl.Rd | 2 +-
man/margarine.Rd | 136 ++++++++++++++++++++++
man/mixDen.Rd | 4 +-
man/numEff.Rd | 6 +-
man/rbprobitGibbs.Rd | 3 +-
man/rhierBinLogit.Rd | 133 ++++++++++++++++++++++
man/rhierLinearModel.Rd | 8 +-
man/rhierMnlRwMixture.Rd | 53 ++++-----
man/rivGibbs.Rd | 4 +-
man/rmixture.Rd | 6 +-
man/rmnlIndepMetrop.Rd | 17 ++-
man/rmnpGibbs.Rd | 6 +-
man/rmvpGibbs.Rd | 8 +-
man/rnmixGibbs.Rd | 19 ++--
man/rscaleUsage.Rd | 38 +++++--
man/simmnl.Rd | 6 +-
43 files changed, 1008 insertions(+), 142 deletions(-)
diff --git a/DESCRIPTION b/DESCRIPTION
index 7ac5454..051ef4a 100755
--- a/DESCRIPTION
+++ b/DESCRIPTION
@@ -1,6 +1,6 @@
Package: bayesm
-Version: 0.0-2
-Date: 2005-04-26
+Version: 1.0-1
+Date: 2005-05-16
Title:Bayesian Inference for Marketing/Micro-econometrics
Author: Peter Rossi <peter.rossi at ChicagoGsb.edu>,
Rob McCulloch <robert.mcculloch at ChicagoGsb.edu>.
@@ -25,4 +25,4 @@ Description: bayesm covers many important models used
Marketing by Allenby, McCulloch and Rossi.
License: GPL (version 2 or later)
URL: http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html
-Packaged: Tue Apr 26 16:13:16 2005; per
+Packaged: Wed May 18 17:24:34 2005; per
diff --git a/NAMESPACE b/NAMESPACE
index 7b2ac6e..7e2f779 100755
--- a/NAMESPACE
+++ b/NAMESPACE
@@ -5,7 +5,8 @@ lndIChisq,lndIWishart,lndMvn,lndMvst,mnlHess,momMix,nmat,numEff,rdirichlet,
rmixture,rmultireg,rmultiregfp,rwishart,rmvst,rtrun,rbprobitGibbs,runireg,
runiregGibbs,simmnl,simmnp,simmvp,simnhlogit,rmnpGibbs,rmixGibbs,rnmixGibbs,
rmvpGibbs,rhierLinearModel,rhierMnlRwMixture,simmnlwX,rivGibbs,
-rmnlIndepMetrop,rscaleUsage,ghkvec,condMom,logMargDenNR,init.rmultiregfp)
+rmnlIndepMetrop,rscaleUsage,ghkvec,condMom,logMargDenNR,init.rmultiregfp,
+rhierBinLogit)
diff --git a/R/llmnp.R b/R/llmnp.R
index 4c1daa0..6e1c9b2 100755
--- a/R/llmnp.R
+++ b/R/llmnp.R
@@ -49,7 +49,7 @@ logl=0.0
above=rep(0,pm1)
for (j in 1:pm1) {
muj=mu[,y==j]
- Aj=diag(rep(-1,pm1))
+ Aj=-diag(pm1)
Aj[,j]=rep(1,pm1)
trunpt=as.vector(-Aj%*%muj)
Lj=t(chol(Aj%*%Sigma%*%t(Aj)))
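
The llmnp.R hunk above replaces diag(rep(-1,pm1)) with -diag(pm1). The two forms agree for pm1 >= 2, and the new one also behaves when pm1 is 1, where diag() would treat a length-one vector as a dimension. A quick R sketch of the equivalence (illustration only, not from the package sources):

pm1 <- 3
A_old <- diag(rep(-1, pm1))   # diagonal of -1's built from an explicit vector
A_new <- -diag(pm1)           # negated identity matrix
identical(A_old, A_new)       # TRUE
-diag(1)                      # 1 x 1 matrix of -1; diag(rep(-1, 1)) would error instead
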
diff --git a/R/mixDen.R b/R/mixDen.R
index 3a63660..f911bdd 100755
--- a/R/mixDen.R
+++ b/R/mixDen.R
@@ -1,5 +1,5 @@
mixDen=
-function(x,p,comps)
+function(x,pvec,comps)
{
# Revision History:
# R. McCulloch 11/04
@@ -9,7 +9,7 @@ function(x,p,comps)
#
# arguments:
# x: ith columns gives evaluations for density of ith variable
-# p: prior probabilities of normal components
+# pvec: prior probabilities of normal components
# comps: list, each member is a list comp with ith normal component ~ N(comp[[1]],Sigma),
# Sigma = t(R)%*%R, R^{-1} = comp[[2]]
# Output:
@@ -45,7 +45,7 @@ nc = length(comps)
mars = ums(comps)
den = matrix(0.0,nrow(x),ncol(x))
for(i in 1:ncol(x)) {
- for(j in 1:nc) den[,i] = den[,i] + dnorm(x[,i],mean = mars$mu[j,i],sd=mars$sigma[j,i])*p[j]
+ for(j in 1:nc) den[,i] = den[,i] + dnorm(x[,i],mean = mars$mu[j,i],sd=mars$sigma[j,i])*pvec[j]
}
return(den)
}
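
mixDen's mixing-probability argument is renamed from p to pvec (matching the change to rmixture further down). A minimal sketch of a call with the renamed argument; the grid, weights, and component values below are made up for illustration:

library(bayesm)
grid  <- cbind(seq(-5, 5, length=100), seq(-5, 5, length=100))   # one column per variable
comps <- list(list(mu=c(0, 0), rooti=diag(2)),       # comp[[1]]=mu, comp[[2]]=R^{-1}, Sigma=I
              list(mu=c(2, 2), rooti=.5*diag(2)))    # R^{-1}=.5I, so Sigma=4I (sd 2)
pvec  <- c(.6, .4)
den   <- mixDen(grid, pvec, comps)    # 100 x 2 matrix of marginal mixture densities
matplot(grid[,1], den, type="l")
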
diff --git a/R/momMix.R b/R/momMix.R
index 72b1180..039c2eb 100755
--- a/R/momMix.R
+++ b/R/momMix.R
@@ -47,6 +47,8 @@ list(mu=mu,sigma=sigma)
}
#---------------------------------------------------------------------------------------
dim=length(compdraw[[1]][[1]][[1]])
+nc=length(compdraw[[1]])
+dim(probdraw)=c(length(compdraw),nc)
mu=double(dim)
sigma=matrix(double(dim*dim),ncol=dim)
sd=double(dim)
diff --git a/R/numEff.R b/R/numEff.R
index f3d43a3..f73f279 100755
--- a/R/numEff.R
+++ b/R/numEff.R
@@ -1,5 +1,5 @@
numEff=
-function(x,m=max(length(x),(100/sqrt(5000))*sqrt(length(x))))
+function(x,m=as.integer(min(length(x),(100/sqrt(5000))*sqrt(length(x)))))
{
#
# P. Rossi
@@ -20,6 +20,6 @@ wgt=as.vector(seq(m,1,-1))/(m+1)
z=acf(x,lag.max=m,plot=FALSE)
f=1+2*wgt%*%as.vector(z$acf[-1])
stderr=sqrt(var(x)*f/length(x))
-list(stderr=stderr,f=f)
+list(stderr=stderr,f=f,m=m)
}
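
The numEff change above swaps max for as.integer(min(...)) in the default lag count m (the old default asked acf() for length(x) lags) and returns m alongside the standard error. A sketch tracing the same tapered-weight computation with the corrected default (the AR(1) series is just an example):

x <- as.vector(arima.sim(list(ar=.8), n=5000))   # an autocorrelated series
m <- as.integer(min(length(x), (100/sqrt(5000))*sqrt(length(x))))   # 100 lags here
wgt <- as.vector(seq(m, 1, -1))/(m+1)
f <- 1 + 2*wgt %*% as.vector(acf(x, lag.max=m, plot=FALSE)$acf[-1])  # autocorrelation inflation factor
sqrt(var(x)*f/length(x))     # numerical (effective-sample) standard error
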
diff --git a/R/rhierBinLogit.R b/R/rhierBinLogit.R
new file mode 100755
index 0000000..a778b7c
--- /dev/null
+++ b/R/rhierBinLogit.R
@@ -0,0 +1,224 @@
+#
+# -----------------------------------------------------------------------------
+#
+rhierBinLogit=
+function(Data,Prior,Mcmc){
+#
+# revision history:
+# changed 5/12/05 by Rossi to add error checking
+#
+# purpose: run binary heterogeneous logit model
+#
+# Arguments:
+# Data contains a list of (lgtdata[[i]],Z)
+# lgtdata[[i]]=list(y,X)
+# y is index of brand chosen, y=1 is exp[X'beta]/(1+exp[X'beta])
+#            X is a matrix that is n_i by nvar
+# Z is a matrix of demographic variables nlgt*nz that have been
+# mean centered so that the intercept is interpretable
+# Prior contains a list of (nu,V,Deltabar,ADelta)
+# beta_i ~ N(Delta,Vbeta)
+# vec(Delta) ~ N(vec(Deltabar),Vbeta (x) ADelta^-1)
+# Vbeta ~ IW(nu,V)
+# Mcmc is a list of (sbeta,R,keep)
+# sbeta is scale factor for RW increment for beta_is
+# R is number of draws
+# keep every keepth draw
+#
+# Output:
+#     a list of Deltadraw (R/keep x nvar*nz), Vbetadraw (R/keep x nvar**2),
+#     llike (R/keep), betadraw (nlgt x nvar x R/keep array of draws of betas)
+# nunits=length(lgtdata)
+#
+# define functions needed
+#
+# ------------------------------------------------------------------------
+#
+loglike=
+function(y,X,beta) {
+# function computes log likelihood of data for binomial logit model
+# Pr(y=1) = 1 - Pr(y=0) = exp[X'beta]/(1+exp[X'beta])
+prob = exp(X%*%beta)/(1+exp(X%*%beta))
+prob = prob*y + (1-prob)*(1-y)
+sum(log(prob))
+}
+#
+#
+# check arguments
+#
+pandterm=function(message) { stop(message,call.=FALSE) }
+if(missing(Data)) {pandterm("Requires Data argument -- list of m,lgtdata, and (possibly) Z")}
+ if(is.null(Data$lgtdata)) {pandterm("Requires Data element lgtdata (list of data for each unit)")}
+ lgtdata=Data$lgtdata
+ nlgt=length(lgtdata)
+if(is.null(Data$Z)) { cat("Z not specified -- putting in iota",fill=TRUE); fsh() ; Z=matrix(rep(1,nlgt),ncol=1)}
+ else {if (nrow(Data$Z) != nlgt) {pandterm(paste("Nrow(Z) ",nrow(Z),"ne number logits ",nlgt))}
+ else {Z=Data$Z}}
+ nz=ncol(Z)
+#
+# check lgtdata for validity
+#
+m=2 # set two choice alternatives for Greg's code
+ypooled=NULL
+Xpooled=NULL
+if(!is.null(lgtdata[[1]]$X)) {oldncol=ncol(lgtdata[[1]]$X)}
+for (i in 1:nlgt)
+{
+ if(is.null(lgtdata[[i]]$y)) {pandterm(paste("Requires element y of lgtdata[[",i,"]]"))}
+ if(is.null(lgtdata[[i]]$X)) {pandterm(paste("Requires element X of lgtdata[[",i,"]]"))}
+ ypooled=c(ypooled,lgtdata[[i]]$y)
+ nrowX=nrow(lgtdata[[i]]$X)
+ if((nrowX) !=length(lgtdata[[i]]$y)) {pandterm(paste("nrow(X) ne length(yi); exception at unit",i))}
+ newncol=ncol(lgtdata[[i]]$X)
+ if(newncol != oldncol) {pandterm(paste("All X elements must have same # of cols; exception at unit",i))}
+ Xpooled=rbind(Xpooled,lgtdata[[i]]$X)
+ oldncol=newncol
+}
+nvar=ncol(Xpooled)
+levely=as.numeric(levels(as.factor(ypooled)))
+if(length(levely) != m) {pandterm(paste("y takes on ",length(levely)," values -- must be = m"))}
+bady=FALSE
+for (i in 0:1 )
+{
+ if(levely[i+1] != i) bady=TRUE
+}
+cat("Table of Y values pooled over all units",fill=TRUE)
+print(table(ypooled))
+if (bady)
+ {pandterm("Invalid Y")}
+#
+# check on prior
+#
+if(missing(Prior)){
+ nu=nvar+3
+ V=nu*diag(nvar)
+ Deltabar=matrix(rep(0,nz*nvar),ncol=nvar)
+ ADelta=.01*diag(nz) }
+else {
+ if(is.null(Prior$nu)) {nu=nvar+3} else {nu=Prior$nu}
+ if(nu < 1) {pandterm("invalid nu value")}
+ if(is.null(Prior$V)) {V=nu*diag(rep(1,nvar))} else {V=Prior$V}
+ if(sum(dim(V)==c(nvar,nvar)) !=2) pandterm("Invalid V in prior")
+ if(is.null(Prior$ADelta) ) {ADelta=.01*diag(nz)} else {ADelta=Prior$ADelta}
+ if(ncol(ADelta) != nz | nrow(ADelta) != nz) {pandterm("ADelta must be nz x nz")}
+ if(is.null(Prior$Deltabar) ) {Deltabar=matrix(rep(0,nz*nvar),ncol=nvar)} else {Deltabar=Prior$Deltabar}
+}
+#
+# check on Mcmc
+#
+if(missing(Mcmc))
+ {pandterm("Requires Mcmc list argument")}
+else
+ {
+ if(is.null(Mcmc$sbeta)) {sbeta=.2} else {sbeta=Mcmc$sbeta}
+ if(is.null(Mcmc$keep)) {keep=1} else {keep=Mcmc$keep}
+ if(is.null(Mcmc$R)) {pandterm("Requires R argument in Mcmc list")} else {R=Mcmc$R}
+ }
+#
+# print out problem
+#
+cat(" ",fill=TRUE)
+cat("Attempting MCMC Inference for Hierarchical Binary Logit:",fill=TRUE)
+cat(paste(" ",nvar," variables in X"),fill=TRUE)
+cat(paste(" ",nz," variables in Z"),fill=TRUE)
+cat(paste(" for ",nlgt," cross-sectional units"),fill=TRUE)
+cat(" ",fill=TRUE)
+cat("Prior Parms: ",fill=TRUE)
+cat("nu =",nu,fill=TRUE)
+cat("V ",fill=TRUE)
+print(V)
+cat("Deltabar",fill=TRUE)
+print(Deltabar)
+cat("ADelta",fill=TRUE)
+print(ADelta)
+cat(" ",fill=TRUE)
+cat("MCMC Parms: ",fill=TRUE)
+cat(paste("sbeta=",round(sbeta,3)," R= ",R," keep= ",keep),fill=TRUE)
+cat("",fill=TRUE)
+
+nlgt=length(lgtdata)
+nvar=ncol(lgtdata[[1]]$X)
+nz=ncol(Z)
+
+
+
+#
+# initialize storage for draws
+#
+Vbetadraw=matrix(double(floor(R/keep)*nvar*nvar),ncol=nvar*nvar)
+betadraw=array(double(floor(R/keep)*nlgt*nvar),dim=c(nlgt,nvar,floor(R/keep)))
+Deltadraw=matrix(double(floor(R/keep)*nvar*nz),ncol=nvar*nz)
+oldbetas=matrix(double(nlgt*nvar),ncol=nvar)
+oldVbeta=diag(nvar)
+oldVbetai=diag(nvar)
+oldDelta=matrix(double(nvar*nz),ncol=nvar)
+
+betad = array(0,dim=c(nvar))
+betan = array(0,dim=c(nvar))
+reject = array(0,dim=c(R/keep))
+llike=array(0,dim=c(R/keep))
+
+#
+# set up fixed parm for the draw of Vbeta, Delta=Delta
+#
+Fparm=init.rmultiregfp(Z,ADelta,Deltabar,nu,V)
+
+itime=proc.time()[3]
+cat("MCMC Iteration (est time to end - min)",fill=TRUE)
+fsh()
+for (j in 1:R) {
+ rej = 0
+ logl = 0
+ sV = sbeta*oldVbeta
+ root=t(chol(sV))
+
+# Draw B-h|B-bar, V
+
+ for (i in 1:nlgt) {
+
+ betad = oldbetas[i,]
+ betan = betad + root%*%rnorm(nvar)
+# data
+ lognew = loglike(lgtdata[[i]]$y,lgtdata[[i]]$X,betan)
+ logold = loglike(lgtdata[[i]]$y,lgtdata[[i]]$X,betad)
+# heterogeneity
+logknew = -.5*(t(betan)-Z[i,]%*%oldDelta) %*% oldVbetai %*% (betan-t(Z[i,]%*%oldDelta))
+logkold = -.5*(t(betad)-Z[i,]%*%oldDelta) %*% oldVbetai %*% (betad-t(Z[i,]%*%oldDelta))
+# MH step
+ alpha = exp(lognew + logknew - logold - logkold)
+ if(alpha=="NaN") alpha=-1
+ u = runif(n=1,min=0, max=1)
+ if(u < alpha) {
+ oldbetas[i,] = betan
+ logl = logl + lognew } else {
+ logl = logl + logold
+ rej = rej+1 }
+ }
+# Draw B-bar and V as a multivariate regression
+ out=rmultiregfp(oldbetas,Z,Fparm)
+ oldDelta=out$B
+ oldVbeta=out$Sigma
+ oldVbetai=chol2inv(chol(oldVbeta))
+
+ if((j%%100)==0)
+ {
+ ctime=proc.time()[3]
+ timetoend=((ctime-itime)/j)*(R-j)
+ cat(" ",j," (",round(timetoend/60,1),")",fill=TRUE)
+ fsh() }
+ mkeep=j/keep
+ if(mkeep*keep == (floor(mkeep)*keep))
+ {Deltadraw[mkeep,]=as.vector(oldDelta)
+ Vbetadraw[mkeep,]=as.vector(oldVbeta)
+ betadraw[,,mkeep]=oldbetas
+ llike[mkeep]=logl
+ reject[mkeep]=rej/nlgt
+ }
+}
+ctime=proc.time()[3]
+cat(" Total Time Elapsed: ",round((ctime-itime)/60,2),fill=TRUE)
+
+return(list(betadraw=betadraw,Vbetadraw=Vbetadraw,Deltadraw=Deltadraw,llike=llike,reject=reject))
+}
+
+
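
For a quick end-to-end check of the new sampler, a small simulated-data run along the lines of the rhierBinLogit.Rd example further down (all settings here are arbitrary and kept small):

library(bayesm)
set.seed(66)
nlgt <- 100; nobs <- 20; nvar <- 3; nz <- 2
Z <- cbind(rep(1, nlgt), runif(nlgt, -1, 1))          # intercept plus one covariate
Z[,2] <- Z[,2] - mean(Z[,2])                          # mean-center the covariate
Delta <- matrix(c(-1, 0, 1, .5, -.5, 0), nrow=nz, byrow=TRUE)
lgtdata <- NULL
for (i in 1:nlgt) {
  beta <- as.vector(t(Delta) %*% Z[i, ]) + .5*rnorm(nvar)
  X <- matrix(runif(nobs*nvar, -1, 1), ncol=nvar)
  pr <- exp(X %*% beta)/(1 + exp(X %*% beta))
  lgtdata[[i]] <- list(y=as.integer(runif(nobs) < pr), X=X)
}
out <- rhierBinLogit(Data=list(lgtdata=lgtdata, Z=Z), Mcmc=list(R=2000, keep=5))
dim(out$betadraw)                                # nlgt x nvar x R/keep
matrix(apply(out$Deltadraw, 2, mean), nrow=nz)   # posterior means (ignoring burn-in); compare with Delta
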
diff --git a/R/rhierLinearModel.R b/R/rhierLinearModel.R
index e6c529c..44a4bcd 100755
--- a/R/rhierLinearModel.R
+++ b/R/rhierLinearModel.R
@@ -101,10 +101,11 @@ pandterm=function(message) {stop(message,call.=FALSE)}
if(missing(Data)) {pandterm("Requires Data argument -- list of regdata and Z")}
if(is.null(Data$regdata)) {pandterm("Requires Data element regdata")}
regdata=Data$regdata
- if(is.null(Data$Z)) {pandterm("Requires Data element Z")}
- Z=Data$Z
-nz=ncol(Z)
nreg=length(regdata)
+if(is.null(Data$Z)) { cat("Z not specified -- putting in iota",fill=TRUE); fsh() ; Z=matrix(rep(1,nreg),ncol=1)}
+ else {if (nrow(Data$Z) != nreg) {pandterm(paste("Nrow(Z) ",nrow(Z),"ne number regressions ",nlgt))}
+ else {Z=Data$Z}}
+nz=ncol(Z)
#
# check data for validity
#
@@ -164,6 +165,7 @@ else
cat(" ", fill=TRUE)
cat("Starting Gibbs Sampler for Linear Hierarchical Model",fill=TRUE)
cat(" ",nreg," Regressions",fill=TRUE)
+cat(" ",ncol(Z)," Variables in Z (if 1, then only intercept)",fill=TRUE)
cat(" ", fill=TRUE)
cat("Prior Parms: ",fill=TRUE)
cat("Deltabar",fill=TRUE)
diff --git a/R/rhierMnlRwMixture.R b/R/rhierMnlRwMixture.R
index 1d6f524..275be01 100755
--- a/R/rhierMnlRwMixture.R
+++ b/R/rhierMnlRwMixture.R
@@ -11,13 +11,13 @@ function(Data,Prior,Mcmc)
# uses normal approximation to pooled likelihood
#
# Arguments:
-# Data contains a list of (m,lgtdata, and possibly Z)
-# m is number of choice alternatives
+# Data contains a list of (p,lgtdata, and possibly Z)
+# p is number of choice alternatives
# lgtdata is a list of lists (one list per unit)
# lgtdata[[i]]=list(y,X)
# y is a vector indicating alternative chosen
-# integers 1:m indicate alternative
-# X is a length(y)*m x nvar matrix of values of
+# integers 1:p indicate alternative
+# X is a length(y)*p x nvar matrix of values of
# X vars including intercepts
# Z is an length(lgtdata) x nz matrix of values of variables
# note: Z should NOT contain an intercept
@@ -55,9 +55,9 @@ function(Data,Prior,Mcmc)
# check arguments
#
pandterm=function(message) { stop(message,call.=FALSE) }
-if(missing(Data)) {pandterm("Requires Data argument -- list of m,lgtdata, and (possibly) Z")}
- if(is.null(Data$m)) {pandterm("Requires Data element m (# chce alternatives)") }
- m=Data$m
+if(missing(Data)) {pandterm("Requires Data argument -- list of p,lgtdata, and (possibly) Z")}
+  if(is.null(Data$p)) {pandterm("Requires Data element p (# choice alternatives)") }
+ p=Data$p
if(is.null(Data$lgtdata)) {pandterm("Requires Data element lgtdata (list of data for each unit)")}
lgtdata=Data$lgtdata
nlgt=length(lgtdata)
@@ -65,7 +65,13 @@ if(missing(Data)) {pandterm("Requires Data argument -- list of m,lgtdata, and (p
if(is.null(Data$Z)) { cat("Z not specified",fill=TRUE); fsh() ; drawdelta=FALSE}
else {if (nrow(Data$Z) != nlgt) {pandterm(paste("Nrow(Z) ",nrow(Z),"ne number logits ",nlgt))}
else {Z=Data$Z}}
- if(drawdelta) nz=ncol(Z)
+ if(drawdelta) {
+ nz=ncol(Z)
+ colmeans=apply(Z,2,mean)
+ if(sum(colmeans) > .00001)
+ {pandterm(paste("Z does not appear to be de-meaned: colmeans= ",colmeans))}
+ }
+
#
# check lgtdata for validity
#
@@ -78,7 +84,7 @@ for (i in 1:nlgt)
if(is.null(lgtdata[[i]]$X)) {pandterm(paste("Requires element X of lgtdata[[",i,"]]"))}
ypooled=c(ypooled,lgtdata[[i]]$y)
nrowX=nrow(lgtdata[[i]]$X)
- if((nrowX/m) !=length(lgtdata[[i]]$y)) {pandterm(paste("nrow(X) ne m*length(yi); exception at unit",i))}
+ if((nrowX/p) !=length(lgtdata[[i]]$y)) {pandterm(paste("nrow(X) ne p*length(yi); exception at unit",i))}
newncol=ncol(lgtdata[[i]]$X)
if(newncol != oldncol) {pandterm(paste("All X elements must have same # of cols; exception at unit",i))}
Xpooled=rbind(Xpooled,lgtdata[[i]]$X)
@@ -86,9 +92,9 @@ for (i in 1:nlgt)
}
nvar=ncol(Xpooled)
levely=as.numeric(levels(as.factor(ypooled)))
-if(length(levely) != m) {pandterm(paste("y takes on ",length(levely)," values -- must be = m"))}
+if(length(levely) != p) {pandterm(paste("y takes on ",length(levely)," values -- must be = p"))}
bady=FALSE
-for (i in 1:m )
+for (i in 1:p )
{
if(levely[i] != i) bady=TRUE
}
@@ -108,9 +114,9 @@ if(is.null(Prior$Amu)) {Amu=matrix(.01,ncol=1)} else {Amu=matrix(Prior$Amu,ncol=
if(ncol(Amu) != 1 | nrow(Amu) != 1) {pandterm("Am must be a 1 x 1 array")}
if(is.null(Prior$nu)) {nu=nvar+3} else {nu=Prior$nu}
if(nu < 1) {pandterm("invalid nu value")}
-if(is.null(Prior$V)) {V=nu*diag(rep(1,nvar))} else {V=Prior$V}
+if(is.null(Prior$V)) {V=nu*diag(nvar)} else {V=Prior$V}
if(sum(dim(V)==c(nvar,nvar)) !=2) pandterm("Invalid V in prior")
-if(is.null(Prior$Ad) & drawdelta) {Ad=.01*diag(rep(1,nvar*nz))} else {Ad=Prior$Ad}
+if(is.null(Prior$Ad) & drawdelta) {Ad=.01*diag(nvar*nz)} else {Ad=Prior$Ad}
if(drawdelta) {if(ncol(Ad) != nvar*nz | nrow(Ad) != nvar*nz) {pandterm("Ad must be nvar*nz x nvar*nz")}}
if(is.null(Prior$deltabar)& drawdelta) {deltabar=rep(0,nz*nvar)} else {deltabar=Prior$deltabar}
if(drawdelta) {if(length(deltabar) != nz*nvar) {pandterm("deltabar must be of length nvar*nz")}}
@@ -137,7 +143,7 @@ else
cat(" ",fill=TRUE)
cat("Attempting MCMC Inference for Hierarchical Logit:",fill=TRUE)
cat(" Normal Mixture with",ncomp,"components for first stage prior",fill=TRUE)
-cat(paste(" ",m," alternatives; ",nvar," variables in X"),fill=TRUE)
+cat(paste(" ",p," alternatives; ",nvar," variables in X"),fill=TRUE)
cat(paste(" for ",nlgt," cross-sectional units"),fill=TRUE)
cat(" ",fill=TRUE)
cat("Prior Parms: ",fill=TRUE)
@@ -167,7 +173,6 @@ cat("",fill=TRUE)
if(drawdelta) Deltadraw=matrix(double((floor(R/keep))*nz*nvar),ncol=nz*nvar)
betadraw=array(double((floor(R/keep))*nlgt*nvar),dim=c(nlgt,nvar,floor(R/keep)))
probdraw=matrix(double((floor(R/keep))*ncomp),ncol=ncomp)
-olddelta=double(nz*nvar)
oldbetas=matrix(double(nlgt*nvar),ncol=nvar)
oldll=double(nlgt)
oldcomp=NULL
@@ -268,7 +273,7 @@ for (i in 1:nlgt)
lgtdata[[i]]=c(lgtdata[[i]],list(converge=1,betafmle=out$par,hess=hess)) }
else
{ lgtdata[[i]]=c(lgtdata[[i]],list(converge=0,betafmle=c(rep(0,nvar)),
- hess=diag(rep(0,nvar)))) }
+ hess=diag(nvar))) }
oldbetas[i,]=lgtdata[[i]]$betafmle
}
#
@@ -285,7 +290,7 @@ if(ncomp != 1) {ind = c(ind,rep(ncomp,nlgt-length(ind)))} else {ind=rep(1,nlgt)}
#
# initialize delta
#
-olddelta=rep(0,nz*nvar)
+if (drawdelta) olddelta=rep(0,nz*nvar)
#
# initialize probs
#
@@ -293,7 +298,7 @@ oldprob=rep(1/ncomp,ncomp)
#
# initialize comps
#
-tcomp=list(list(mu=rep(0,nvar),rooti=diag(rep(1,nvar))))
+tcomp=list(list(mu=rep(0,nvar),rooti=diag(nvar)))
oldcomp=rep(tcomp,ncomp)
#
#
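
rhierMnlRwMixture now checks that Z has been de-meaned (it errors when the column means do not sum to roughly zero), so callers must center Z themselves before building the Data list. A one-line sketch of the required preprocessing (Z here stands for any nlgt x nz demographic matrix):

Z <- scale(Z, center=TRUE, scale=FALSE)   # subtract column means; Z now passes the check
round(colMeans(Z), 10)                    # all (numerically) zero
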
diff --git a/R/rmixture.R b/R/rmixture.R
index 2ffb7dc..bc20d48 100755
--- a/R/rmixture.R
+++ b/R/rmixture.R
@@ -1,5 +1,5 @@
rmixture=
-function(n,p,comps)
+function(n,pvec,comps)
{
#
# R. McCulloch 12/04
@@ -9,7 +9,7 @@ function(n,p,comps)
# purpose: iid draws from mixture of multivariate normals
# arguments:
# n: number of draws
-# p: prior probabilities of normal components
+# pvec: prior probabilities of normal components
# comps: list, each member is a list comp with ith normal component
# ~N(comp[[1]],Sigma), Sigma = t(R)%*%R, R^{-1} = comp[[2]]
# output:
@@ -31,6 +31,6 @@ as.vector(comp[[1]] + t(invUT(comp[[2]]))%*%rnorm(length(comp[[1]])))
}
#----------------------------------------------------------------------------------
#
-z = sample(1:length(p), n, replace = TRUE, prob = p)
+z = sample(1:length(pvec), n, replace = TRUE, prob = pvec)
return(list(x = t(sapply(comps[z],rcomp)),z=z))
}
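
As with mixDen, rmixture's probability argument becomes pvec. A minimal draw from a two-component bivariate mixture (component values invented for illustration):

library(bayesm)
comps <- list(list(mu=c(0, 0), rooti=diag(2)),
              list(mu=c(3, 3), rooti=diag(2)))
draws <- rmixture(1000, pvec=c(.7, .3), comps)
table(draws$z)/1000      # component shares, roughly .7/.3
dim(draws$x)             # 1000 x 2 matrix of draws
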
diff --git a/R/rmnlIndepMetrop.R b/R/rmnlIndepMetrop.R
index e473162..64a984a 100755
--- a/R/rmnlIndepMetrop.R
+++ b/R/rmnlIndepMetrop.R
@@ -10,10 +10,10 @@ function(Data,Prior,Mcmc)
# draw from posterior for MNL using Independence Metropolis
#
# Arguments:
-# Data - list of m,X,y
-# m is number of alternatives
-# X is nobs*m x nvar matrix
-# y is nobs vector of values from 1 to m
+# Data - list of p,y,X
+# p is number of alternatives
+# X is nobs*p x nvar matrix
+# y is nobs vector of values from 1 to p
# Prior - list of A, betabar
# A is nvar x nvar prior preci matrix
# betabar is nvar x 1 prior mean
@@ -33,19 +33,19 @@ function(Data,Prior,Mcmc)
# check arguments
#
pandterm=function(message) {stop(message,call.=FALSE)}
-if(missing(Data)) {pandterm("Requires Data argument -- list of y and X")}
+if(missing(Data)) {pandterm("Requires Data argument -- list of p, y, X")}
if(is.null(Data$X)) {pandterm("Requires Data element X")}
X=Data$X
if(is.null(Data$y)) {pandterm("Requires Data element y")}
y=Data$y
- if(is.null(Data$m)) {pandterm("Requires Data element m")}
+ if(is.null(Data$p)) {pandterm("Requires Data element p")}
m=Data$m
nvar=ncol(X)
nobs=length(y)
#
# check data for validity
#
-if(length(y) != (nrow(X)/m) ) {pandterm("length(y) ne nrow(X)/m")}
+if(length(y) != (nrow(X)/p) ) {pandterm("length(y) ne nrow(X)/p")}
#
# check for Prior
#
@@ -80,8 +80,8 @@ else
# print out problem
#
cat(" ", fill=TRUE)
-cat("Starting Gibbs Sampler for Multinomial Logit Model",fill=TRUE)
-cat(" with ",m," alternatives",fill=TRUE)
+cat("Starting Independence Metropolis Sampler for Multinomial Logit Model",fill=TRUE)
+cat(" with ",p," alternatives",fill=TRUE)
cat(" ", fill=TRUE)
cat("Prior Parms: ",fill=TRUE)
cat("betabar",fill=TRUE)
diff --git a/R/rmvpGibbs.R b/R/rmvpGibbs.R
index 2d15699..3da0732 100755
--- a/R/rmvpGibbs.R
+++ b/R/rmvpGibbs.R
@@ -101,7 +101,8 @@ cat(" ",fill=TRUE)
cat("MCMC Parms:",fill=TRUE)
cat("R= ",R,fill=TRUE)
cat("initial beta= ",beta0,fill=TRUE)
-cat("initial sigma= ",sigma0,fill=TRUE)
+cat("initial sigma= ",fill=TRUE)
+print(sigma0)
cat(" ",fill=TRUE)
#
diff --git a/R/rScaleUsage.R b/R/rscaleUsage.R
similarity index 96%
rename from R/rScaleUsage.R
rename to R/rscaleUsage.R
index cf5be38..61dc025 100755
--- a/R/rScaleUsage.R
+++ b/R/rscaleUsage.R
@@ -218,7 +218,7 @@ nu = p+3
V= nu*diag(p)
mubar = matrix(rep(k/2,p),ncol=1)
Am = .0001*diag(p)
-gs = 500
+gs = 200
gsigma = 6*(1:gs)/gs
gl11 = .1 + 5.9*(1:gs)/gs
gl22 = .1 + 2.0*(1:gs)/gs
@@ -298,7 +298,7 @@ for(rep in 1:ndpost) {
Si = chol2inv(chol(Sigma))
Vmi = sum(1/sigma^2)*Si + Am
R = chol(Vmi)
- Ri = backsolve(R,diag(rep(1,p)))
+ Ri = backsolve(R,diag(p))
Vm = chol2inv(chol(Vmi))
mm = Vm %*% (Si %*% (t(yd) %*% matrix(1/sigma^2,ncol=1)) + Am %*% mubar)
mu = as.vector(mm + Ri %*% matrix(rnorm(p),ncol=1))
diff --git a/R/simmnl.R b/R/simmnl.R
index a285545..32745c7 100755
--- a/R/simmnl.R
+++ b/R/simmnl.R
@@ -1,5 +1,5 @@
simmnl=
-function(j,n,beta)
+function(p,n,beta)
{
#
# p. rossi 2004
@@ -7,22 +7,22 @@ function(j,n,beta)
# Purpose: simulate from MNL (including X values)
#
# Arguments:
-# j is number of alternatives
+# p is number of alternatives
# n is number of obs
# beta is true parm value
#
# Output:
# list of X (note: we include full set of intercepts and 2 unif(-1,1) X vars)
-# y (indicator of choice-- 1, ...,j
-# prob is a n x j matrix of choice probs
+# y (indicator of choice-- 1, ...,p
+# prob is a n x p matrix of choice probs
#
# note: first choice alternative has intercept set to zero
#
k=length(beta)
-x1=runif(n*j,min=-1,max=1)
-x2=runif(n*j,min=-1,max=1)
-I2=diag(rep(1,j-1))
-zero=rep(0,j-1)
+x1=runif(n*p,min=-1,max=1)
+x2=runif(n*p,min=-1,max=1)
+I2=diag(rep(1,p-1))
+zero=rep(0,p-1)
xadd=rbind(zero,I2)
for(i in 2:n) {
xadd=rbind(xadd,zero,I2)
@@ -32,15 +32,15 @@ X=cbind(xadd,x1,x2)
# now construct probabilities
Xbeta=X%*%beta
-j=nrow(Xbeta)/n
-Xbeta=matrix(Xbeta,byrow=T,ncol=j)
+p=nrow(Xbeta)/n
+Xbeta=matrix(Xbeta,byrow=T,ncol=p)
Prob=exp(Xbeta)
-iota=c(rep(1,j))
+iota=c(rep(1,p))
denom=Prob%*%iota
Prob=Prob/as.vector(denom)
# draw y
y=vector("double",n)
-ind=1:j
+ind=1:p
for (i in 1:n)
{
yvec=rmultinom(1,1,Prob[i,])
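
simmnl's first argument is renamed from j to p for consistency with the other MNL routines. A tiny sketch of the renamed interface (see also the rmnlIndepMetrop.Rd example below):

library(bayesm)
set.seed(66)
beta <- c(1, -1, 1.5, .5)          # (p-1) intercepts plus the two uniform X vars, p=3
sim  <- simmnl(p=3, n=200, beta=beta)
length(sim$y); dim(sim$X)          # 200 outcomes; X is (n*p) x length(beta) = 600 x 4
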
diff --git a/data/Scotch.rda b/data/Scotch.rda
new file mode 100755
index 0000000..0dc3b8b
Binary files /dev/null and b/data/Scotch.rda differ
diff --git a/data/bank.rda b/data/bank.rda
new file mode 100755
index 0000000..84b49e6
Binary files /dev/null and b/data/bank.rda differ
diff --git a/data/cheese.rda b/data/cheese.rda
new file mode 100755
index 0000000..6a89bc1
Binary files /dev/null and b/data/cheese.rda differ
diff --git a/data/customerSat.rda b/data/customerSat.rda
new file mode 100755
index 0000000..b34a8be
Binary files /dev/null and b/data/customerSat.rda differ
diff --git a/data/margarine.rda b/data/margarine.rda
new file mode 100755
index 0000000..52a8690
Binary files /dev/null and b/data/margarine.rda differ
diff --git a/inst/doc/bayesm-manual.pdf b/inst/doc/bayesm-manual.pdf
new file mode 100755
index 0000000..061a84a
Binary files /dev/null and b/inst/doc/bayesm-manual.pdf differ
diff --git a/man/Scotch.Rd b/man/Scotch.Rd
new file mode 100755
index 0000000..682dadf
--- /dev/null
+++ b/man/Scotch.Rd
@@ -0,0 +1,86 @@
+\name{Scotch}
+\alias{Scotch}
+\docType{data}
+\title{ Survey Data on Brands of Scotch Consumed}
+\description{
+ from Simmons Survey. Brands used in last year for those
+ respondents who report consuming scotch.
+}
+\usage{data(Scotch)}
+\format{
+ A data frame with 2218 observations on the following 21 variables.
+ All variables are coded 1 if consumed in last year, 0 if not.
+ \describe{
+ \item{\code{Chivas.Regal}}{a numeric vector}
+ \item{\code{Dewar.s.White.Label}}{a numeric vector}
+ \item{\code{Johnnie.Walker.Black.Label}}{a numeric vector}
+ \item{\code{J...B}}{a numeric vector}
+ \item{\code{Johnnie.Walker.Red.Label}}{a numeric vector}
+ \item{\code{Other.Brands}}{a numeric vector}
+ \item{\code{Glenlivet}}{a numeric vector}
+ \item{\code{Cutty.Sark}}{a numeric vector}
+ \item{\code{Glenfiddich}}{a numeric vector}
+ \item{\code{Pinch..Haig.}}{a numeric vector}
+ \item{\code{Clan.MacGregor}}{a numeric vector}
+ \item{\code{Ballantine}}{a numeric vector}
+ \item{\code{Macallan}}{a numeric vector}
+ \item{\code{Passport}}{a numeric vector}
+ \item{\code{Black...White}}{a numeric vector}
+ \item{\code{Scoresby.Rare}}{a numeric vector}
+ \item{\code{Grants}}{a numeric vector}
+ \item{\code{Ushers}}{a numeric vector}
+ \item{\code{White.Horse}}{a numeric vector}
+ \item{\code{Knockando}}{a numeric vector}
+ \item{\code{the.Singleton}}{a numeric vector}
+ }
+}
+\source{
+ Edwards, Y. and G. Allenby (2003), "Multivariate Analysis of Multiple Response Data,"
+ \emph{JMR} 40, 321-334.
+}
+\references{
+ Chapter 4, \emph{Bayesian Statistics and Marketing} by Rossi et al.\cr
+ \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+\examples{
+data(Scotch)
+cat(" Frequencies of Brands", fill=TRUE)
+mat=apply(as.matrix(Scotch),2,mean)
+print(mat)
+##
+## use Scotch data to run Multivariate Probit Model
+##
+if(nchar(Sys.getenv("LONG_TEST")) != 0){
+##
+
+y=as.matrix(Scotch)
+p=ncol(y); n=nrow(y)
+dimnames(y)=NULL
+y=as.vector(t(y))
+y=as.integer(y)
+I_p=diag(p)
+X=rep(I_p,n)
+X=matrix(X,nrow=p)
+X=t(X)
+
+R=2000
+Data=list(p=p,X=X,y=y)
+Mcmc=list(R=R)
+set.seed(66)
+out=rmvpGibbs(Data=Data,Mcmc=Mcmc)
+
+ind=(0:(p-1))*p + (1:p)
+cat(" Betadraws ",fill=TRUE)
+mat=apply(out$betadraw/sqrt(out$sigmadraw[,ind]),2,quantile,probs=c(.01,.05,.5,.95,.99))
+print(mat)
+rdraw=matrix(double((R)*p*p),ncol=p*p)
+rdraw=t(apply(out$sigmadraw,1,nmat))
+cat(" Draws of Correlation Matrix ",fill=TRUE)
+mat=apply(rdraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+## correlation matrix too large to print -- summarize
+quantile(round(mat,digits=2))
+
+}
+
+}
+\keyword{datasets}
diff --git a/man/bank.Rd b/man/bank.Rd
new file mode 100755
index 0000000..f4ff8fc
--- /dev/null
+++ b/man/bank.Rd
@@ -0,0 +1,113 @@
+\name{bank}
+\alias{bank}
+\docType{data}
+\title{ Bank Card Conjoint Data of Allenby and Ginter (1995)}
+\description{
+ Data from a conjoint experiment in which two partial profiles of
+ credit cards were presented to 946 respondents. The variable
+ bank\$choiceAtt\$choice indicates which profile was chosen. The
+ profiles are coded as the difference in attribute levels. Thus,
+ a "-1" means the profile coded as a choice of "0" has the attribute.
+ A value of 0 means that the attribute was not present in the
+ comparison.
+
+ Data on age, income and gender (female=1) are also recorded in
+ bank\$demo
+}
+\usage{data(bank)}
+\format{
+ This R object is a list of two data frames, list(choiceAtt,demo).
+
+ List of 2
+
+ \$ choiceAtt:`data.frame': 14799 obs. of 16 variables:\cr
+ \ldots\$ id : int [1:14799] 1 1 1 1 1 1 1 1 1 1 \cr
+ \ldots\$ choice : int [1:14799] 1 1 1 1 1 1 1 1 0 1 \cr
+ \ldots\$ Med\_FInt : int [1:14799] 1 1 1 0 0 0 0 0 0 0 \cr
+ \ldots\$ Low\_FInt : int [1:14799] 0 0 0 0 0 0 0 0 0 0 \cr
+ \ldots\$ Med\_VInt : int [1:14799] 0 0 0 0 0 0 0 0 0 0 \cr
+ \ldots\$ Rewrd\_2 : int [1:14799] -1 1 0 0 0 0 0 1 -1 0 \cr
+ \ldots\$ Rewrd\_3 : int [1:14799] 0 -1 1 0 0 0 0 0 1 -1 \cr
+ \ldots\$ Rewrd\_4 : int [1:14799] 0 0 -1 0 0 0 0 0 0 1 \cr
+ \ldots\$ Med\_Fee : int [1:14799] 0 0 0 1 1 -1 -1 0 0 0 \cr
+ \ldots\$ Low\_Fee : int [1:14799] 0 0 0 0 0 1 1 0 0 0 \cr
+ \ldots\$ Bank\_B : int [1:14799] 0 0 0 -1 1 -1 1 0 0 0 \cr
+ \ldots\$ Out\_State : int [1:14799] 0 0 0 0 -1 0 -1 0 0 0 \cr
+ \ldots\$ Med\_Rebate : int [1:14799] 0 0 0 0 0 0 0 0 0 0 \cr
+ \ldots\$ High\_Rebate : int [1:14799] 0 0 0 0 0 0 0 0 0 0 \cr
+ \ldots\$ High\_CredLine: int [1:14799] 0 0 0 0 0 0 0 -1 -1 -1 \cr
+ \ldots\$ Long\_Grace : int [1:14799] 0 0 0 0 0 0 0 0 0 0
+
+ \$ demo :`data.frame': 946 obs. of 4 variables:\cr
+ \ldots\$ id : int [1:946] 1 2 3 4 6 7 8 9 10 11 \cr
+ \ldots\$ age : int [1:946] 60 40 75 40 30 30 50 50 50 40 \cr
+ \ldots\$ income: int [1:946] 20 40 30 40 30 60 50 100 50 40 \cr
+ \ldots\$ gender: int [1:946] 1 1 0 0 0 0 1 0 0 0 \cr
+}
+\details{
+ Each respondent was presented with between 13 and 17 paired comparisons. Thus, this
+ dataset has a panel structure.
+}
+\source{
+ Allenby and Ginter (1995), "Using Extremes to Design Products and Segment
+ Markets," \emph{JMR}, 392-403.
+}
+\references{ Appendix A, \emph{Bayesian Statistics and Marketing}
+ by Allenby, McCulloch, and Rossi. \cr
+ \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+\examples{
+data(bank)
+cat(" table of Binary Dep Var", fill=TRUE)
+print(table(bank$choiceAtt[,2]))
+cat(" table of Attribute Variables",fill=TRUE)
+mat=apply(as.matrix(bank$choiceAtt[,3:16]),2,table)
+print(mat)
+cat(" means of Demographic Variables",fill=TRUE)
+mat=apply(as.matrix(bank$demo[,2:3]),2,mean)
+print(mat)
+
+## example of processing for use with rhierBinLogit
+##
+if(nchar(Sys.getenv("LONG_TEST")) != 0)
+{
+choiceAtt=bank$choiceAtt
+Z=bank$demo
+
+## center demo data so that mean of random-effects
+## distribution can be interpreted as the average respondent
+
+Z[,1]=rep(1,nrow(Z))
+Z[,2]=Z[,2]-mean(Z[,2])
+Z[,3]=Z[,3]-mean(Z[,3])
+Z[,4]=Z[,4]-mean(Z[,4])
+Z=as.matrix(Z)
+
+hh=levels(factor(choiceAtt$id))
+nhh=length(hh)
+lgtdata=NULL
+for (i in 1:nhh) {
+ y=choiceAtt[choiceAtt[,1]==hh[i],2]
+ nobs=length(y)
+ X=as.matrix(choiceAtt[choiceAtt[,1]==hh[i],c(3:16)])
+ lgtdata[[i]]=list(y=y,X=X)
+ }
+
+cat("Finished Reading data",fill=TRUE)
+fsh()
+
+Data=list(lgtdata=lgtdata,Z=Z)
+Mcmc=list(R=10000,sbeta=0.2,keep=20)
+set.seed(66)
+out=rhierBinLogit(Data=Data,Mcmc=Mcmc)
+
+cat(" Deltadraws ",fill=TRUE)
+mat=apply(out$Deltadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+print(mat)
+cat(" Vbetadraws ",fill=TRUE)
+mat=apply(out$Vbetadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+print(mat)
+}
+
+}
+\keyword{datasets}
diff --git a/man/cgetC.Rd b/man/cgetC.Rd
index a6f783b..aa25dcf 100755
--- a/man/cgetC.Rd
+++ b/man/cgetC.Rd
@@ -24,7 +24,7 @@ cgetC(e, k)
A vector of k+1 cut-offs.
}
\references{
- Rossi et al (2001), \dQuote{Overcoming Scale Usage Heterogeneity,} \emph{JASA}.
+ Rossi et al (2001), \dQuote{Overcoming Scale Usage Heterogeneity,} \emph{JASA}96, 20-31.
}
\author{ Rob McCulloch and Peter Rossi, Graduate School of Business, University of Chicago.
diff --git a/man/cheese.Rd b/man/cheese.Rd
new file mode 100755
index 0000000..81e5459
--- /dev/null
+++ b/man/cheese.Rd
@@ -0,0 +1,97 @@
+\name{cheese}
+\alias{cheese}
+\docType{data}
+\title{ Sliced Cheese Data}
+\description{
+ Panel data with sales volume for a package of Borden Sliced Cheese
+ as well as a measure of display activity and price. Weekly data aggregated
+ to the "key" account or retailer/market level.
+}
+\usage{data(cheese)}
+\format{
+ A data frame with 5555 observations on the following 4 variables.
+ \describe{
+ \item{\code{RETAILER}}{a list of 88 retailers}
+ \item{\code{VOLUME}}{unit sales}
+ \item{\code{DISP}}{a measure of display activity -- per cent ACV on display}
+ \item{\code{PRICE}}{in \$}
+ }
+}
+\source{
+ Boatwright et al (1999), "Account-Level Modeling for Trade Promotion,"
+ \emph{JASA} 94, 1063-1073.
+}
+\references{
+ Chapter 3, \emph{Bayesian Statistics and Marketing} by Rossi et al. \cr
+ \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+\examples{
+data(cheese)
+cat(" Quantiles of the Variables ",fill=TRUE)
+mat=apply(as.matrix(cheese[,2:4]),2,quantile)
+print(mat)
+
+##
+## example of processing for use with rhierLinearModel
+##
+if(nchar(Sys.getenv("LONG_TEST")) != 0)
+{
+
+retailer=levels(cheese$RETAILER)
+nreg=length(retailer)
+nvar=3
+regdata=NULL
+for (reg in 1:nreg) {
+ y=log(cheese$VOLUME[cheese$RETAILER==retailer[reg]])
+ iota=c(rep(1,length(y)))
+ X=cbind(iota,cheese$DISP[cheese$RETAILER==retailer[reg]],
+ log(cheese$PRICE[cheese$RETAILER==retailer[reg]]))
+ regdata[[reg]]=list(y=y,X=X)
+}
+Z=matrix(c(rep(1,nreg)),ncol=1)
+nz=ncol(Z)
+##
+## run each individual regression and store results
+##
+lscoef=matrix(double(nreg*nvar),ncol=nvar)
+for (reg in 1:nreg) {
+ coef=lsfit(regdata[[reg]]$X,regdata[[reg]]$y,intercept=FALSE)$coef
+ if (var(regdata[[reg]]$X[,2])==0) { lscoef[reg,1]=coef[1]; lscoef[reg,3]=coef[2]}
+ else {lscoef[reg,]=coef }
+}
+
+R=2000
+Data=list(regdata=regdata,Z=Z)
+Mcmc=list(R=R,keep=1)
+
+betamean=array(double(nreg*nvar),dim=c(nreg,nvar))
+burnin=100
+
+set.seed(66)
+out=rhierLinearModel(Data=Data,Mcmc=Mcmc)
+
+for (k in 1:nvar) { betamean[,k]=apply(out$betadraw[,k,burnin:R],1,mean)}
+print(betamean)
+cat(" Deltadraws ",fill=TRUE)
+mat=apply(out$Deltadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+print(mat)
+cat(" Vbetadraws ",fill=TRUE)
+mat=apply(out$Vbetadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+print(mat)
+
+if(0){
+coefn=c("Intercept","Display","LnPrice")
+colors=c("blue","green","red","yellow")
+par(mfrow=c(nvar,1),mar=c(5.1,15,4.1,13))
+for (n in 1:nvar)
+{
+ plot(range(betamean[,n]),range(betamean[,n]),
+ type="n",main=coefn[n],xlab="ls coef",ylab="post mean")
+ points(lscoef[,n],betamean[,n],pch=17,col="blue",cex=1.2)
+ abline(c(0,1))
+}
+}
+
+}
+}
+\keyword{datasets}
diff --git a/man/condMom.Rd b/man/condMom.Rd
index a3f3328..84621d2 100755
--- a/man/condMom.Rd
+++ b/man/condMom.Rd
@@ -12,7 +12,7 @@
condMom(x, mu, sigi, i)
}
\arguments{
- \item{x}{ vector of values to condition on }
+ \item{x}{ vector of values to condition on - ith element not used }
\item{mu}{ length(x) mean vector }
\item{sigi}{ length(x)-dim covariance matrix }
\item{i}{ conditional distribution of ith element }
@@ -20,7 +20,7 @@ condMom(x, mu, sigi, i)
\details{
\eqn{x} \eqn{\sim}{~} \eqn{MVN(mu,Sigma)}.
- \code{condMom} computes moments of \eqn{x_i} given \eqn{x_{-1}}.
+ \code{condMom} computes moments of \eqn{x_i} given \eqn{x_{-i}}.
}
\value{
a list containing:
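
Since the condMom page now makes explicit that the ith element of x is ignored and that the moments are of x_i given x_{-i}, the underlying identity is worth writing out. Assuming sigi is the precision (inverse covariance) matrix, as the argument name suggests, the conditional moments are (values below are arbitrary):

Sigma <- matrix(c(1, .5, .3,
                  .5, 1, .2,
                  .3, .2, 1), 3, 3)
mu <- c(0, 1, 2); x <- c(.1, .7, 1.5); i <- 2
sigi  <- chol2inv(chol(Sigma))                           # precision matrix
cvar  <- 1/sigi[i, i]
cmean <- mu[i] - cvar*sum(sigi[i, -i]*(x[-i] - mu[-i]))
# condMom(x, mu, sigi, i) should return the same conditional mean and variance
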
diff --git a/man/createX.Rd b/man/createX.Rd
index 4222b47..e3a53c6 100755
--- a/man/createX.Rd
+++ b/man/createX.Rd
@@ -6,8 +6,9 @@
\title{ Create X Matrix for Use in Multinomial Logit and Probit Routines }
\description{
\code{createX} makes up an X matrix in the form expected by Multinomial
- Logit (\code{\link{rmnlIndepMetrop}}) and Multinomial Probit (\code{\link{rmnpGibbs}})
- routines. Requires an array of alternative specific variables and/or an
+ Logit (\code{\link{rmnlIndepMetrop}} and \code{\link{rhierMnlRwMixture}})
+ and Probit (\code{\link{rmnpGibbs}} and \code{\link{rmvpGibbs}}) routines.
+ Requires an array of alternative specific variables and/or an
array of "demographics" or variables constant across alternatives which
may vary across choice occasions.
}
@@ -18,9 +19,9 @@ createX(p, na, nd, Xa, Xd, INT = TRUE, DIFF = FALSE, base = p)
\arguments{
\item{p}{ integer - number of choice alternatives }
\item{na}{ integer - number of alternative-specific vars in Xa }
- \item{nd}{ integer - number of "demographics" vars }
+  \item{nd}{ integer - number of non-alternative-specific vars }
\item{Xa}{ n x p*na matrix of alternative-specific vars }
- \item{Xd}{ n x nd matrix of "demographic" vars }
+ \item{Xd}{ n x nd matrix of non-alternative specific vars }
\item{INT}{ logical flag for inclusion of intercepts }
\item{DIFF}{ logical flag for differencing wrt to base alternative }
\item{base}{ integer - index of base choice alternative }
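
A small sketch of a createX call matching the updated argument descriptions (na alternative-specific vars in Xa, nd non-alternative-specific vars in Xd); the data are random placeholders and the exact column ordering is the routine's own:

library(bayesm)
p <- 3; n <- 4
Xa <- matrix(runif(n*p), ncol=p)      # one alternative-specific var: n x p*na with na=1
Xd <- matrix(rnorm(n), ncol=1)        # one var constant across alternatives
X  <- createX(p=p, na=1, nd=1, Xa=Xa, Xd=Xd, INT=TRUE)
dim(X)    # n*p rows; intercepts, the Xa variable, and Xd expanded across alternatives
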
diff --git a/man/customerSat.Rd b/man/customerSat.Rd
new file mode 100755
index 0000000..b095642
--- /dev/null
+++ b/man/customerSat.Rd
@@ -0,0 +1,39 @@
+\name{customerSat}
+\alias{customerSat}
+\docType{data}
+\title{ Customer Satisfaction Data}
+\description{
+ Responses to a satisfaction survey for a Yellow Pages advertising product.
+ All responses are on a 10 point scale from 1 to 10 (10 is "Excellent"
+ and 1 is "Poor")
+}
+\usage{data(customerSat)}
+\format{
+ A data frame with 1811 observations on the following 10 variables.
+ \describe{
+ \item{\code{q1}}{Overall Satisfaction}
+ \item{\code{q2}}{Setting Competitive Prices}
+ \item{\code{q3}}{Holding Price Increase to a Minimum}
+ \item{\code{q4}}{Appropriate Pricing given Volume}
+ \item{\code{q5}}{Demonstrating Effectiveness of Purchase}
+ \item{\code{q6}}{Reach a Large \# of Customers}
+ \item{\code{q7}}{Reach of Advertising}
+ \item{\code{q8}}{Long-term Exposure}
+ \item{\code{q9}}{Distribution}
+ \item{\code{q10}}{Distribution to Right Geographic Areas}
+ }
+}
+}
+\source{
+ Rossi et al (2001), "Overcoming Scale Usage Heterogeneity,"
+ \emph{JASA} 96, 20-31.
+}
+\references{
+ Case Study 3, \emph{Bayesian Statistics and Marketing} by Rossi et al.\cr
+ \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+\examples{
+data(customerSat)
+apply(as.matrix(customerSat),2,table)
+}
+\keyword{datasets}
diff --git a/man/llmnl.Rd b/man/llmnl.Rd
index f278033..702a360 100755
--- a/man/llmnl.Rd
+++ b/man/llmnl.Rd
@@ -17,7 +17,7 @@ llmnl(y, X, beta)
\item{beta}{ k x 1 coefficient vector }
}
\details{
- Let \eqn{mu_i=X_i}\%*\%\eqn{\beta}, then \eqn{Pr(y=j) = e^{mu_j}/\sum_i{mu_i}}.\cr
+  Let \eqn{mu_i=X_i \beta}, then \eqn{Pr(y_i=j) = exp(mu_{i,j})/\sum_k exp(mu_{i,k})}.\cr
\eqn{X_i} is the submatrix of X corresponding to the
ith observation. X has n*p rows.
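
The corrected formula in llmnl.Rd is straightforward to compute directly; a sketch (mnl_loglik is just an illustrative helper, not a package function) with X stacked as p rows per observation, as the page describes:

mnl_loglik <- function(y, X, beta, p) {
  mu <- matrix(X %*% beta, byrow=TRUE, ncol=p)   # n x p matrix of deterministic utilities
  sum(mu[cbind(seq_along(y), y)] - log(rowSums(exp(mu))))
}
# llmnl(y, X, beta) should agree, with p inferred from nrow(X)/length(y)
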
diff --git a/man/margarine.Rd b/man/margarine.Rd
new file mode 100755
index 0000000..864e4be
--- /dev/null
+++ b/man/margarine.Rd
@@ -0,0 +1,136 @@
+\name{margarine}
+\alias{margarine}
+\docType{data}
+\title{Household Panel Data on Margarine Purchases}
+\description{
+ Panel data on purchases of margarine by 516 households.
+ Demographic variables are included.
+}
+\usage{data(margarine)}
+\format{
+ This is an R object that is a list of two data frames, list(choicePrice,demos)
+
+ List of 2 \cr
+ \$ choicePrice:`data.frame': 4470 obs. of 12 variables:\cr
+ \ldots \$ hhid : int [1:4470] 2100016 2100016 2100016 2100016 \cr
+ \ldots \$ choice : num [1:4470] 1 1 1 1 1 4 1 1 4 1 \cr
+ \ldots \$ PPk\_Stk : num [1:4470] 0.66 0.63 0.29 0.62 0.5 0.58 0.29 \cr
+ \ldots \$ PBB\_Stk : num [1:4470] 0.67 0.67 0.5 0.61 0.58 0.45 0.51 \cr
+ \ldots \$ PFl\_Stk : num [1:4470] 1.09 0.99 0.99 0.99 0.99 0.99 0.99 \cr
+ \ldots \$ PHse\_Stk: num [1:4470] 0.57 0.57 0.57 0.57 0.45 0.45 0.29 \cr
+ \ldots \$ PGen\_Stk: num [1:4470] 0.36 0.36 0.36 0.36 0.33 0.33 0.33 \cr
+ \ldots \$ PImp\_Stk: num [1:4470] 0.93 1.03 0.69 0.75 0.72 0.72 0.72 \cr
+ \ldots \$ PSS\_Tub : num [1:4470] 0.85 0.85 0.79 0.85 0.85 0.85 0.85 \cr
+ \ldots \$ PPk\_Tub : num [1:4470] 1.09 1.09 1.09 1.09 1.07 1.07 1.07 \cr
+ \ldots \$ PFl\_Tub : num [1:4470] 1.19 1.19 1.19 1.19 1.19 1.19 1.19 \cr
+ \ldots \$ PHse\_Tub: num [1:4470] 0.33 0.37 0.59 0.59 0.59 0.59 0.59 \cr
+
+ Pk is Parkay; BB is BlueBonnett, Fl is Fleischmanns, Hse is house,
+ Gen is generic, Imp is Imperial, SS is Shed Spread. \_Stk indicates
+ stick, \_Tub indicates Tub form.
+
+ \$ demos :`data.frame': 516 obs. of 8 variables:\cr
+ \ldots \$ hhid : num [1:516] 2100016 2100024 2100495 2100560 \cr
+ \ldots \$ Income : num [1:516] 32.5 17.5 37.5 17.5 87.5 12.5 \cr
+ \ldots \$ Fs3\_4 : int [1:516] 0 1 0 0 0 0 0 0 0 0 \cr
+ \ldots \$ Fs5 : int [1:516] 0 0 0 0 0 0 0 0 1 0 \cr
+ \ldots \$ Fam\_Size : int [1:516] 2 3 2 1 1 2 2 2 5 2 \cr
+ \ldots \$ college : int [1:516] 1 1 0 0 1 0 1 0 1 1 \cr
+ \ldots \$ whtcollar: int [1:516] 0 1 0 1 1 0 0 0 1 1 \cr
+ \ldots \$ retired : int [1:516] 1 1 1 0 0 1 0 1 0 0 \cr
+
+ Fs3\_4 is dummy (family size 3-4). Fs5 is dummy for family size >= 5.
+ college,whtcollar,retired are dummies reflecting these statuses.
+}
+\details{
+ choice is a multinomial indicator of one of the 10 brands (in order listed under format).
+ All prices are in \$.
+}
+\source{
+ Allenby and Rossi (1991), "Quality Perceptions and Asymmetric Switching Between Brands,"
+ \emph{Marketing Science} 10, 185-205.
+}
+\references{
+ Chapter 5, \emph{Bayesian Statistics and Marketing} by Rossi et al.\cr
+ \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+\examples{
+data(margarine)
+cat(" Table of Choice Variable ",fill=TRUE)
+print(table(margarine$choicePrice[,2]))
+cat(" Means of Prices",fill=TRUE)
+mat=apply(as.matrix(margarine$choicePrice[,3:12]),2,mean)
+print(mat)
+cat(" Quantiles of Demographic Variables",fill=TRUE)
+mat=apply(as.matrix(margarine$demos[,2:8]),2,quantile)
+print(mat)
+
+##
+## example of processing for use with rhierMnlRwMixture
+##
+if(nchar(Sys.getenv("LONG_TEST")) != 0)
+{
+select= c(1:5,7) ## select brands
+chPr=as.matrix(margarine$choicePrice)
+## make sure to log prices
+chPr=cbind(chPr[,1],chPr[,2],log(chPr[,2+select]))
+demos=as.matrix(margarine$demos[,c(1,2,5)])
+
+## remove obs for other alts
+chPr=chPr[chPr[,2] <= 7,]
+chPr=chPr[chPr[,2] != 6,]
+
+## recode choice
+chPr[chPr[,2] == 7,2]=6
+
+hhidl=levels(as.factor(chPr[,1]))
+lgtdata=NULL
+nlgt=length(hhidl)
+p=length(select) ## number of choice alts
+ind=1
+for (i in 1:nlgt) {
+ nobs=sum(chPr[,1]==hhidl[i])
+ if(nobs >=5) {
+ data=chPr[chPr[,1]==hhidl[i],]
+ y=data[,2]
+ names(y)=NULL
+ X=createX(p=p,na=1,Xa=data[,3:8],nd=NULL,Xd=NULL,INT=TRUE,base=1)
+ lgtdata[[ind]]=list(y=y,X=X,hhid=hhidl[i]); ind=ind+1
+ }
+}
+nlgt=length(lgtdata)
+##
+## now extract demos corresponding to hhs in lgtdata
+##
+Z=NULL
+nlgt=length(lgtdata)
+for(i in 1:nlgt){
+ Z=rbind(Z,demos[demos[,1]==lgtdata[[i]]$hhid,2:3])
+}
+##
+## take log of income and family size and demean
+##
+Z=log(Z)
+Z[,1]=Z[,1]-mean(Z[,1])
+Z[,2]=Z[,2]-mean(Z[,2])
+
+keep=5
+R=20000
+mcmc1=list(keep=keep,R=R)
+out=rhierMnlRwMixture(Data=list(p=p,lgtdata=lgtdata,Z=Z),Prior=list(ncomp=1),Mcmc=mcmc1)
+
+begin=100/keep; end=R/keep
+cat(" Posterior Mean of Delta ",fill=TRUE)
+mat=apply(out$Deltadraw[begin:end,],2,mean)
+print(matrix(mat,ncol=6))
+pmom=momMix(out$probdraw[begin:end,],out$compdraw[begin:end])
+cat(" posterior expectation of mu",fill=TRUE)
+print(pmom$mu)
+cat(" posterior expectation of sd",fill=TRUE)
+print(pmom$sd)
+cat(" posterior expectation of correlations",fill=TRUE)
+print(pmom$corr)
+
+}
+}
+\keyword{datasets}
diff --git a/man/mixDen.Rd b/man/mixDen.Rd
index 73567ab..d215120 100755
--- a/man/mixDen.Rd
+++ b/man/mixDen.Rd
@@ -10,11 +10,11 @@
a normal mixture at each of the points on a user-specifed grid.
}
\usage{
-mixDen(x, p, comps)
+mixDen(x, pvec, comps)
}
\arguments{
\item{x}{ array - ith column gives grid points for ith variable }
- \item{p}{ vector of mixture component probabilites }
+  \item{pvec}{ vector of mixture component probabilities }
\item{comps}{ list of lists of components for normal mixture }
}
\details{
diff --git a/man/numEff.Rd b/man/numEff.Rd
index 0ff4a51..fd08f58 100755
--- a/man/numEff.Rd
+++ b/man/numEff.Rd
@@ -9,7 +9,7 @@
}
\usage{
-numEff(x, m = max(length(x), (100/sqrt(5000)) * sqrt(length(x))))
+numEff(x, m = as.integer(min(length(x), (100/sqrt(5000)) * sqrt(length(x)))))
}
\arguments{
@@ -38,7 +38,9 @@ numEff(x, m = max(length(x), (100/sqrt(5000)) * sqrt(length(x))))
This routine is a utility routine that does \strong{not} check the
input arguments for proper dimensions and type.
}
-
+\examples{
+ numEff(rnorm(1000),m=20)
+ numEff(rnorm(1000))
}
\keyword{ ts }
\keyword{ utilities }
diff --git a/man/rbprobitGibbs.Rd b/man/rbprobitGibbs.Rd
index b4ce26b..1735cae 100755
--- a/man/rbprobitGibbs.Rd
+++ b/man/rbprobitGibbs.Rd
@@ -21,7 +21,7 @@ rbprobitGibbs(Data, Prior, Mcmc)
}
\details{
- Model: \eqn{y = X\beta + e}. \eqn{e} \eqn{\sim}{~} \eqn{N(0,I)}.
+ Model: \eqn{z = X\beta + e}. \eqn{e} \eqn{\sim}{~} \eqn{N(0,I)}. y=1, if z> 0.
Prior: \eqn{\beta} \eqn{\sim}{~} \eqn{N(betabar,A^{-1})}.
@@ -51,6 +51,7 @@ rbprobitGibbs(Data, Prior, Mcmc)
\seealso{ \code{\link{rmnpGibbs}} }
\examples{
+##
## rbprobitGibbs example
##
set.seed(66)
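
The rewritten model line makes the latent-variable formulation explicit (z = X beta + e, y = 1 if z > 0), which is also how test data for the sampler can be simulated; a short sketch with made-up parameter values:

set.seed(66)
n <- 500; X <- cbind(1, runif(n)); beta <- c(-.5, 2)
z <- X %*% beta + rnorm(n)      # latent index
y <- as.integer(z > 0)          # observed binary outcome
# Data=list(X=X, y=y) is then the kind of input rbprobitGibbs expects
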
diff --git a/man/rhierBinLogit.Rd b/man/rhierBinLogit.Rd
new file mode 100755
index 0000000..df47ec9
--- /dev/null
+++ b/man/rhierBinLogit.Rd
@@ -0,0 +1,133 @@
+\name{rhierBinLogit}
+\alias{rhierBinLogit}
+\concept{bayes}
+\concept{MCMC}
+\concept{hierarchical models}
+\concept{binary logit}
+
+\title{ MCMC Algorithm for Hierarchical Binary Logit }
+\description{
+ \code{rhierBinLogit} implements an MCMC algorithm for hierarchical binary logits with
+ a normal heterogeneity distribution. This is a hybrid sampler with a RW Metropolis step
+ for unit-level logit parameters.
+
+ \code{rhierBinLogit} is designed for use on choice-based conjoint data with partial profiles.
+ The Design matrix is based on differences of characteristics between two alternatives. See
+ Appendix A of \emph{Bayesian Statistics and Marketing} for details.
+}
+\usage{
+rhierBinLogit(Data, Prior, Mcmc)
+}
+\arguments{
+ \item{Data}{ list(lgtdata,Z) (note: Z is optional) }
+ \item{Prior}{ list(Deltabar,ADelta,nu,V) (note: all are optional)}
+ \item{Mcmc}{ list(sbeta,R,keep) (note: all but R are optional)}
+}
+\details{
+ Model: \cr
+  \eqn{y_{hi} = 1} with \eqn{pr=exp(x_{hi}'beta_h)/(1+exp(x_{hi}'beta_h))}. \eqn{beta_h} is nvar x 1.\cr
+ h=1,\ldots,length(lgtdata) units or "respondents" for survey data.
+
+ \eqn{beta_h}= ZDelta[h,] + \eqn{u_h}. \cr
+ Note: here ZDelta refers to Z\%*\%Delta, ZDelta[h,] is hth row of this product.\cr
+ Delta is an nz x nvar array.
+
+ \eqn{u_h} \eqn{\sim}{~} \eqn{N(0,V_{beta})}. \cr
+
+ Priors: \cr
+ \eqn{delta= vec(Delta)} \eqn{\sim}{~} \eqn{N(vec(Deltabar),V_{beta} (x) ADelta^{-1})}\cr
+ \eqn{V_{beta}} \eqn{\sim}{~} \eqn{IW(nu,V)}
+
+ Lists contain:
+ \itemize{
+ \item{\code{lgtdata}}{list of lists with each cross-section unit MNL data}
+ \item{\code{lgtdata[[h]]$y}}{ \eqn{n_h} vector of binary outcomes (0,1)}
+ \item{\code{lgtdata[[h]]$X}}{ \eqn{n_h} by nvar design matrix for hth unit}
+ \item{\code{Deltabar}}{nz x nvar matrix of prior means (def: 0)}
+ \item{\code{ADelta}}{ prior prec matrix (def: .01I)}
+ \item{\code{nu}}{ d.f. parm for IW prior on norm comp Sigma (def: nvar+3)}
+ \item{\code{V}}{ pds location parm for IW prior on norm comp Sigma (def: nuI)}
+ \item{\code{sbeta}}{ scaling parm for RW Metropolis (def: .2)}
+ \item{\code{R}}{ number of MCMC draws}
+ \item{\code{keep}}{ MCMC thinning parm: keep every keepth draw (def: 1)}
+ }
+}
+\value{
+ a list containing:
+ \item{Deltadraw}{R/keep x nz*nvar matrix of draws of Delta}
+ \item{betadraw}{ nlgt x nvar x R/keep array of draws of betas}
+ \item{Vbetadraw}{ R/keep x nvar*nvar matrix of draws of Vbeta}
+ \item{llike}{R/keep vector of log-like values}
+ \item{reject}{R/keep vector of reject rates over nlgt units}
+}
+
+\note{ Some experimentation with the Metropolis scaling parameter (sbeta) may be required. }
+
+\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
+ by Allenby, McCulloch, and Rossi, Chapter 5. \cr
+ \url{http://gsbwww.uchicago.edu/fac/peter.rossi/research/bsm.html}
+}
+
+\author{ Peter Rossi, Graduate School of Business, University of Chicago,
+ \email{Peter.Rossi at ChicagoGsb.edu}.
+}
+\examples{
+##
+if(nchar(Sys.getenv("LONG_TEST")) != 0)
+{
+
+set.seed(66)
+nvar=5 ## number of coefficients
+nlgt=1000 ## number of cross-sectional units
+nobs=10 ## number of observations per unit
+nz=2 ## number of regressors in mixing distribution
+
+## set hyper-parameters
+## B=ZDelta + U
+
+Z=matrix(c(rep(1,nlgt),runif(nlgt,min=-1,max=1)),nrow=nlgt,ncol=nz)
+Delta=matrix(c(-2,-1,0,1,2,-1,1,-.5,.5,0),nrow=nz,ncol=nvar)
+iota=matrix(1,nrow=nvar,ncol=1)
+Vbeta=diag(nvar)+.5*iota\%*\%t(iota)
+
+## simulate data
+lgtdata=NULL
+
+for (i in 1:nlgt)
+{ beta=t(Delta)\%*\%Z[i,]+as.vector(t(chol(Vbeta))\%*\%rnorm(nvar))
+ X=matrix(runif(nobs*nvar),nrow=nobs,ncol=nvar)
+ prob=exp(X\%*\%beta)/(1+exp(X\%*\%beta))
+ unif=runif(nobs,0,1)
+ y=ifelse(unif<prob,1,0)
+ lgtdata[[i]]=list(y=y,X=X,beta=beta)
+}
+
+Data=list(Dat=lgtdata,Demo=Z)
+out=rhierBinLogit(Data=list(lgtdata=lgtdata,Z=Z),Mcmc=list(R=5000))
+
+cat(" Deltadraws ",fill=TRUE)
+mat=apply(out$Deltadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+mat=rbind(as.vector(Delta),mat); rownames(mat)[1]="delta"; print(mat)
+cat(" Vbetadraws ",fill=TRUE)
+mat=apply(out$Vbetadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+mat=rbind(as.vector(Vbeta),mat); rownames(mat)[1]="Vbeta"; print(mat)
+
+if(0){
+td=as.vector(Delta)
+par(mfrow=c(2,2))
+matplot(out$Deltadraw[,(1:nvar)],type="l")
+abline(h=td[1:nvar],col=(1:nvar))
+matplot(out$Deltadraw[,((nvar+1):(2*nvar))],type="l")
+abline(h=td[(nvar+1):(2*nvar)],col=(1:nvar))
+matplot(out$Vbetadraw[,c(1,7,13,19,25)],type="l")
+abline(h=1.5)
+matplot(out$Vbetadraw[,-c(1,7,13,19,25)],type="l")
+abline(h=.5)
+}
+
+}
+
+}
+\seealso{ \code{\link{rhierMnlRwMixture}} }
+\keyword{ models}
+
diff --git a/man/rhierLinearModel.Rd b/man/rhierLinearModel.Rd
index 3e0d77b..54b54c0 100755
--- a/man/rhierLinearModel.Rd
+++ b/man/rhierLinearModel.Rd
@@ -14,9 +14,9 @@
rhierLinearModel(Data, Prior, Mcmc)
}
\arguments{
- \item{Data}{ list(regdata,Z)}
- \item{Prior}{ list(Deltabar,A,nu.e,ssq,nu,V)}
- \item{Mcmc}{ list(R,keep)}
+ \item{Data}{ list(regdata,Z) (Z optional). }
+ \item{Prior}{ list(Deltabar,A,nu.e,ssq,nu,V) (optional).}
+ \item{Mcmc}{ list(R,keep) (R required).}
}
\details{
Model: length(regdata) regression equations. \cr
@@ -89,7 +89,7 @@ V=nu*diag(c(rep(1,nvar)))
A=diag(c(rep(.01,nz)),ncol=nz)
Deltabar=matrix(c(rep(0,nz*nvar)),nrow=nz)
-R=1000
+R=2000
Data=list(regdata=regdata,Z=Z)
Prior=list(Deltabar=Deltabar,A=A,nu.e=nu.e,ssq=ssq,nu=nu,V=V)
diff --git a/man/rhierMnlRwMixture.Rd b/man/rhierMnlRwMixture.Rd
index 215f5d1..bd13ad4 100755
--- a/man/rhierMnlRwMixture.Rd
+++ b/man/rhierMnlRwMixture.Rd
@@ -18,29 +18,29 @@
rhierMnlRwMixture(Data, Prior, Mcmc)
}
\arguments{
- \item{Data}{ list(m,lgtdata,Z) (note: Z is optional) }
- \item{Prior}{ list(deltabar,Ad,mubar,Amu,nu,V,ncomp) (note: all but ncomp are optional)}
- \item{Mcmc}{ list(s,c,R,keep) (note: all but R are optional)}
+ \item{Data}{ list(p,lgtdata,Z) ( Z is optional) }
+ \item{Prior}{ list(deltabar,Ad,mubar,Amu,nu,V,ncomp) (all but ncomp are optional)}
+ \item{Mcmc}{ list(s,c,R,keep) (R required)}
}
\details{
Model: \cr
\eqn{y_i} \eqn{\sim}{~} \eqn{MNL(X_i,theta_i)}. i=1,\ldots, length(lgtdata). \eqn{theta_i} is nvar x 1.
- \eqn{theta_i}= ZD[i,] + \eqn{u_i}. \cr
- Note: here ZDelta refers to Z\%*\%D, ZD[i,] is ith row of this product.\cr
- D is an nvar x nz array.
+ \eqn{theta_i}= ZDelta[i,] + \eqn{u_i}. \cr
+ Note: here ZDelta refers to Z\%*\%D, ZDelta[i,] is ith row of this product.\cr
+ Delta is an nz x nvar array.
- \eqn{u_i} \eqn{\sim}{~} \eqn{N(mu_{ind},Sigma_{ind})}. \eqn{ind} \eqn{\sim}{~} multinomial(p). \cr
+ \eqn{u_i} \eqn{\sim}{~} \eqn{N(mu_{ind},Sigma_{ind})}. \eqn{ind} \eqn{\sim}{~} multinomial(pvec). \cr
Priors: \cr
- \eqn{p} \eqn{\sim}{~} dirichlet (a)\cr
- \eqn{delta= vec(D)} \eqn{\sim}{~} \eqn{N(deltabar,A_d^{-1})}\cr
- \eqn{mu_j} \eqn{\sim}{~} \eqn{N(mubar,Amu^{-1}(x)Sigma_j)}\cr
+ \eqn{pvec} \eqn{\sim}{~} dirichlet (a)\cr
+ \eqn{delta= vec(Delta)} \eqn{\sim}{~} \eqn{N(deltabar,A_d^{-1})}\cr
+ \eqn{mu_j} \eqn{\sim}{~} \eqn{N(mubar,Sigma_j (x) Amu^{-1})}\cr
\eqn{Sigma_j} \eqn{\sim}{~} IW(nu,V) \cr
Lists contain:
\itemize{
- \item{\code{m}}{ m is number of choice alternatives}
+ \item{\code{p}}{ p is number of choice alternatives}
\item{\code{lgtdata}}{list of lists with each cross-section unit MNL data}
\item{\code{lgtdata[[i]]$y}}{ \eqn{n_i} vector of multinomial outcomes (1,\ldots,m)}
\item{\code{lgtdata[[i]]$X}}{ \eqn{n_i} by nvar design matrix for ith unit}
@@ -61,7 +61,7 @@ rhierMnlRwMixture(Data, Prior, Mcmc)
a list containing:
\item{Deltadraw}{R/keep x nz*nvar matrix of draws of Delta, first row is initial value}
\item{betadraw}{ nlgt x nvar x R/keep array of draws of betas}
- \item{probdraw}{ R/keep x ncomp matrix of draws of probs of mixture components}
+ \item{probdraw}{ R/keep x ncomp matrix of draws of probs of mixture components (pvec)}
\item{compdraw}{ list of list of lists (length R/keep)}
}
\note{
@@ -75,10 +75,11 @@ rhierMnlRwMixture(Data, Prior, Mcmc)
Note: Z does \strong{not} include an intercept and is centered for ease of interpretation.\cr
- Be careful in assessing prior parameter,Amu. .01 is too small for many applications. See
+ Be careful in assessing prior parameter, Amu. .01 is too small for many applications. See
Allenby et al, chapter 5 for full discussion.\cr
+
+  Large R values may be required (>20,000).
- R=5000 is used in the example below. This is too short for reliable results. Use R > 10000 in practice.
}
\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
by Allenby, McCulloch, and Rossi, Chapter 5. \cr
@@ -96,7 +97,7 @@ if(nchar(Sys.getenv("LONG_TEST")) != 0) # set env var LONG_TEST to run
{
set.seed(66)
-m=3 # num of choice alterns
+p=3 # num of choice alterns
ncoef=3
nlgt=300 # num of cross sectional units
nz=2
@@ -108,16 +109,16 @@ comps=NULL
comps[[1]]=list(mu=c(0,-1,-2),rooti=diag(rep(1,3)))
comps[[2]]=list(mu=c(0,-1,-2)*2,rooti=diag(rep(1,3)))
comps[[3]]=list(mu=c(0,-1,-2)*4,rooti=diag(rep(1,3)))
-p=c(.4,.2,.4)
+pvec=c(.4,.2,.4)
## simulate data
simlgtdata=NULL
ni=rep(50,300)
for (i in 1:nlgt)
-{ betai=Delta\%*\%Z[i,]+as.vector(rmixture(1,p,comps)$x)
+{ betai=Delta\%*\%Z[i,]+as.vector(rmixture(1,pvec,comps)$x)
X=NULL
for(j in 1:ni[i])
- { Xone=cbind(rbind(c(rep(0,m-1)),diag(m-1)),runif(m,min=-1.5,max=0))
+ { Xone=cbind(rbind(c(rep(0,p-1)),diag(p-1)),runif(p,min=-1.5,max=0))
X=rbind(X,Xone) }
outa=simmnlwX(ni[i],X,betai)
simlgtdata[[i]]=list(y=outa$y,X=X,beta=betai)
@@ -141,11 +142,11 @@ deltabar=rep(0,ncoef*nz)
Amu=matrix(.01,ncol=1)
a=rep(5,ncoef)
-R=5000
+R=20000
keep=5
c=2
s=2.93/sqrt(ncoef)
-Data1=list(m=m,lgtdata=simlgtdata,Z=Z)
+Data1=list(p=p,lgtdata=simlgtdata,Z=Z)
Prior1=list(ncomp=ncomp,nu=nu,V=V,Amu=Amu,mubar=mubar,a=a,Ad=Ad,deltabar=deltabar)
Mcmc1=list(s=s,c=c,R=R,keep=keep)
out=rhierMnlRwMixture(Data=Data1,Prior=Prior1,Mcmc=Mcmc1)
@@ -155,7 +156,7 @@ cat(" Deltadraws ",fill=TRUE)
mat=apply(out$Deltadraw[begin:end,],2,quantile,probs=c(.01,.05,.5,.95,.99))
mat=rbind(as.vector(Delta),mat); rownames(mat)[1]="delta"; print(mat)
-tmom=momMix(matrix(p,nrow=1),list(comps))
+tmom=momMix(matrix(pvec,nrow=1),list(comps))
pmom=momMix(out$probdraw[begin:end,],out$compdraw[begin:end])
mat=rbind(tmom$mu,pmom$mu)
rownames(mat)=c("true","post expect")
@@ -182,13 +183,13 @@ abline(h=simlgtdata[[100]]$beta[3])
plot(out$betadraw[101,3,])
abline(h=simlgtdata[[101]]$beta[3])
par(mfrow=c(4,1))
-plot(out$deltadraw[,1])
+plot(out$Deltadraw[,1])
abline(h=Delta[1,1])
-plot(out$deltadraw[,2])
+plot(out$Deltadraw[,2])
abline(h=Delta[2,1])
-plot(out$deltadraw[,3])
+plot(out$Deltadraw[,3])
abline(h=Delta[3,1])
-plot(out$deltadraw[,6])
+plot(out$Deltadraw[,6])
abline(h=Delta[3,2])
begin=100
end=1000
@@ -201,7 +202,7 @@ par(mfrow=c(2,ncoef))
for(i in 1:ncoef)
{plot(grid[,i],mden[,i],type="l")}
for(i in 1:ncoef)
-tden=mixDen(grid,p,comps)
+tden=mixDen(grid,pvec,comps)
for(i in 1:ncoef)
{plot(grid[,i],tden[,i],type="l")}
}
diff --git a/man/rivGibbs.Rd b/man/rivGibbs.Rd
index 87b367b..4f85d57 100755
--- a/man/rivGibbs.Rd
+++ b/man/rivGibbs.Rd
@@ -16,8 +16,8 @@ rivGibbs(Data, Prior, Mcmc)
}
\arguments{
\item{Data}{ list(z,w,x,y) }
- \item{Prior}{ list(md,Ad,mbg,Abg,nu,V) this is an optional parm}
- \item{Mcmc}{ list(R,keep)}
+ \item{Prior}{ list(md,Ad,mbg,Abg,nu,V) (optional) }
+ \item{Mcmc}{ list(R,keep) (R required) }
}
\details{
Model:\cr
diff --git a/man/rmixture.Rd b/man/rmixture.Rd
index f8c405c..df82a6c 100755
--- a/man/rmixture.Rd
+++ b/man/rmixture.Rd
@@ -8,15 +8,15 @@
\code{rmixture} simulates iid draws from a Multivariate Mixture of Normals
}
\usage{
-rmixture(n, p, comps)
+rmixture(n, pvec, comps)
}
\arguments{
\item{n}{ number of observations }
- \item{p}{ ncomp x 1 vector of prior probabilities for each mixture component }
+ \item{pvec}{ ncomp x 1 vector of prior probabilities for each mixture component }
\item{comps}{ list of mixture component parameters }
}
\details{
- comps is a list of length(ncomp) = length(p). comps[[j]][[1]] is mean vector for the jth component.
+ comps is a list of length ncomp = length(pvec). comps[[j]][[1]] is the mean vector for the jth component.
comps[[j]][[2]] is the inverse of the cholesky root of Sigma for that component
}
\value{
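Illustrative only (not in the patch): building comps on the "rooti" scale described in the Details, mirroring the solve(chol(Sigma)) idiom used in the rnmixGibbs example further down, and drawing from a two-component bivariate mixture.

pvec=c(.7,.3)
Sigma1=matrix(c(1,.5,.5,1),2,2); Sigma2=diag(2)
comps=list(list(mu=c(0,0),rooti=solve(chol(Sigma1))),   # inverse of Cholesky root of Sigma
           list(mu=c(3,3),rooti=solve(chol(Sigma2))))
draws=rmixture(1000,pvec,comps)
colMeans(draws$x)                                       # approx .7*c(0,0)+.3*c(3,3) = c(.9,.9)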
diff --git a/man/rmnlIndepMetrop.Rd b/man/rmnlIndepMetrop.Rd
index e30fc5c..d79ba6f 100755
--- a/man/rmnlIndepMetrop.Rd
+++ b/man/rmnlIndepMetrop.Rd
@@ -12,20 +12,20 @@
rmnlIndepMetrop(Data, Prior, Mcmc)
}
\arguments{
- \item{Data}{ list(m,X,y)}
+ \item{Data}{ list(p,y,X)}
\item{Prior}{ list(A,betabar) optional}
\item{Mcmc}{ list(R,keep,nu) }
}
\details{
- Model: \eqn{Pr(y=j) = exp(x_j'beta)/\sum_k{e^{x_k'beta}}}. \cr
+ Model: y \eqn{\sim}{~} MNL(X,beta). \eqn{Pr(y=j) = exp(x_j'beta)/\sum_k{e^{x_k'beta}}}. \cr
Prior: \eqn{beta} \eqn{\sim}{~} \eqn{N(betabar,A^{-1})} \cr
list arguments contain:
\itemize{
- \item{\code{m}}{number of alternatives}
+ \item{\code{p}}{number of alternatives}
+ \item{\code{y}}{ nobs vector of multinomial outcomes (1,\ldots, p)}
\item{\code{X}}{nobs*m x nvar matrix}
- \item{\code{y}}{ nobs vector of multinomial outcomes (1,\ldots, m)}
\item{\code{A}}{ nvar x nvar pds prior prec matrix (def: .01I)}
\item{\code{betabar}}{ nvar x 1 prior mean (def: 0)}
\item{\code{R}}{ number of MCMC draws}
@@ -42,21 +42,18 @@ rmnlIndepMetrop(Data, Prior, Mcmc)
\seealso{ \code{\link{rhierMnlRwMixture}} }
\examples{
##
-if(nchar(Sys.getenv("LONG_TEST")) != 0) # set env var LONG_TEST to run
-{
set.seed(66)
-n=200; m=3; beta=c(1,-1,1.5,.5)
-simout=simmnl(m,n,beta)
+n=200; p=3; beta=c(1,-1,1.5,.5)
+simout=simmnl(p,n,beta)
A=diag(c(rep(.01,length(beta)))); betabar=rep(0,length(beta))
R=2000
-Data=list(y=simout$y,X=simout$X,m=m); Mcmc=list(R=R,keep=1) ; Prior=list(A=A,betabar=betabar)
+Data=list(y=simout$y,X=simout$X,p=p); Mcmc=list(R=R,keep=1) ; Prior=list(A=A,betabar=betabar)
out=rmnlIndepMetrop(Data=Data,Prior=Prior,Mcmc=Mcmc)
cat(" Betadraws ",fill=TRUE)
mat=apply(out$betadraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
mat=rbind(beta,mat); rownames(mat)[1]="beta"; print(mat)
-}
}
\keyword{ models }
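A worked illustration (values made up, not from the patch) of the MNL choice probability given in the Details above, for one observation with p=3 alternatives and nvar=4 coefficients:

beta=c(1,-1,1.5,.5)
X=matrix(rnorm(3*4),nrow=3)        # row j holds x_j' for alternative j
v=as.vector(X%*%beta)
prob=exp(v)/sum(exp(v))            # Pr(y=j), j=1,2,3
sum(prob)                          # equals 1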
diff --git a/man/rmnpGibbs.Rd b/man/rmnpGibbs.Rd
index ad1a999..940a8c5 100755
--- a/man/rmnpGibbs.Rd
+++ b/man/rmnpGibbs.Rd
@@ -16,14 +16,14 @@ rmnpGibbs(Data, Prior, Mcmc)
\arguments{
\item{Data}{ list(p, y, X)}
- \item{Prior}{ list(betabar,A,nu,V)}
- \item{Mcmc}{ list(beta0,sigma0,R,keep) }
+ \item{Prior}{ list(betabar,A,nu,V) (optional)}
+ \item{Mcmc}{ list(beta0,sigma0,R,keep) (R required) }
}
\details{
model: \cr
\eqn{w_i = X_i\beta + e}. \eqn{e} \eqn{\sim}{~} \eqn{N(0,Sigma)}. note: \eqn{w_i, e} are (p-1) x 1.\cr
- \eqn{y_i = j}, if \eqn{w_{ij} > w_{i,-j}} j=1,\ldots,p-1. \eqn{w_{i,-j}} means elements of \eqn{w_i}
+ \eqn{y_i = j}, if \eqn{w_{ij} > max(0,w_{i,-j})} j=1,\ldots,p-1. \eqn{w_{i,-j}} means elements of \eqn{w_i}
other than the jth. \cr
\eqn{y_i = p}, if all \eqn{w_i < 0}.\cr
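Editor's note, not part of the patch: the mapping from the latent w_i (length p-1) to the observed y_i described above can be written in one line; the values below are made up.

p=3
w=c(-.2,.7)                            # latent utilities relative to alternative p
y=if(max(w)<0) p else which.max(w)     # y=j if w_ij exceeds 0 and the other w's; y=p otherwise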
diff --git a/man/rmvpGibbs.Rd b/man/rmvpGibbs.Rd
index c7c829d..b8ecebe 100755
--- a/man/rmvpGibbs.Rd
+++ b/man/rmvpGibbs.Rd
@@ -17,8 +17,8 @@ rmvpGibbs(Data, Prior, Mcmc)
}
\arguments{
\item{Data}{ list(p,y,X)}
- \item{Prior}{ list(betabar,A,nu,V)}
- \item{Mcmc}{ list(beta0,sigma0,R,keep) }
+ \item{Prior}{ list(betabar,A,nu,V) (optional)}
+ \item{Mcmc}{ list(beta0,sigma0,R,keep) (R required) }
}
\details{
@@ -36,7 +36,7 @@ rmvpGibbs(Data, Prior, Mcmc)
\itemize{
\item{\code{p}}{dimension of multivariate probit}
\item{\code{X}}{n*p x k Design Matrix}
- \item{\code{y}}{n x 1 vector of multinomial outcomes}
+ \item{\code{y}}{n*p x 1 vector of 0,1 outcomes}
\item{\code{betabar}}{k x 1 prior mean (def: 0)}
\item{\code{A}}{k x k prior precision matrix (def: .01I)}
\item{\code{nu}}{ d.f. parm for IWishart prior (def: (p-1) + 3)}
@@ -95,7 +95,7 @@ mat=apply(out$betadraw/sqrt(out$sigmadraw[,ind]),2,quantile,probs=c(.01,.05,.5,.
mat=rbind(beta,mat); rownames(mat)[1]="beta"; print(mat)
rdraw=matrix(double((R)*p*p),ncol=p*p)
rdraw=t(apply(out$sigmadraw,1,nmat))
-cat(" Sigmadraws ",fill=TRUE)
+cat(" Draws of Correlation Matrix ",fill=TRUE)
mat=apply(rdraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
mat=rbind(as.vector(Sigma),mat); rownames(mat)[1]="Sigma"; print(mat)
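Not in the patch: what the nmat() step above does for a single draw, assuming (as its use inside apply suggests) that nmat takes a vectorized covariance matrix and returns the vectorized correlation matrix.

Sig=matrix(c(1,.5,.5,2),2,2)
matrix(nmat(as.vector(Sig)),2,2)   # should agree with cov2cor(Sig)
cov2cor(Sig)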
diff --git a/man/rnmixGibbs.Rd b/man/rnmixGibbs.Rd
index e9f6fb9..0f08d0b 100755
--- a/man/rnmixGibbs.Rd
+++ b/man/rnmixGibbs.Rd
@@ -14,15 +14,16 @@ rnmixGibbs(Data, Prior, Mcmc)
}
\arguments{
\item{Data}{ list(y) }
- \item{Prior}{ list(Mubar,A,nu,V,a,ncomp)}
- \item{Mcmc}{ list(R,keep) }
+ \item{Prior}{ list(Mubar,A,nu,V,a,ncomp) (only ncomp required)}
+ \item{Mcmc}{ list(R,keep) (R required) }
}
\details{
Model: \cr
- \eqn{y_i} \eqn{\sim}{~} \eqn{N(mu_ind,Sigma_ind)}. \cr
- ind \eqn{\sim}{~} iid multinomial(p). p is a ncomp x 1 vector of probs. \cr
+ \eqn{y_i} \eqn{\sim}{~} \eqn{N(mu_{ind_i},Sigma_{ind_i})}. \cr
+ ind \eqn{\sim}{~} iid multinomial(p). p is a ncomp x 1 vector of probs.
+
Priors:\cr
- \eqn{mu_j} \eqn{\sim}{~} \eqn{N(mubar,Sigma (x) A^{-1})}. \eqn{mubar=vec(Mubar)}. \cr
+ \eqn{mu_j} \eqn{\sim}{~} \eqn{N(mubar,Sigma_j (x) A^{-1})}. \eqn{mubar=vec(Mubar)}. \cr
\eqn{Sigma_j} \eqn{\sim}{~} IW(nu,V).\cr
note: this is the natural conjugate prior -- a special case of multivariate
regression.\cr
@@ -51,7 +52,7 @@ rnmixGibbs(Data, Prior, Mcmc)
a list containing:
\item{probdraw}{R/keep x ncomp array of mixture prob draws}
\item{zdraw}{R/keep x nobs array of indicators of mixture comp identity for each obs}
- \item{compsdraw}{R/keep lists of lists of comp parm draws}
+ \item{compdraw}{R/keep lists of lists of comp parm draws}
}
\note{
@@ -83,10 +84,10 @@ sigfac = c(1,1,1);mufac=c(1,2,3); compsmv=list()
for(i in 1:k) compsmv[[i]] = list(mu=mufac[i]*1:dim,sigma=sigfac[i]*sigma)
comps = list() # change to "rooti" scale
for(i in 1:k) comps[[i]] = list(mu=compsmv[[i]][[1]],rooti=solve(chol(compsmv[[i]][[2]])))
-p=(1:k)/sum(1:k)
+pvec=(1:k)/sum(1:k)
nobs=5000
-dm = rmixture(nobs,p,comps)
+dm = rmixture(nobs,pvec,comps)
Data=list(y=dm$x)
ncomp=9
@@ -94,7 +95,7 @@ Prior=list(ncomp=ncomp,a=c(rep(1,ncomp)))
Mcmc=list(R=2000,keep=1)
out=rnmixGibbs(Data=Data,Prior=Prior,Mcmc=Mcmc)
-tmom=momMix(matrix(p,nrow=1),list(comps))
+tmom=momMix(matrix(pvec,nrow=1),list(comps))
pmom=momMix(out$probdraw[500:2000,],out$compdraw[500:2000])
mat=rbind(tmom$mu,pmom$mu)
rownames(mat)=c("true","post expect")
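A possible follow-up to the example above (illustrative, not in the patch), reusing dm, pvec and comps from that example: evaluate the true marginal densities on a grid with mixDen, as done in the rhierMnlRwMixture example earlier in this patch.

grid=apply(dm$x,2,function(v) seq(min(v),max(v),length=50))  # 50-point grid per variable
tden=mixDen(grid,pvec,comps)                                 # true marginal densities
par(mfrow=c(1,ncol(grid)))
for(i in 1:ncol(grid)) plot(grid[,i],tden[,i],type="l")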
diff --git a/man/rscaleUsage.Rd b/man/rscaleUsage.Rd
index 60bdafb..16ea665 100755
--- a/man/rscaleUsage.Rd
+++ b/man/rscaleUsage.Rd
@@ -15,21 +15,21 @@ rscaleUsage(Data,Prior, Mcmc)
}
\arguments{
\item{Data}{ list(k,x)}
- \item{Prior}{ list(nu,V,mubar,Am,gsigma,gl11,gl22,gl12,Lambdanu,LambdaV,ge) optional }
- \item{Mcmc}{ list(R,keep,ndghk,printevery,e,y,mu,Sigma,sigma,tau,Lambda) optional }
+ \item{Prior}{ list(nu,V,mubar,Am,gsigma,gl11,gl22,gl12,Lambdanu,LambdaV,ge) (optional) }
+ \item{Mcmc}{ list(R,keep,ndghk,printevery,e,y,mu,Sigma,sigma,tau,Lambda) (optional) }
}
\details{
- Model: n=nrow(X) individuals respond to m=ncol(X) questions. all questions are on a scale 1, \ldots, k.\cr
-
+ Model: n=nrow(x) individuals respond to m=ncol(x) questions. All questions are on a scale 1, \ldots, k.
for respondent i and question j, \cr
- \eqn{x_ij = d}, if \eqn{c_d-1 \le y_i,j \le c_d}. \cr
+ \eqn{x_{ij} = d}, if \eqn{c_{d-1} \le y_{ij} \le c_d}. \cr
d=1,\ldots,k. \eqn{c_d = a + bd +ed^2}. \cr
\eqn{y_i = mu + tau_i*iota + sigma_i*z_i}. \eqn{z_i} \eqn{\sim}{~} \eqn{N(0,Sigma)}. \cr
- \eqn{(tau_i,ln(sigma_i))} \eqn{\sim}{~} \eqn{N(phi,Lamda)}. \eqn{phi=(0,lambda_{22})}. \cr
+
Priors:\cr
+ \eqn{(tau_i,ln(sigma_i))} \eqn{\sim}{~} \eqn{N(phi,Lambda)}. \eqn{phi=(0,lambda_{22})}. \cr
mu \eqn{\sim}{~} \eqn{N(mubar, Am{^-1})}.\cr
Sigma \eqn{\sim}{~} IW(nu,V).\cr
Lambda \eqn{\sim}{~} IW(Lambdanu,LambdaV).\cr
@@ -37,7 +37,7 @@ rscaleUsage(Data,Prior, Mcmc)
}
\value{
a list containing:
- \item{drawSigma}{R/keep x m*m array of Sigma draws}
+ \item{Sigmadraw}{R/keep x m*m array of Sigma draws}
\item{mudraw}{R/keep x m array of mu draws}
\item{taudraw}{R/keep x n array of tau draws}
\item{sigmadraw}{R/keep x n array of sigma draws}
@@ -49,6 +49,12 @@ rscaleUsage(Data,Prior, Mcmc)
\code{Prior} and setting \code{R} in Mcmc and \code{Data} only. If you wish to change prior settings and/or
the grids used, please read the case study in Allenby et al carefully.
}
+\section{Warning}{
+ \eqn{tau_i}, \eqn{sigma_i} are identified from the scale usage patterns in the m questions asked per
+ respondent (\# cols of x). Do not attempt to use this on data sets with only a small number of
+ total questions!
+
+}
\references{ For further discussion, see \emph{Bayesian Statistics and Marketing}
by Allenby, McCulloch, and Rossi, Case Study on Scale Usage Heterogeneity. \cr
@@ -58,4 +64,22 @@ rscaleUsage(Data,Prior, Mcmc)
\author{ Rob McCulloch and Peter Rossi, Graduate School of Business, University of Chicago,
\email{Peter.Rossi at ChicagoGsb.edu}.
}
+
+\examples{
+##
+if(nchar(Sys.getenv("LONG_TEST")) != 0)
+{
+data(customerSat)
+surveydat = list(k=10,x=as.matrix(customerSat))
+
+mcmc = list(R=1000)
+set.seed(66)
+out=rscaleUsage(Data=surveydat,Mcmc=mcmc)
+
+cat(" mudraws ",fill=TRUE)
+mat=apply(out$mudraw,2,quantile,probs=c(.01,.05,.5,.95,.99))
+print(mat)
+
+}
+}
\keyword{ models }
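Editor's illustration of the cutoff scheme in the Details (the a, b, e values here are invented; rscaleUsage chooses them internally over its own grids):

k=10; a=-.5; b=1; e=0
cuts=a+b*(0:k)+e*(0:k)^2           # c_0,...,c_k
y=4.3                              # a latent response
findInterval(y,cuts)               # returns the d with c_{d-1} <= y <= c_d, here 5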
diff --git a/man/simmnl.Rd b/man/simmnl.Rd
index dc6ce63..bc6e75e 100755
--- a/man/simmnl.Rd
+++ b/man/simmnl.Rd
@@ -7,10 +7,10 @@
\code{simmnl} simulates from the MNL model.
}
\usage{
-simmnl(j, n, beta)
+simmnl(p, n, beta)
}
\arguments{
- \item{j}{ number choice alternatives }
+ \item{p}{ number of choice alternatives }
\item{n}{ number of observations }
\item{beta}{ MNL coefficient vector }
}
@@ -19,7 +19,7 @@ simmnl(j, n, beta)
}
\value{
- \item{y}{ n x 1 vector of multinomial outcomes (1, \ldots, j)}
+ \item{y}{ n x 1 vector of multinomial outcomes (1, \ldots, p)}
\item{X} { n x k design matrix }
\item{beta}{beta vector }
\item{prob}{n x j array of choice probabilities }
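A quick check one might run (not part of the patch): simulate from the MNL model and compare empirical choice shares to the average of the returned probabilities.

set.seed(66)
sim=simmnl(3,1000,c(1,-1,1.5,.5))
rbind(share=as.vector(table(sim$y))/1000,  # empirical choice shares
      prob=colMeans(sim$prob))             # average simulated choice probabilities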
--