#############################################################################
# EXAMPLE 1: Cluster robust standard errors data.ma01
#############################################################################
data(data.ma01)
dat <- data.ma01

#*** Model 1: Linear regression, clustered by school identifier
mod1 <- lm.cluster(
  formula = read ~ hisei + female,
  data = dat,
  cluster = "idschool"
)
coef(mod1)     # coefficient estimates
vcov(mod1)     # cluster-robust covariance matrix
summary(mod1)
# Re-estimate Model 1, this time passing the cluster identifier
# as a vector rather than as a column name
mod1b <- lm.cluster(
  formula = read ~ hisei + female,
  data = dat,
  cluster = dat$idschool
)
summary(mod1b)
#*** Model 2: Logistic regression, clustered by school identifier
# Dummy indicator: 1 if the math score exceeds 600, else 0
dat$highmath <- as.numeric(dat$math > 600)
mod2 <- glm.cluster(
  formula = highmath ~ hisei + female,
  data = dat,
  cluster = "idschool",
  family = "binomial"
)
coef(mod2)     # coefficient estimates
vcov(mod2)     # cluster-robust covariance matrix
summary(mod2)
## Not run:
# #############################################################################
# # EXAMPLE 2: Cluster robust standard errors for multiply imputed datasets
# #############################################################################
#
# library(mitools)
# data(data.ma05)
# dat <- data.ma05
#
# # imputation of the dataset: use six imputations
# # (the first two columns are dropped before imputation -- presumably the
# # identifier columns; verify against the data.ma05 codebook)
# resp <- dat[ , - c(1:2) ]
# # NOTE(review): "imputationMethod" is mice's legacy argument name; newer
# # mice versions call it "method" -- TODO confirm with the installed version
# imp <- mice::mice( resp , imputationMethod="norm" , maxit=3 , m=6 )
# datlist <- mids2datlist( imp )
#
# # linear regression with cluster robust standard errors, fitted
# # separately within each imputed dataset; the cluster vector comes from
# # the original data (dat$idclass), which was excluded from imputation
# mod <- lapply( datlist, FUN = function(data){
# lm.cluster( data=data , formula=denote ~ migrant+ misei ,
# cluster = dat$idclass )
# } )
# # extract parameters and covariance matrix
# # (one coefficient vector and one vcov matrix per imputed dataset)
# betas <- lapply( mod , FUN = function(rr){ coef(rr) } )
# vars <- lapply( mod , FUN = function(rr){ vcov(rr) } )
# # conduct statistical inference
# # pool_mi combines the per-imputation estimates and variances
# # (presumably via Rubin's rules -- see the pool_mi documentation)
# summary(pool_mi( qhat = betas, u = vars ))
# ## End(Not run)
# Run the code above in your browser using DataLab