# Load the API (Academic Performance Index) example datasets from the
# survey package: apiclus1 is a one-stage cluster sample of schools.
data(api)

# Build a cluster-sample design (clusters = district number `dnum`,
# sampling weights `pw`, finite-population correction `fpc`) and convert
# it to a replicate-weights design for use with withCrossval().
rclus1 <- as.svrepdesign(
  svydesign(id = ~dnum, weights = ~pw, data = apiclus1, fpc = ~fpc)
)

# Design-based cross-validation of a weighted linear model:
# fit with lm.wfit() on the training folds, predict by X %*% coef on the
# test fold, and score with mean squared error. `tuning` is unused here
# but required by the withCrossval() interface.
withCrossval(
  rclus1, api00 ~ api99 + ell + stype,
  trainfun = function(X, y, w, tuning) lm.wfit(X, y, w),
  testfun = function(X, trainfit, tuning) X %*% coef(trainfit),
  intercept = TRUE, loss = "MSE", tuning = 1
)
## More realistic example using lasso
## tuning parameter is number of variables in model
##
## library(glmnet)
## ftrain=function(X,y,w,tuning) {
## m<-glmnet(X,y,weights=w)
## lambda<-m$lambda[min(which(m$df>=tuning))]
## list(m,lambda)
## }
## ftest=function(X, trainfit, tuning){
## predict(trainfit[[1]], newx=X, s=trainfit[[2]])
## }
##
## withCrossval(rclus1, api00~api99+ell+stype+mobility+enroll,
## trainfun=ftrain,
## testfun=ftest,
## intercept=FALSE,loss="MSE",
## tuning=0:3)
##
## [1] 11445.2379 9649.1150 800.0742 787.4171
##
## Models with two or three predictors are about equally good
## Run the code above in your browser using DataLab