## Not run:
# ## simulate data
# set.seed(23432)
# ## training set
# n <- 500
# p <- 50
# X <- matrix(rnorm(n*p), nrow = n, ncol = p)
# colnames(X) <- paste("X", 1:p, sep="")
# X <- data.frame(X)
# Y <- X[, 1] + sqrt(abs(X[, 2] * X[, 3])) + X[, 2] - X[, 3] + rnorm(n)
#
# ## test set
# m <- 1000
# newX <- matrix(rnorm(m*p), nrow = m, ncol = p)
# colnames(newX) <- paste("X", 1:p, sep="")
# newX <- data.frame(newX)
# newY <- newX[, 1] + sqrt(abs(newX[, 2] * newX[, 3])) + newX[, 2] -
# newX[, 3] + rnorm(m)
#
# ## generate library and run Super Learner
# SL.library <- c("SL.glm", "SL.randomForest", "SL.gam",
# "SL.polymars", "SL.mean")
# test <- SampleSplitSuperLearner(Y = Y, X = X, newX = newX, SL.library = SL.library,
# verbose = TRUE, method = "method.NNLS")
# test
#
# # library with screening
# SL.library <- list(c("SL.glmnet", "All"), c("SL.glm", "screen.randomForest",
# "All", "screen.SIS"), "SL.randomForest", c("SL.polymars", "All"), "SL.mean")
# test <- SampleSplitSuperLearner(Y = Y, X = X, newX = newX, SL.library = SL.library,
# verbose = TRUE, method = "method.NNLS")
# test
#
# # binary outcome
# set.seed(1)
# N <- 200
# X <- matrix(rnorm(N*10), N, 10)
# X <- as.data.frame(X)
# Y <- rbinom(N, 1, plogis(.2*X[, 1] + .1*X[, 2] - .2*X[, 3] +
# .1*X[, 3]*X[, 4] - .2*abs(X[, 4])))
#
# SL.library <- c("SL.glmnet", "SL.glm", "SL.knn", "SL.gam", "SL.mean")
#
# # least squares loss function
# test.NNLS <- SampleSplitSuperLearner(Y = Y, X = X, SL.library = SL.library,
# verbose = TRUE, method = "method.NNLS", family = binomial())
# test.NNLS
# ## End(Not run)
Run the code above in your browser using DataLab