## Not run:
if (require("glmnet") && require("ranger")) {
## generate data
set.seed(1)
n <- 150 # number of observations
p <- 5 # number of covariates
D <- rbinom(n, 1, 0.5) # random treatment assignment
Z <- matrix(runif(n*p), n, p) # design matrix
Y0 <- as.numeric(Z %*% rexp(p) + rnorm(n)) # potential outcome without treatment
Y1 <- 2 + Y0 # potential outcome under treatment
Y <- ifelse(D == 1, Y1, Y0) # observed outcome
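# by construction, the treatment effect Y1 - Y0 is homogeneous and equal to 2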
## column names of Z
colnames(Z) <- paste0("V", 1:p)
## specify learners
learners <- c("lasso", "mlr3::lrn('ranger', num.trees = 10)")
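# learners can be given either as built-in keywords (such as "lasso" above)
# or as mlr3 constructor strings, like the 'ranger' specification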
## glmnet v4.1.3 is not supported on Solaris, so skip the lasso learner there
if (Sys.info()["sysname"] == "SunOS") learners <- learners[-1]
## specify the quantile cutoffs (here: quartiles, which induce 4 groups)
quantile_cutoffs <- c(0.25, 0.5, 0.75)
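# the groups are labeled G1 (least affected) through G4 (most affected),
# which is how the differenced targets below are indexed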
## specify the differenced generic targets of GATES and CLAN
# use G4-G1, G4-G2, G4-G3 as differenced generic targets in GATES
diff_GATES <- setup_diff(subtract_from = "most",
                         subtracted = c(1, 2, 3))
# use G1-G3, G1-G2 as differenced generic targets in CLAN
diff_CLAN <- setup_diff(subtract_from = "least",
                        subtracted = c(3, 2))
## perform generic ML inference
# small number of splits to keep computation time low
x <- GenericML(Z, D, Y, learners, num_splits = 2,
               quantile_cutoffs = quantile_cutoffs,
               diff_GATES = diff_GATES,
               diff_CLAN = diff_CLAN,
               parallel = FALSE)
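## printing the fitted object gives a compact summary of the results
## (sketch: the exact output layout depends on the installed GenericML version)
print(x)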
## access BLP generic targets for best learner and make plot
get_BLP(x, plot = TRUE)
## access GATES generic targets for best learner and make plot
get_GATES(x, plot = TRUE)
## access CLAN generic targets for "V1" & best learner and make plot
get_CLAN(x, variable = "V1", plot = TRUE)
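## the same accessor works for any other covariate in Z, e.g. "V2"
## (illustrative extra call, not part of the original example)
get_CLAN(x, variable = "V2", plot = TRUE)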
}
## End(Not run)