data(iris)
# learner wrapping grf::probability_forest; extra arguments (...) are stored
# as estimate.args and passed on when the model is estimated
rf <- function(formula, ...) {
  learner$new(formula,
    info = "grf::probability_forest",
    estimate = function(x, y, ...) {
      grf::probability_forest(X = x, Y = y, ...)
    },
    predict = function(object, newdata) {
      predict(object, newdata)$predictions
    },
    estimate.args = list(...)
  )
}
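# e.g. a single learner constructed directly from the wrapper above; the
# extra argument ends up in estimate.args and is forwarded to
# grf::probability_forest
rf(Species ~ ., num.trees = 100)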
args <- expand.list(
  num.trees = c(100, 200), mtry = 1:3,
  formula = c(Species ~ ., Species ~ Sepal.Length + Sepal.Width)
)
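# args holds every combination of the arguments above; its 'table' attribute
# (used below) is the corresponding parameter grid
length(args)
attr(args, "table")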
models <- lapply(args, function(par) do.call(rf, par))
x <- models[[1]]$clone()
x$estimate(iris)
predict(x, newdata = head(iris))
# cross-validate all learner specifications on the iris data
a <- targeted::cv(models, data = iris)
cbind(coef(a), attr(args, "table"))
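# e.g. pick the best-scoring specification (this sketch assumes the first
# column of coef(a) is a loss where smaller values are better)
best <- which.min(coef(a)[, 1])
models[[best]]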
# defining a learner via a function with arguments y (response)
# and x (design matrix)
f1 <- learner$new(
  estimate = function(y, x) lm.fit(x = x, y = y),
  predict = function(object, newdata) newdata %*% object$coefficients
)
# defining the learner via arguments formula and data
f2 <- learner$new(
  estimate = function(formula, data, ...) glm(formula, data = data, ...)
)
# generic learner defined from a function (predict method derived by default
# from stats::predict)
f3 <- learner$new(
  estimate = function(dt, ...) {
    lm(y ~ x, data = dt)
  }
)
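# e.g. estimate f3 on simulated data and predict via the default
# stats::predict method (this sketch assumes the data.frame is passed as the
# first argument of the estimate function, as with x$estimate(iris) above)
dt <- data.frame(x = rnorm(50))
dt$y <- dt$x + rnorm(50)
f3$estimate(dt)
predict(f3, newdata = data.frame(x = c(-1, 0, 1)))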
## ------------------------------------------------
## Method `learner$summary`
## ------------------------------------------------
lr <- learner_glm(y ~ x, family = "nb")
lr$summary()
lr_sum <- lr$summary() # store returned summary in new object
names(lr_sum)
print(lr_sum)