# \donttest{
### These examples require an activated Python environment as described in
### Bartz-Beielstein, T., Rehbach, F., Sen, A., and Zaefferer, M.:
### Surrogate Model Based Hyperparameter Tuning for Deep Learning with SPOT,
### June 2021. http://arxiv.org/abs/2105.14625.
PYTHON_RETICULATE <- FALSE
if (PYTHON_RETICULATE) {
## Data preparation: load the census data, split it into train/validation/test
## sets, and build the data specification used by the Keras model.
target <- "age"
batch_size <- 32
prop <- 2/3
dfGeneric <- getDataCensus(target = target, nobs = 1000)
data <- getGenericTrainValTestData(dfGeneric = dfGeneric, prop = prop)
specList <- genericDataPrep(data = data, batch_size = batch_size)
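## Optional sanity check (a sketch, not part of the SPOTMisc workflow): the
## exact structure of specList depends on the installed keras/tfdatasets
## versions.
str(specList, max.level = 1)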
## Model configuration: default hyperparameter values, bounds, types, and
## transformations for the deep learning ("dl") model.
cfg <- getModelConf(list(model = "dl"))
x <- matrix(cfg$default, nrow=1)
transformFun <- cfg$transformations
types <- cfg$type
lower <- cfg$lower
upper <- cfg$upper
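## Quick look at the search space (sketch; this data.frame is built here for
## illustration only and is not returned by getModelConf()):
print(data.frame(lower = lower, upper = upper, type = types))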
kerasConf <- getKerasConf()
### First example: a single function evaluation of the default hyperparameter setting:
message("objectiveFunctionEvaluation(): x before transformX().")
print(x)
if (length(transformFun) > 0) {
  x <- transformX(xNat = x, fn = transformFun)
}
message("objectiveFunctionEvaluation(): x after transformX().")
print(x)
funKerasGeneric(x, kerasConf = kerasConf, specList = specList)
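## Note: funKerasGeneric() returns a matrix of function values; which loss is
## reported is controlled by the settings in kerasConf.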
### Second example: evaluation of several (three) hyperparameter settings:
xxx <- rbind(x,x,x)
funKerasGeneric(xxx, kerasConf = kerasConf, specList = specList)
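## Each row of xxx is evaluated separately, so the call returns one function
## value per hyperparameter setting.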
### Third example: hyperparameter tuning via spot() with extended verbosity:
res <- spot(x = NULL,
            fun = funKerasGeneric,
            lower = lower,
            upper = upper,
            control = list(funEvals = 50,
                           handleNAsMethod = handleNAsMean,
                           noise = TRUE,
                           types = types,
                           plots = TRUE,
                           progress = TRUE,
                           seedFun = 1,
                           seedSPOT = 1,
                           transformFun = transformFun),
            kerasConf = kerasConf,
            specList = specList)
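## Inspect the tuning result (sketch; xbest and ybest are standard elements of
## the result list returned by SPOT::spot()):
print(res$xbest)
print(res$ybest)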
}
# }