# NOT RUN {
### Example optimizing Rosenbrock 2D function
### Note that this example is not stochastic, as the
### function is not evaluated in expectation based on
### batches of data, but rather it has a given absolute
### form that never varies.
library(stochQN)
## Rosenbrock "Banana" function, evaluated at a 2-vector `x`.
## Global minimum of 0 at x = c(1, 1).
fr <- function(x) {
  a <- x[1]
  b <- x[2]
  ## Same expression and evaluation order as the classic form,
  ## so floating-point results are bit-identical.
  100 * (b - a * a)^2 + (1 - a)^2
}
## Analytic gradient of the Rosenbrock function `fr`,
## returned as a length-2 numeric vector c(d/dx1, d/dx2).
grr <- function(x) {
  a <- x[1]
  b <- x[2]
  ## Shared subexpression of both partial derivatives.
  inner <- b - a * a
  c(-400 * a * inner - 2 * (1 - a),
    200 * inner)
}
### Initial values of x
x_opt <- as.numeric(c(0, 2))
cat(sprintf("Initial values of x: [%.3f, %.3f]\n",
            x_opt[1], x_opt[2]))
### Will use constant step size throughout
### (not recommended)
step_size <- 1e-2
### Initialize the optimizer
optimizer <- adaQN_free()
### Keep track of the iteration number
curr_iter <- 0
### Run a loop for many iterations
### (Note that some iterations might require more
### than 1 calculation request, so the loop counter `i`
### is not the same as the optimizer's iteration number)
for (i in seq_len(200)) {
  ### Ask the optimizer what it needs next for the current x
  req <- run_adaQN_free(optimizer, x_opt, step_size)
  if (req$task == "calc_grad") {
    update_gradient(optimizer, grr(req$requested_on))
  } else if (req$task == "calc_fun_val_batch") {
    update_fun(optimizer, fr(req$requested_on))
  }
  ### Track progress every 10 iterations.
  ### Print only when the iteration counter first advances to
  ### a new multiple of 10 -- checking the modulo on every loop
  ### pass would emit duplicate lines (including for iteration 0)
  ### whenever an iteration requires more than one request.
  if (req$info$iteration_number > curr_iter) {
    curr_iter <- req$info$iteration_number
    if ((curr_iter %% 10) == 0) {
      cat(sprintf(
        "Iteration %3d - Current function value: %.3f\n",
        curr_iter, fr(x_opt)))
    }
  }
}
cat(sprintf("Current values of x: [%.3f, %.3f]\n",
            x_opt[1], x_opt[2]))
# }
# Run the code above in your browser using DataLab