# NOT RUN {
## Loss values for a sequence of candidate models: loss decreases
## monotonically as model complexity (number of segments) grows.
loss.vec <- c(-9.9, -12.8, -19.2, -22.1, -24.5,
              -26.1, -28.5, -30.1, -32.2, -33.7,
              -35.2, -36.8, -38.2, -39.5, -40.7,
              -41.8, -42.8, -43.9, -44.9, -45.8)
## One model per candidate size; complexity = number of segments.
seg.vec <- seq_along(loss.vec)
## Exact model-selection path: for every penalty lambda, which model
## minimizes loss + lambda * complexity (piecewise-constant in lambda).
exact.df <- penaltyLearning::modelSelectionC(loss.vec, seg.vec, seg.vec)
## Solve the same optimization by grid search: evaluate the penalized
## criterion on a fine grid of log(lambda) values and record which
## model is optimal at each one.
L.grid <- with(exact.df, {
  ## Extend one unit past the exact breakpoints on either side.
  seq(min(max.log.lambda) - 1,
      max(min.log.lambda) + 1,
      length.out = 100)  # spell out the argument; partial matching (l=) is fragile
})
lambda.grid <- exp(L.grid)
## vapply (not sapply) guarantees a numeric vector, one entry per lambda,
## regardless of input length.
kstar.grid <- vapply(lambda.grid, function(lambda) {
  ## Penalized criterion for every candidate model at this lambda.
  crit <- with(exact.df, model.complexity * lambda + model.loss)
  exact.df$model.id[which.min(crit)]
}, FUN.VALUE = numeric(1))
## Collect the grid-search result for plotting.
grid.df <- data.frame(log.lambda = L.grid, segments = kstar.grid)
library(ggplot2)
## Overlay the two solutions: the exact path is drawn as black
## horizontal segments, the grid-search picks as open red circles.
ggplot() +
  ggtitle("grid search (red) agrees with exact path computation (black)") +
  geom_segment(aes(x = min.log.lambda, y = model.id,
                   xend = max.log.lambda, yend = model.id),
               data = exact.df) +
  geom_point(aes(x = log.lambda, y = segments),
             data = grid.df, color = "red", shape = 1) +
  ylab("optimal model complexity (segments)") +
  xlab("log(lambda)")
# }
# Run the code above in your browser using DataLab