# NOT RUN {
#### Set up the gradient function
mysig <- 1 ## std dev of the errors
errdist <- distr::Norm(0, sd=mysig) ## Gaussian error distribution, via the 'distr' package
modeldistname <- truedistname <- "Gauss" ## used for savefile name
mm0 <- function(xx){xx} ## true regression function: the identity (monotone)
nn <- 300
xx <- sort(runif(n=nn, 0, 7)) ## design points
yy <- mm0(xx) + errdist@r(nn) ## responses: true signal plus Gaussian noise
## plot(xx, yy) ## optional: inspect the simulated data
myScale <- mysig
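## Build the gradient ingredients by partial application: purrr::partial()
## pre-fills arguments, and !!x forces x to be evaluated now (not at call
## time), freezing sig and func at their current values.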
AAfunc_Gauss <- purrr::partial(AAfunc_Gauss_generic, sig=!!mysig)
AA_Gauss <- purrr::partial(AA, func=!!AAfunc_Gauss)
BBfunc_Gauss <- purrr::partial(BBfunc_Gauss_generic, sig=!!mysig)
BB_Gauss <- purrr::partial(BB, func=!!BBfunc_Gauss)
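## Optional sanity check: each partial() call above should have returned a
## function with its arguments pre-filled.
stopifnot(is.function(AA_Gauss), is.function(BB_Gauss))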
mygradSIR <-
    grad_SIR_Gauss <- ## two names for the same function, for ease of reference
    purrr::partial(grad_SIR_generic,
                   rescale=TRUE, ## rescale by a factor of nn/2
                   AAfunc=!!AA_Gauss, BBfunc=!!BB_Gauss)
## Now run the gradient descent
savefilenameUnique <- paste("graddesc_", modeldistname, "_", truedistname,
                            "_n", nn,
                            "_", format(Sys.time(), "%Y-%m-%d-%T"), ".rsav",
                            sep="")
print(paste("The unique save file name for this run is", savefilenameUnique))
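## e.g. "graddesc_Gauss_Gauss_n300_2024-01-01-12:00:00.rsav"; note that the
## colons produced by %T are not legal in Windows filenames, so a different
## time format may be needed on that platform.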
stepsize <- nn^(1/2) ## Has to be tuned
MM <- 100 ## Total number of iterations is MM * JJ
JJ <- 2
eps <- (max(yy)-min(yy)) / (1000 * nn^(1/5) * myScale) ## small tolerance on the response scale (defined here but not passed to UMRgradDesc below)
## print *and* SAVE every 'printevery' iterations.
## here no save occurs, since printevery > MM
printevery <- 1000
init <- yy ## initialize the fit at the raw responses
mmhat <- UMRgradDesc(yy=yy, grad=mygradSIR, ## the gradient defined above
                     init=init,
                     stepsize=stepsize, MM=MM,
                     printevery=printevery,
                     filename=paste0("../saves/", savefilenameUnique))
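## A minimal sketch for inspecting the fit, assuming mmhat holds fitted values
## at the sorted design points (check UMRgradDesc's documented return value):
## plot(xx, yy, col="gray")
## lines(xx, mmhat, col="blue", lwd=2)  ## gradient-descent fit
## lines(xx, mm0(xx), col="red", lty=2) ## true regression function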
#### Some classical/matched [oracle] estimators
isoreg_std <- Iso::ufit(y=yy, x=xx, lmode=Inf) ## lmode=Inf forces a monotone increasing fit
mmhat_std <- isoreg_std$y ## isotonic regression fitted values at the sorted xx
linreg_std <- lm(yy ~ xx) ## linear regression benchmark
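## Quick oracle comparison (a sketch): mean squared error of each classical
## estimator against the true function mm0 at the design points; mmhat is
## omitted since its return format depends on UMRgradDesc.
mse_iso <- mean((mmhat_std - mm0(xx))^2)          ## isotonic regression MSE
mse_lin <- mean((fitted(linreg_std) - mm0(xx))^2) ## linear regression MSE
print(round(c(iso=mse_iso, lin=mse_lin), 4))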
# }