### Here's an itty bitty example:
### we use stochastic search to find the minimum number in a vector
### GP isn't used here, and hence neither are p.ndx.ls nor f.d
### however, we still need to create them since MSS.snow requires their existence
if (FALSE) {
### Loader for the minimum-search example.  MSS.snow requires a loader
### function that publishes p.ndx.ls, f.d, FUN.MH, FUN.GP, FUN.I and
### FUN.EXIT to the global environment, which is why every object is
### assigned with `<<-` here.
### Reads globals: run.parallel (logical), xx (the data vector searched).
fun.load.simpleExample <- function() {
  # Ship the data vector to the snowfall workers when running in parallel.
  if (run.parallel) {
    sfExport("xx")
  }

  # One parameter group.  GP is not actually used in this example (see the
  # header comments), but MSS.snow requires p.ndx.ls and f.d to exist.
  p.ndx.ls <- list(c(1))
  p.ndx.ls <<- p.ndx.ls

  f.d <- list(dlog.norm)
  f.d <<- f.d

  # Cost function: draw one candidate from xx at random; MSS.snow tracks
  # the smallest cost seen so far.
  FUN.MH <- function(jj, GP.mx, X) {
    our.cost <- sample(xx, 1)
    # Explicit return for consistency with fun.load.simpleExample2
    # (previously relied on the assignment's invisible value).
    return(our.cost)
  }
  FUN.MH <<- FUN.MH

  # No GP update function in this example.
  FUN.GP <- NULL
  FUN.GP <<- FUN.GP

  # Improvement callback: announce each new best value found.
  FUN.I <- function(envmh, X) {
    cat( "Hello, I have found an even smaller number in xx ---> ", envmh$current.best, "\n" )
  }
  FUN.I <<- FUN.I

  # Exit callback: runs once when the search finishes.
  FUN.EXIT <- function(envmh, X) {
    cat( "Done", "\n" )
  }
  FUN.EXIT <<- FUN.EXIT
}
## Driver for example 1: stochastic search for the minimum of xx.
run.parallel <- TRUE
GP <- c(1)
xx <- seq_len(600)
sfInit(parallel = TRUE, cpus = 2)
MH.source <- fun.load.simpleExample
MH.source()  # publish p.ndx.ls, f.d and the FUN.* callbacks globally
MSS.snow(MH.source, Inf, p.ndx.ls, f.d, matrix(1, nrow = 28), 28, 7)
sfStop()
### Here's another itty bitty example:
### we use stochastic search to find the mean of a vector
### i.e., the argmin over theta of sum( (x - theta)^2 )
### Loader for the mean-search example.  MSS.snow requires a loader that
### publishes p.ndx.ls, f.d, FUN.MH, FUN.GP, FUN.I and FUN.EXIT to the
### global environment, hence the `<<-` assignments throughout.
### Reads globals: run.parallel (logical), xx (numeric data vector).
fun.load.simpleExample2 <- function() {
  # Make the data visible on the snowfall workers when running parallel.
  if (run.parallel) {
    sfExport("xx")
  }

  # One parameter group, proposed via unif.mh.
  p.ndx.ls <<- list(c(1))
  f.d <<- list(unif.mh)

  # Cost: squared error of the candidate GP.mx[jj, 1] against xx;
  # minimizing this recovers the sample mean.
  FUN.MH <<- function(jj, GP.mx, X) {
    our.cost <- sum((xx - GP.mx[jj, 1])^2)
    return(our.cost)
  }

  # No GP update function in this example.
  FUN.GP <<- NULL

  # Improvement callback: report each new best cost and parameter value.
  FUN.I <<- function(envmh, X) {
    cat( "Improvement ---> ", envmh$current.best, " ---- " , envmh$GP, "\n" )
  }

  # Exit callback: print the final parameter and cost.
  FUN.EXIT <<- function(envmh, X) {
    our.cost <- envmh$current.best
    GP <- envmh$GP
    cat( "Done", "\n" )
    cat( envmh$GP, our.cost, "\n" )
  }
}
## Driver for example 2: stochastic search for the mean of xx.
## set.seed(99999)  # uncomment for a reproducible run
run.parallel <- TRUE
GP <- c(1)
xx <- rnorm(300, mean = 5, sd = 10)
sfInit(parallel = TRUE, cpus = 2)
MH.source <- fun.load.simpleExample2
MH.source()  # publish p.ndx.ls, f.d and the FUN.* callbacks globally
prop.mx <- matrix(1 / 10, nrow = 140, ncol = length(GP))
MSS.snow(MH.source, Inf, p.ndx.ls, f.d, prop.mx, 140, 14)
sfStop()
##### in fact, the answer is simply:
mean(xx)
}
## Run the code above in your browser using DataLab