## 10 member ensemble for the O3 data
if (FALSE) {
  # Fit a spatial process model to day 16 of the Midwest ozone data,
  # using Matern smoothness = .5 (the exponential covariance).
  data("ozone2")
  fitObject <- spatialProcess(ozone2$lon.lat, ozone2$y[16, ],
                              smoothness = .5)
  # Modest grid over the observation domain for simulation.
  nx <- 35
  ny <- 25
  xGridList <- fields.x.to.grid(fitObject$x, nx = nx, ny = ny)
  xGrid <- make.surface.grid(xGridList)
  # Exact conditional simulation, M = 5 draws -- timed for comparison below.
  allTime0 <- system.time(
    look0 <- sim.spatialProcess(fitObject, xp = xGrid, M = 5)
  )
  print(allTime0)
  # For M = 5 this does not make much sense ... however here are the
  # Monte Carlo based prediction standard deviations.
  # NOTE: named predictSEMonteCarlo (not predictSE) so the local variable
  # does not shadow the fields predictSE() function referenced below.
  predictSEMonteCarlo <- apply(look0, 1, sd)
  # compare to predictSE(fitObject, xp = xGrid)
  ## Local simulation with the same grid size for prediction.
  ## This runs much faster compared to the exact method above as
  ## nx, ny are increased e.g. nx = 128, ny = 128 has dramatic difference.
  allTime1 <- system.time(
    look <- simLocal.spatialProcess(fitObject, M = 5, nx = nx, ny = ny,
                                    NNSize = 5)
  )
  print(allTime1)
  print(look$timing)
  # Same again, adding fast approximate prediction of the conditional mean.
  allTime2 <- system.time(
    look <- simLocal.spatialProcess(fitObject, M = 5, nx = nx, ny = ny,
                                    NNSize = 5,
                                    NNSizePredict = 6,
                                    fast = TRUE)
  )
  print(allTime2)
  print(look$timing)
}
if (FALSE) {
  ## A simple example for setting up a bootstrap.
  ## M below should be set to a much larger sample size
  ## (e.g. M <- 200) for better statistics.
  data(ozone2)
  obj <- spatialProcess(ozone2$lon.lat, ozone2$y[16, ])
  aHat <- obj$summary["aRange"]
  lambdaHat <- obj$summary["lambda"]
  ######### bootstrap
  # create M independent copies of the observation vector
  # here we just grab the model information from the
  # spatialProcess object above.
  #
  # However, one could just create the list
  # obj<- list( x= ozone2$lon.lat,
  #        cov.function.name="stationary.cov",
  #    summary= c( tau= 9.47, sigma2= 499.79, aRange= .700),
  #    cov.args= list( Covariance="Matern", smoothness=1.0),
  #         weights= rep( 1, nrow(ozone2$lon.lat) )
  #             )
  # Here the summary component has the parameters
  # tau, sigma2 and aRange
  # and the cov.args component has the remaining ones.
  set.seed(223)
  M <- 25
  ySynthetic <- simSpatialData(obj, M)
  # Preallocate a list and rbind once at the end. This avoids the
  # O(M^2) copying caused by growing bootSummary with rbind() in a loop.
  summaryList <- vector("list", M)
  for (k in seq_len(M)) {
    cat(k, " ")
    # here the MLEs are found using the easy top level wrapper
    # see mKrigMLEJoint for a more efficient strategy
    summaryList[[k]] <- spatialProcess(obj$x, ySynthetic[, k],
                                       cov.params.start = list(
                                         aRange = aHat,
                                         lambda = lambdaHat)
                                       )$summary
  }
  bootSummary <- do.call(rbind, summaryList)
  cat(fill = TRUE)
  # the results and 95% quantile-based confidence intervals
  stats(bootSummary)
  obj$summary
  tmpBoot <- bootSummary[, c("lambda", "aRange")]
  confidenceInterval <- apply(tmpBoot, 2,
                              quantile, probs = c(0.025, 0.975))
  # compare to estimates used as the "true" parameters
  obj$summary[2:5]
  print(t(confidenceInterval))
  # compare to confidence interval using large sample theory
  print(obj$CITable)
}
if (FALSE) {
  # Conditional simulation with covariates: the Colorado climate example.
  # Note how elevation enters either as a vector or as an image/grid.
  data(COmonthlyMet)
  fit1E <- spatialProcess(CO.loc, CO.tmin.MAM.climate, XMat = CO.elev)
  # Conditional simulation applied to filling in missing data.
  hasData <- !is.na(CO.tmin.MAM.climate)
  infill <- sim.spatialProcess(fit1E, xp = CO.loc[!hasData, ],
                               XMat = CO.elev[!hasData], M = 10)
  # Conditional simulation on a grid with covariates.
  # An elevation grid ... NGRID <- 50 gives a nicer image but takes longer.
  NGRID <- 25
  # lon/lat grid over Colorado
  COGrid <- list(x = seq(-109.75, -100.5, length.out = NGRID),
                 y = seq(36, 41.75, length.out = NGRID))
  COGridPoints <- make.surface.grid(COGrid)
  # Elevations are a bilinear interpolation from the 4 km
  # Rocky Mountain elevation fields data set.
  data(RMelevation)
  # COElevTmp<- interp.surface.grid(RMelevation,COGrid )
  # check that predicted surface is OK
  #look<- predictSurface( fit1E, gridList=COGrid, XMat=COElevTmp )
  # elevations as a vector
  COElevGrid <- interp.surface(RMelevation, COGridPoints)
  # NOTE: this call to sim.spatialProcess treats the grid points as just
  # a matrix of locations; the plot must "reshape" them back into a grid
  # for use with image.plot.
  SEout <- sim.spatialProcess(fit1E, xp = COGridPoints,
                              XMat = COElevGrid, M = 30)
  # for just the smooth surface in lon/lat
  # SEout<- sim.spatialProcess( fit1E, xp=COGridPoints,
  #                    drop.XMat=TRUE, M= 30)
  # Of course, M should be larger to reduce Monte Carlo error.
  surSE <- apply(SEout, 1, sd)
  image.plot(as.surface(COGridPoints, surSE))
  points(fit1E$x, col = "magenta", pch = 16)
}
### Approximate conditional simulation
if (FALSE) {
  # Create a larger lon/lat grid.
  # See NNSize and NNSizePredict for additional arguments that control
  # the size of the nearest neighborhoods used.
  NGRID <- 200
  COGrid2 <- list(x = seq(-109.75, -100.5, length.out = NGRID),
                  y = seq(36, 41.75, length.out = NGRID))
  # Interpolate elevations to this grid.
  COElevGrid2 <- interp.surface.grid(RMelevation, COGrid2)
  # check that predicted surface is OK
  # look<- predictSurface.mKrig( fit1E, gridList=COGrid2, XMatGrid=COElevGrid2, fast=TRUE )
  set.seed(222)
  system.time(
    SEout0 <- simLocal.spatialProcess(fit1E, COGrid2,
                                      XMatGrid = COElevGrid2$z,
                                      M = 10)
  )
  # Now with rapid, approximate prediction.
  set.seed(222)
  system.time(
    SEout1 <- simLocal.spatialProcess(fit1E, COGrid2,
                                      XMatGrid = COElevGrid2$z,
                                      M = 50, fast = TRUE)
  )
  # Monte Carlo prediction standard errors over the draws.
  look <- apply(SEout1$z, c(1, 2), sd)
  imagePlot(SEout1$x, SEout1$y, look, col = viridis(256))
  points(fit1E$x, pch = 16, cex = .5, col = "magenta")
  title("Monte Carlo prediction SE")
}
#### example using Krig object and exact conditional simulation.
if (FALSE) {
  data(ozone2)
  set.seed(399)
  # Fit to day 16 from the Midwest ozone data set.
  out <- Krig(ozone2$lon.lat, ozone2$y[16, ], Covariance = "Matern",
              aRange = 1.0, smoothness = 1.0, na.rm = TRUE)
  # NOTE: aRange = 1.0 is not the best choice, but keeps this example simple.
  # Locations of the six missing observations:
  xp <- ozone2$lon.lat[is.na(ozone2$y[16, ]), ]
  # 30 draws from the process at xp given the data.
  sim.out <- sim.Krig(out, xp, M = 30)
}
# Run the code above in your browser using DataLab