# NOT RUN {
data(ozone2)
# x: two-column matrix, each row a station location in lon/lat coordinates
x <- ozone2$lon.lat
# y: ozone measurements on day 16 (contains some missing values)
y <- ozone2$y[16, ]
# artificially shrink the data set so this quick example passes CRAN checks
x <- x[1:75, ]
y <- y[1:75]
# Many defaults are accepted here -- see gridN to enlarge the grids used
# in the MLE searches. With lambda and aRange left unspecified, both are
# found robustly by grid searches. Profiling over lambda and aRange is
# not required but completes the full example; omit it for a faster run.
fit <- spatialProcess(x, y, profileLambda = TRUE, profileARange = TRUE)
# summary of the fitted model
summary(fit)
# diagnostic plots
set.panel(2, 2)
plot(fit)
# panel 1: data vs. predicted values
# panel 2: residuals vs. predicted
# panel 3: criteria for selecting the smoothing parameter
#          lambda = tau^2 / sigma2 (x axis is log10 lambda);
#          note GCV is minimized while the log profile likelihood
#          is maximized
# panel 4: log profile likelihood used to determine the range
#          parameter aRange
#
set.panel()
# predictions on a grid
surface(fit, xlab = "longitude", ylab = "latitude")
US(add = TRUE, col = "grey", lwd = 2)
title("Predicted ozone (in PPB) June 18, 1987 ")
# (see predictSurface for finer control of the evaluation grid,
#  predicting outside the convex hull of the data, and plotting)
# prediction standard errors -- two steps now: generate the surface,
# then plot it
seSurface <- predictSurfaceSE(fit)
surface(seSurface, xlab = "longitude", ylab = "latitude")
points(x, col = "magenta")
title("prediction standard errors (PPB)")
# Sanity check: calling spatialProcess with the MLEs found above had
# better reproduce the same predictions!
fitCheck <- spatialProcess(x, y,
                           lambda = fit$MLESummary["lambda"],
                           aRange = fit$MLESummary["aRange"]
)
test.for.zero(fitCheck$fitted.values, fit$fitted.values,
              tag = "sanity check")
# }
# NOT RUN {
# Working with covariates and filling in missing station data using an
# ensemble method. See the example under help(sim.spatialProcess) for
# how to handle a conditional simulation on a grid of predictions with
# covariates.
data(COmonthlyMet)
fit1E <- spatialProcess(CO.loc, CO.tmin.MAM.climate, Z = CO.elev,
                        gridARange = seq(.25, 2.0, length.out = 10)
)
set.panel(2, 2)
plot(fit1E)
# conditional simulation at the missing-data locations
missingObs <- is.na(CO.tmin.MAM.climate)
xMissing <- CO.loc[missingObs, ]
zMissing <- CO.elev[missingObs]
infill <- sim.spatialProcess(fit1E, xp = xMissing,
                             Z = zMissing, M = 10)
#
# Interpretation: these infilled values are all equally plausible given
# the observations and the estimated covariance model.
#
# EXTRA CREDIT: standardize the infilled values to have the conditional
# mean and variance from the exact computations, e.g.
#   predict(fit1E, xp = CO.loc[!good, ], Z = CO.elev[!good])
#   predictSE(fit1E, xp = CO.loc[!good, ], Z = CO.elev[!good])
# With this standardization one would still preserve the correlations
# among the infilled values, which also matters when considering them
# as a multivariate prediction.
# Conditional simulation on a grid, but without the elevation covariate:
fit2 <- spatialProcess(CO.loc, CO.tmin.MAM.climate,
                       gridARange = seq(.25, 2.0, length.out = 10)
)
# note the larger range parameter
# create 2500 grid points using a handy fields function
gridList <- fields.x.to.grid(fit2$x, nx = 50, ny = 50)
xGrid <- make.surface.grid(gridList)
ensemble <- sim.spatialProcess(fit2, xp = xGrid, M = 5)
# This is an "n^3" computation, so increasing the grid size can slow
# the computation down considerably.
image.plot(as.surface(xGrid, ensemble[1, ]))
set.panel()
# }
# NOT RUN {
# }
# NOT RUN {
## Changing the covariance model.
data(ozone2)
x <- ozone2$lon.lat
y <- ozone2$y[16, ]
# A comparison of the default (Whittle, i.e. Matern smoothness 1.0) fit
# to fits using an exponential and a Wendland covariance function, with
# great circle distance -- just to make the range easier to interpret.
obj <- spatialProcess(x, y,
                      Distance = "rdist.earth")
obj2 <- spatialProcess(x, y,
                       cov.args = list(Covariance = "Exponential"),
                       Distance = "rdist.earth")
obj3 <- spatialProcess(x, y,
                       cov.args = list(Covariance = "Wendland",
                                       dimension = 2,
                                       k = 2),
                       Distance = "rdist.earth")
# obj2 could also be fit using the argument:
#   cov.args = list(Covariance = "Matern", smoothness = .5)
#
# Note very different range parameters -- BTW these are in miles --
# but similar nugget variances.
rbind(Whittle = obj$summary,
      Exp = obj2$summary,
      Wendland = obj3$summary
)
# Since the exponential is a Matern with smoothness == .5, the first two
# fits can be compared in terms of their likelihoods: the ln likelihood
# value is slightly higher for obj versus obj2 (-613.9 > -614.9),
# suggesting a preference for the smoothness = 1.0 (Whittle) model.
#
# Does it really matter in terms of spatial prediction?
set.panel(3, 1)
surface(obj)
US(add = TRUE)
title("Matern sm= 1.0")
surface(obj2)
US(add = TRUE)
title("Matern sm= .5")
surface(obj3)
US(add = TRUE)
title("Wendland k =2")
# Prediction standard errors: these take a while because the errors are
# based directly on the Kriging weight matrix -- see mKrig for an
# alternative.
set.panel(2, 1)
out.p <- predictSurfaceSE(obj, nx = 40, ny = 40)
surface(out.p)
US(add = TRUE)
title("Matern sm= 1.0")
points(x, col = "magenta")
#
# BUG FIX: the original recomputed the SE surface from obj (smoothness
# 1.0) but labeled the plot "Matern sm= .5"; obj2 is the exponential
# (smoothness .5) fit and is what this panel should show.
out.p <- predictSurfaceSE(obj2, nx = 40, ny = 40)
surface(out.p)
US(add = TRUE)
points(x, col = "magenta")
title("Matern sm= .5")
set.panel(1, 1)
# }
# NOT RUN {
# }
# NOT RUN {
### comparison with GeoR
data(ozone2)
x<- ozone2$lon.lat
y<- ozone2$y[16,]
good<-!is.na(y)
x1<- x[good,]
y1<- y[good]
# spatialProcess takes longer because of grid search on aRange.
obj<- spatialProcess( x1, y1,
mKrig.args = list(m=1),
smoothness = .5)
library( geoR)
ml.n <- likfit(coords= x1, data=y1, ini = c(570, 3), nug = 50)
# compare to
stuffFields<- obj$MLESummary[c("lnProfileLike.FULL",
"aRange","tau","sigma2")]
stuffGeoR<- c( ml.n$loglik, ml.n$phi, sqrt(ml.n$nugget),
ml.n$sigmasq)
test.for.zero( max(stuffFields/stuffGeoR), 1, tol=.004)
# }
# Run the code above in your browser using DataLab