#Getting temperature and relative humidity data for North Carolina, USA,
#6-12 hours from now depending on when the GFS model was last run.
#Get values near the ground (2 m above ground) and at the 800 mb level
#Then make a contour plot of the surface temperature.
#Use the Global Forecast System 0.5 x 0.5 degree model
library(rNOMADS)
#Get the URLs of the two most recent model runs
urls.out <- CrawlModels(abbrev = "gfs0.5", depth = 2)
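#(Hedged check: CrawlModels lists the most recent run first,
#which is why urls.out[1] is tried before urls.out[2] below.)
print(urls.out)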
#Get predictions, variables, and levels
#If the first URL fails, fall back to the second
#(sometimes there is a lag between the URL appearing
#and the data becoming available)
model.url <- urls.out[1]
model.parameters <- ParseModelPage(model.url)
#(A simple availability check; assumes ParseModelPage returns
#empty elements when the page has no data yet)
if(length(model.parameters$pred) == 0) {
    model.url <- urls.out[2]
    model.parameters <- ParseModelPage(model.url)
}
#Get the 6 hour forecast (the prediction string ending in "06")
pred.6 <- model.parameters$pred[grep("06$", model.parameters$pred)]
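#(Hedged sanity check: inspect what the model page actually offers
#before requesting data; pred, levels, and variables are the
#elements ParseModelPage returned above.)
head(model.parameters$pred)
head(model.parameters$levels)
head(model.parameters$variables)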
#What region of the atmosphere to get data for
#2 meters above the ground and at 800 mb
levels <- c("2 m above ground", "800 mb")
#What data to return
variables <- c("TMP", "RH") #Temperature and relative humidity
#What region of the world to get data for - omit this and you get the whole planet!
model.domain <- c(-84, -74, 37, 32) #Area around North Carolina, USA (W lon, E lon, N lat, S lat)
file.name <- "fcst.grb" #Name of file to save downloaded data to
#Get the data
GribGrab(model.url, pred.6, levels, variables,
    model.domain = model.domain, file.name = file.name)
#Read the data, only capturing temperature at the 2 m above ground level
model.data <- ReadGrib(file.name, c("2 m above ground"), c("TMP"))
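#(Optional sketch: the downloaded file also holds relative humidity
#at 800 mb, since we requested it above; it can be read the same way.)
rh.data <- ReadGrib(file.name, c("800 mb"), c("RH"))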
#Make it into an array
model.array <- ModelGrid(model.data)
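#model.array$z is indexed as [variable, level, x, y],
#so z[1, 1, , ] below selects the first variable (TMP)
#at the first (and only) level that was read in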
#Make a contour plot of the temperature around North Carolina, USA:
contour(x = model.array$x, y = model.array$y,
    z = model.array$z[1, 1, , ] - 273.15, #Convert from Kelvin to Celsius
    xlab = "Longitude", ylab = "Latitude",
    main = paste("North Carolina Temperatures for",
        model.array$fcst.date, "GMT in Celsius"))
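#(Optional sketch, not part of rNOMADS: overlay US state borders for
#reference, assuming the "maps" package is installed. If the model
#longitudes come back in 0-360 form, shift them to -180..180 first.)
library(maps)
map("state", add = TRUE)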