###########################################################################
############################# Run this set up code: #######################
###########################################################################
# set seed:
seed=38
# Define training and test files:
qdata.trainfn = paste(system.file(package="ModelMap"),"/external/DATATRAIN.csv",sep="")
qdata.testfn = paste(system.file(package="ModelMap"),"/external/DATATEST.csv",sep="")
# Define folder for all output:
folder=getwd()
# Create a list of the filenames (including paths) for the rast Look up Tables:
rastLUTfn=list( paste(system.file(package="ModelMap"),"/external/LUT_2001.csv",sep=""),
paste(system.file(package="ModelMap"),"/external/LUT_2004.csv",sep=""))
# Load rast LUT tables, and add path to the filenames in column 1:
rastLUT <- lapply(rastLUTfn, function(x){
    y <- read.table(x, header=FALSE, sep=",", stringsAsFactors=FALSE)
    y[,1] <- paste(system.file(package="ModelMap"), "external", y[,1], sep="/")
    return(y)})
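# Optional sanity check (a minimal sketch): confirm that every raster file
# listed in column 1 of each look up table exists on disk.
for(lut in rastLUT){
    missing <- lut[!file.exists(lut[,1]), 1]
    if(length(missing) > 0) print(paste("Missing raster file:", missing))
}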
# Define identifier for individual training and test data points:
unique.rowname="ID"
# Define the number of rows of raster to read in at one time.
# If the run crashes with the warning "unable to assign...", lower this number.
numrows=500
###########################################################################
############## Pick one of the following sets of definitions: #############
###########################################################################
########## Continuous Response, Continuous Predictors ############
#file name to store model:
MODELfn="RF_Bio_TC"
#file name for validation predictions:
MODELpredfn="RF_Bio_TC_PRED.csv"
#names from column 2 of rastLUT:
predList=c("TCB","TCG","TCW")
#define which predictors are categorical:
predFactor=FALSE
# Response name and type:
response.name="BIO"
response.type="continuous"
# Map name:
asciifn<-c("RF_Bio_TC_01.txt","RF_Bio_TC_04.txt")
asciifn<-paste(folder,asciifn,sep="/")
########## Binary Response, Continuous Predictors ############
#file name to store model:
MODELfn="RF_CONIFTYP_TC"
#file name for validation predictions:
MODELpredfn="RF_CONIFTYP_TC.csv"
#names from column 2 of rastLUT:
predList=c("TCB","TCG","TCW")
#define which predictors are categorical:
predFactor=FALSE
# Response name and type:
response.name="CONIFTYP"
# This variable is 1 if a conifer or mixed conifer type is present,
# otherwise 0.
response.type="binary"
# Map name:
asciifn<-c("RF_CONIFTYP_TC_01.txt","RF_CONIFTYP_TC_04.txt")
asciifn<-paste(folder,asciifn,sep="/")
########## Continuous Response, Categorical Predictors ############
# In this example, NLCD is a categorical predictor.
#
# You must decide what should happen if there are categories present in the
# data to be predicted (either in the validation/test set or in the image
# file) that were not present in the original training data. Choices:
#   na.action = "na.omit"
#     Any validation datapoint or image pixel with a value for any
#     categorical predictor not found in the training data will be
#     returned as NA.
#   na.action = "na.roughfix"
#     Any validation datapoint or image pixel with a value for any
#     categorical predictor not found in the training data will have
#     the most common category for that predictor substituted, and a
#     prediction will be made.
# You must also let R know which of the predictors are categorical, in other
# words, which ones R needs to treat as factors. This vector (predFactor,
# below) must be a subset of the predictors given in predList.
# A sketch for checking the data for unseen categories follows this
# definition set.
#file name to store model:
MODELfn="RF_BIO_TCandNLCD"
#file name for validation predictions:
MODELpredfn="RF_BIO_TCandNLCD_PRED.csv"
#names from column 2 of rastLUT:
predList=c("TCB","TCG","TCW","NLCD")
#define which predictors are categorical:
predFactor=c("NLCD")
# Response name and type:
response.name="BIO"
response.type="continuous"
# Map name:
asciifn<-c( "RF_BIO_TCandNLCD_01.txt","RF_BIO_TCandNLCD_04.txt")
asciifn<-paste(folder,asciifn,sep="/")
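# Optional check related to the na.action discussion above: list any NLCD
# categories that appear in the test data but not in the training data.
# A minimal sketch; it assumes the training and test CSVs each contain an
# "NLCD" column (check the file headers before relying on this).
train.qdata <- read.csv(qdata.trainfn)
test.qdata  <- read.csv(qdata.testfn)
unseen <- setdiff(unique(test.qdata$NLCD), unique(train.qdata$NLCD))
if(length(unseen) > 0){
    print(paste("NLCD categories in test but not training data:",
                paste(unseen, collapse=", ")))
}else{
    print("No unseen NLCD categories in the test data.")
}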
###########################################################################
############### Then run this code to build the model: ###################
###########################################################################
### Create the model before batching (run this step only once) ###
model.obj = model.map( model.obj=NULL,
model.type="RF",
qdata.trainfn=qdata.trainfn,
qdata.testfn=qdata.testfn,
folder=folder,
MODELfn=MODELfn,
rastLUT=rastLUT[[1]],
predList=predList,
predFactor=predFactor,
response.name=response.name,
response.type=response.type,
unique.rowname=unique.rowname,
seed=seed,
# Model Validation Arguments
predict=FALSE,
# Mapping arguments
map=FALSE
)
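# Optional inspection of the fitted model. A minimal sketch, assuming that
# for model.type="RF" the object returned by model.map() is a randomForest
# model; skip this if your model.obj is structured differently.
if(inherits(model.obj, "randomForest")){
    print(model.obj)                              # out-of-bag error summary
    print(randomForest::importance(model.obj))    # variable importance scores
}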
###########################################################################
#### Then run this code to make validation predictions and diagnostics: ####
###########################################################################
model.obj = model.map( model.obj=model.obj,
qdata.trainfn=qdata.trainfn,
qdata.testfn=qdata.testfn, # set qdata.testfn=FALSE to use out-of-bag (OOB) predictions on the training data
folder=folder,
MODELfn=MODELfn,
rastLUT=rastLUT[[1]],
predList=predList,
predFactor=predFactor,
response.name=response.name,
response.type=response.type,
unique.rowname=unique.rowname,
seed=seed,
# Model Validation Arguments
predict=TRUE,
diagnostics=TRUE,
DIAGNOSTICfn=MODELfn,
device.type=c("jpeg","pdf"),
MODELpredfn=MODELpredfn,
v.fold=FALSE,
na.action="na.roughfix",
# Mapping arguments
map=FALSE
)
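# Optional: take a quick look at the validation predictions. A minimal
# sketch; it assumes the predictions were written to
# file.path(folder, MODELpredfn) and that the file has "obs" and "pred"
# columns - check names(pred.tab) against the actual header first.
pred.tab <- read.csv(file.path(folder, MODELpredfn))
print(head(pred.tab))
if(all(c("obs","pred") %in% names(pred.tab)) && response.type=="continuous"){
    rmse <- sqrt(mean((pred.tab$obs - pred.tab$pred)^2, na.rm=TRUE))
    print(paste("Validation RMSE:", round(rmse, 3)))
}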
###########################################################################
################# Then run this code to create maps: ######################
###########################################################################
### Batch mapping (the model must already have been created) ###
load(paste(folder,"/",MODELfn,sep=""))
for(i in 1:length(rastLUTfn)){
print("##########################################################")
print(paste("Starting",asciifn[i]))
print("##########################################################")
model.obj = model.map( model.obj=model.obj,
folder=folder,
rastLUT=rastLUT[[i]],
seed=seed,
# Model Validation Arguments
predict=FALSE,
na.action="na.roughfix",
# Mapping arguments
map=TRUE,
numrows = numrows,
asciifn=asciifn[i]
)
}
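# Optional: view one of the output maps. A minimal sketch using the raster
# package (an assumption - any reader of Arc ASCII grids will do), and
# assuming model.map() wrote the grids to the paths given in asciifn.
if(require(raster, quietly=TRUE)){
    map.1 <- raster(asciifn[1])
    plot(map.1, main=basename(asciifn[1]))
}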