## 1. test validity methods
# this should work fine
# load the example dataset bundled with the package (plain data.frame)
data(sp1)
# promote to a SoilProfileCollection; NOTE(review): promotion appears to
# validate horizon boundaries (the NA-top example further below is expected
# to throw an error) -- confirm against aqp docs
depths(sp1) <- id ~ top + bottom
# horizon logic can be tested via data.frame and
# test_hz_logic(i, topcol, bottomcol, test.NA=TRUE, strict=FALSE)
## 2. use the digest library to detect duplicate data
data(sp1)

# make a copy, give the copy new profile IDs, and stack the two sets
s.1 <- sp1
s.2 <- sp1
s.2$id <- paste0(s.2$id, '-copy')
s <- rbind(s.1, s.2)
depths(s) <- id ~ top + bottom

# digests are computed from horizon-level data only:
# horizon boundaries and 'prop'
# result is an index of unique profiles
# digest is an optional dependency here, so guard its use; everything that
# needs `u` must stay inside the guard (otherwise `u` is undefined when
# digest is not installed)
if (require(digest)) {
  u <- unique(s, vars = c('top', 'bottom', 'prop'))
  # compare with and without dupes:
  # note subsetting of SoilProfileCollection
  cbind(dupes = length(s), no.dupes = length(s[u, ]))
}
## 3. concatenate SoilProfileCollection objects
# plyr is a hard dependency of this section: use library() so a missing
# package fails loudly instead of require() silently returning FALSE
library(plyr)
# build 10 random profiles and stack them into one data.frame
d <- ldply(1:10, random_profile)

# promote to SoilProfileCollection and plot
depths(d) <- id ~ top + bottom
plot(d)

# split into new SoilProfileCollection objects by profile index
d.1 <- d[1, ]
d.2 <- d[2, ]
d.345 <- d[3:5, ]

# recombine; order of arguments does not need to match the original
d.new <- rbind(d.345, d.1, d.2)
plot(d.new)
# these next examples should throw an error
# insert a missing horizon boundary
data(sp1)
# an NA top depth is expected to make promotion fail
# (NOTE(review): this stops a sourced script at this point; wrap in try()
# if the remainder of the file must still run -- confirm intended usage)
sp1$top[1] <- NA
depths(sp1) <- id ~ top + bottom
# insert a bogus horizon boundary
## NOTE: this is currently valid, as this check breaks slice(SPC, ...)
data(sp1)
# overlapping/incorrect top depth; per the note above, promotion currently
# accepts this without error
sp1$top[2] <- 30
depths(sp1) <- id ~ top + bottom
# (Run the code above in your browser using DataLab)