# Workflow for small-memory environments: download, crack, and save the
# bundles in small batches until all bundles are processed or the desired
# number of bundles is reached. For very constrained environments, consider
# also limiting the page size via the _count parameter of your FHIR search
# request.
library(fhircrackr)

url <- "http://hapi.fhir.org/baseR4/Observation"
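# As mentioned above, you could additionally cap the number of resources per
# bundle with the FHIR _count parameter, e.g. (50 is just an illustrative
# value, not a recommendation):
# url <- "http://hapi.fhir.org/baseR4/Observation?_count=50"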
count <- 0

# download, crack, and save at most 5 batches of 2 bundles each
while(!is.null(url) && count < 5){
  # download the next batch of up to 2 bundles
  bundles <- fhir_search(url, verbose = 2, max_bundles = 2)
  # flatten the Observation resources into a data.frame
  tables <- fhir_crack(bundles, list(Obs = list(resource = "//Observation")))
  # save the cracked tables to disk so the bundles can be discarded from memory
  save(tables, file = paste0(tempdir(), "/table_", count, ".RData"))
  count <- count + 1
  # URL of the next batch; NULL once all bundles have been processed
  url <- fhir_next_bundle_url()
}
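# A minimal sketch of how you might recombine the saved batches afterwards
# (assuming the loop above completed; rbind() requires identical columns
# across batches, otherwise consider e.g. data.table::rbindlist(fill = TRUE)):
all_obs <- do.call(rbind, lapply(seq_len(count) - 1, function(i) {
  load(paste0(tempdir(), "/table_", i, ".RData")) # restores the object 'tables'
  tables$Obs
}))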