# NOT RUN {
# }
# NOT RUN {
library(googleCloudStorageR)
library(bigQueryR)
## Set the default Cloud Storage bucket used by subsequent gcs_* calls.
## NOTE(review): "your-project" is a placeholder — replace with your actual
## bucket name (here the example reuses the project id as the bucket name).
gcs_global_bucket("your-project")
## Custom object_function for gcs_upload(): serialise `input` to `output`
## as a bare CSV — comma-separated, no header row, no row names, and no
## quoting — so BigQuery can load it against an explicitly supplied schema.
## (`qmethod = "double"` only matters when quoting is on; kept for parity.)
f <- function(input, output) {
  write.table(
    input,
    file = output,
    sep = ",",
    row.names = FALSE,
    col.names = FALSE,
    quote = FALSE,
    qmethod = "double"
  )
}
## upload files to Google Cloud Storage
## Two copies of mtcars are written with the custom writer `f` above, so the
## stored objects are headerless, unquoted CSVs (they go to the bucket set
## via gcs_global_bucket()).
gcs_upload(mtcars, name = "mtcars_test1.csv", object_function = f)
gcs_upload(mtcars, name = "mtcars_test2.csv", object_function = f)
## create the schema of the files you just uploaded
## NOTE(review): schema_fields() presumably derives a BigQuery field schema
## from the local data.frame; mtcars must match the CSVs uploaded above —
## confirm against the bigQueryR documentation.
user_schema <- schema_fields(mtcars)
## Load the two CSV objects straight from Cloud Storage into the BigQuery
## table test.from_gcs_mtcars, using the schema derived above (the files
## have no header row, so an explicit schema is required).
bqr_upload_data(
  projectId   = "your-project",
  datasetId   = "test",
  tableId     = "from_gcs_mtcars",
  ## sprintf() is vectorised over 1:2, yielding the same two gs:// URIs
  upload_data = sprintf("gs://your-project/mtcars_test%d.csv", 1:2),
  schema      = user_schema
)
## for big files, it's helpful to create your schema on a small sample
## a quick way to do this on the command line is:
# "head bigfile.csv > head_bigfile.csv"
# }
# NOT RUN {
# }
# Run the code above in your browser using DataCamp Workspace