if (FALSE) {
# Create a folder to store the data
data_folder <- "data_raw"
dir.create(data_folder)
# Load crop production data
crop_production <- get_faostat_bulk(code = "QCL", data_folder = data_folder)
# Cache the file, i.e. save the data frame in the serialized RDS format for faster loading later.
saveRDS(crop_production, "data_raw/crop_production_e_all_data.rds")
# Now you can load your local version of the data from the RDS file
crop_production <- readRDS("data_raw/crop_production_e_all_data.rds")
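# Quick sanity check on the cached data frame (an illustrative sketch only;
# column names such as area, item, element, year and value are assumptions
# based on the normalized FAOSTAT layout and may differ in your download).
str(crop_production)
summary(crop_production$value)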
# Use the lower level functions to download zip files,
# then read the zip files in separate function calls.
# In this example, to avoid a warning about "examples lines wider than 100 characters",
# the URL is split in two parts: a common part 'url_bulk_site' and a .zip file name part.
# In practice you can enter the full URL directly as the `url_bulk` argument.
# Notice also that I have chosen to load global data in long format (normalized).
url_bulk_site <- "https://fenixservices.fao.org/faostat/static/bulkdownloads"
url_crops <- file.path(url_bulk_site, "crop_production_E_All_Data_(Normalized).zip")
url_forestry <- file.path(url_bulk_site, "Forestry_E_All_Data_(Normalized).zip")
# Download the files
download_faostat_bulk(url_bulk = url_forestry, data_folder = data_folder)
download_faostat_bulk(url_bulk = url_crops, data_folder = data_folder)
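# Optional check (not part of the original example): confirm that the
# zip files were downloaded into the data folder.
list.files(data_folder, pattern = "\\.zip$")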
# Read the files and assign them to data frames
crop_production <- read_faostat_bulk("data_raw/crop_production_E_All_Data_(Normalized).zip")
forestry <- read_faostat_bulk("data_raw/Forestry_E_All_Data_(Normalized).zip")
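# Example of working with the long (normalized) format: subset one area and
# item with base R. The column names and values used here ("area", "item",
# "year", "unit", "value", "World", "Wheat") are assumptions and may need
# adjusting to match the actual data.
wheat_world <- subset(crop_production,
                      area == "World" & item == "Wheat",
                      select = c("year", "element", "unit", "value"))
head(wheat_world)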
# Save the data frame in the serialized RDS format for fast reuse later.
saveRDS(crop_production, "data_raw/crop_production_e_all_data.rds")
saveRDS(forestry, "data_raw/forestry_e_all_data.rds")
}