# Build a catalog
ctg <- readLAScatalog("filder/to/las/files/")
# Set some options
opt_filter(ctg) <- "-keep_first"
# summary() shows how the catalog will be processed
summary(ctg)
# We can seamlessly use lidR functions
hmean <- grid_metrics(ctg, mean(Z), 20)
ttops <- tree_detection(ctg, lmf(5))
# For low-memory configurations it is probably advisable not to load entire files
# but to process smaller chunks instead
opt_chunk_size(ctg) <- 500
# Outputs are expected to be strictly identical
hmean <- grid_metrics(ctg, mean(Z), 20)
ttops <- tree_detection(ctg, lmf(5))
# Sometimes the output is likely to be very large
dtm <- grid_terrain(ctg, 1, tin())
# In that case it is advisable to write the output(s) to files
opt_output_files(ctg) <- "path/to/folder/DTM_chunk_{XLEFT}_{YBOTTOM}"
# Rasters will be written to disk. The list of written files is returned,
# or, in this specific case, a virtual raster mosaic.
dtm <- grid_terrain(ctg, 1, tin())
# When chunks are whole files, the original names of the las files can be preserved
opt_chunk_size(ctg) <- 0
opt_output_files(ctg) <- "path/to/folder/DTM_{ORIGINALFILENAME}"
dtm <- grid_terrain(ctg, 1, tin())
# For some functions, files MUST be written to disk because R cannot hold
# the entire output in memory.
opt_chunk_size(ctg) <- 0
opt_output_files(ctg) <- "path/to/folder/{ORIGINALFILENAME}_norm"
opt_laz_compression(ctg) <- TRUE
new_ctg <- lasnormalize(ctg, tin())
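# The returned object is itself a LAScatalog pointing to the newly written
# normalized files, so it can be fed back into the engine. The call below is
# an illustrative sketch, not part of the original example; grid_canopy with
# p2r() is just one possible follow-up computation.
chm <- grid_canopy(new_ctg, 1, p2r())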
# The user has access to the catalog engine through the function catalog_apply
output <- catalog_apply(ctg, FUN, ...)
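# As an illustrative sketch only (not part of the original example): FUN must be
# a user-defined function that takes a chunk, reads it, and returns a result or
# NULL. The name 'my_mean_height' and its body are hypothetical.
my_mean_height <- function(chunk, res)
{
  las <- readLAS(chunk)            # load the chunk (it may include a buffer)
  if (is.empty(las)) return(NULL)  # skip empty chunks
  # any per-chunk computation; removing the buffer from the output is
  # omitted here for brevity
  grid_metrics(las, mean(Z), res)
}
output <- catalog_apply(ctg, my_mean_height, res = 20)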