#!/usr/bin/env python
# coding: utf-8

# # Convert point cloud data to [COPC](https://COPC.io) format

# Convert .laz to .copc.laz using the [pdal](https://pdal.io/en/stable/) library, following the workflow in this [blog post](https://bertt.wordpress.com/2022/03/29/cloud-optimized-point-cloud-copc/). The blog post reprojects from the native projection to Web Mercator before converting to COPC, but [it's not necessary to reproject the data](https://github.com/copcio/copcio.github.io/discussions/57#discussioncomment-4656516), so we use a single pipeline to convert the original .laz to COPC.

# In[1]:


import json

import pdal
import fsspec


# In[2]:


fs_read = fsspec.filesystem('s3', anon=True)


# In[3]:


url = 's3://cmgp-sfm-public-read-bucket/RSCC_EventResponse_Jan2023/20230105_MB_SC_Cap_UltraHi_NAD83_2011_UTM10N_ConfWater_Trimmed.laz'
fs_read.info(url)


# In[4]:


# fs_read.ls('s3://cmgp-sfm-public-read-bucket/RSCC_EventResponse_Jan2023/LAZ_for_Release_2023_1_11/')  # bucket doesn't allow anonymous listing


# In[5]:


from pathlib import Path

p = Path(url)
local_laz_file = f'{p.stem}.laz'
local_laz_file


# In[6]:


fs_local = fsspec.filesystem('file')


# In[7]:


if not fs_local.exists(local_laz_file):
    fs_read.download(url, local_laz_file)


# #### Convert LAZ to COPC

# In[8]:


local_copc_file = f"{p.stem}.6339.copc.laz"   # 6339 = EPSG code for NAD83(2011) / UTM zone 10N

write_copc_pipeline = [
    {
        "type": "readers.las",
        "filename": local_laz_file
    },
    {
        "type": "writers.copc",
        "filename": local_copc_file
    }
]


# In[9]:


# pdal.Pipeline expects a JSON string, so serialize with json.dumps
# (more robust than str(...).replace("'", '"'))
write_copc_pipeline = json.dumps(write_copc_pipeline)


# In[10]:


pipeline = pdal.Pipeline(write_copc_pipeline)
count = pipeline.execute()


# (Appendices at the end of this notebook sketch a programmatic alternative to the JSON pipeline, a header-level sanity check of the COPC output, and a remote subset read.)

# #### Upload to ESIP public read bucket

# In[11]:


fs_write = fsspec.filesystem('s3', profile='esip-qhub')


# In[12]:


s3_copc_file = f's3://esip-qhub-public/testing/{local_copc_file}'
_ = fs_write.upload(local_copc_file, s3_copc_file)


# #### Check the uploaded file

# In[13]:


fs_read.info(s3_copc_file)


# #### View with the COPC viewer (click the URL printed by this cell)

# In[14]:


viewer_url = f'https://viewer.copc.io/?copc=https://esip-qhub-public.s3.amazonaws.com/testing/{local_copc_file}'
print(viewer_url)


# #### Notes on run environment

# This notebook was run on the ESIP Nebari deployment (JupyterHub on Kubernetes) at https://nebari.esipfed.org, using the 32GB server type (the conversion ran out of memory on the 8GB and 16GB server types). We used this conda environment:
# ```
# name: pdal
# channels:
#   - conda-forge
# dependencies:
#   - python=3.10
#   - pdal
#   - python-pdal
#   - fsspec
#   - s3fs
#   - ipykernel
# ```
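
# #### Appendix: programmatic pipeline construction

# python-pdal 3.x also provides stage constructors that can be composed with `|`, which avoids the JSON round-trip above entirely. A minimal sketch, assuming python-pdal >= 3.0 (not the method used in the run recorded above):

# In[ ]:


# Same LAZ -> COPC conversion, built from stage objects instead of a JSON string.
pipeline = pdal.Reader.las(filename=local_laz_file) | pdal.Writer.copc(filename=local_copc_file)
count = pipeline.execute()
print(f"{count} points written to {local_copc_file}")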
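
# #### Appendix: sanity-check the COPC output

# A minimal validation sketch, assuming python-pdal 3.x, whose `Pipeline.quickinfo` property returns reader metadata (bounds, point count, SRS) from the file header without reading the points, and a PDAL build (>= 2.4) that includes `readers.copc`:

# In[ ]:


# Header-level summary of the converted file; readers.copc will refuse a file
# that is not valid COPC, so this doubles as a format check.
info_pipeline = pdal.Pipeline(json.dumps([
    {"type": "readers.copc", "filename": local_copc_file}
]))
info = info_pipeline.quickinfo   # does not decompress the point data
print(json.dumps(info, indent=2))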
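
# #### Appendix: read a spatial subset straight from S3

# The payoff of COPC is that the octree layout supports HTTP range requests, so PDAL (or a viewer) can fetch only the octree nodes intersecting a query window instead of downloading the whole file. A hedged sketch, again assuming `readers.copc` is available; the bounds below are hypothetical placeholders and should be replaced with a window inside the data extent (EPSG:6339 coordinates):

# In[ ]:


subset_pipeline = pdal.Pipeline(json.dumps([
    {
        "type": "readers.copc",
        "filename": f"https://esip-qhub-public.s3.amazonaws.com/testing/{local_copc_file}",
        # hypothetical 200 m x 200 m window; PDAL bounds syntax is ([xmin, xmax], [ymin, ymax])
        "bounds": "([552000, 552200], [4072000, 4072200])"
    }
]))
n_points = subset_pipeline.execute()
print(f"points in subset: {n_points}")
points = subset_pipeline.arrays[0]   # structured numpy array (X, Y, Z, Intensity, ...)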