#!/usr/bin/env python
# coding: utf-8

# ## Accessing Sentinel-3 SRAL data with the Planetary Computer STAC API
#
# The [Sentinel-3 SRAL instrument](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-altimetry) uses Synthetic Aperture Radar (SAR) to measure surface height, significant wave height, and wind speed over the ocean surface.
# There are two SRAL collections in the Planetary Computer:
#
# - Land measurements: `sentinel-3-sral-lan-l2-netcdf`
# - Water measurements: `sentinel-3-sral-wat-l2-netcdf`
#
# This notebook demonstrates accessing and visualizing data from both collections.
#
# ### Data Access
#
# This notebook works with or without an API key, but you will be given more permissive access to the data with an API key. If you are using the [Planetary Computer Hub](https://planetarycomputer.microsoft.com/compute) to run this notebook, then your API key is automatically set in the environment variable `PC_SDK_SUBSCRIPTION_KEY` when your server starts. Otherwise, you can view your keys by signing in to the [developer portal](https://planetarycomputer.developer.azure-api.net/). The API key may be set manually via the environment variable `PC_SDK_SUBSCRIPTION_KEY` or with the following code:
#
# ```python
# import planetary_computer
# planetary_computer.settings.set_subscription_key("<YOUR API KEY>")
# ```
#
# The datasets hosted by the Planetary Computer are available in [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/). We'll use [pystac-client](https://pystac-client.readthedocs.io/) to search the Planetary Computer's [STAC API](https://planetarycomputer.microsoft.com/api/stac/v1/docs) for the subset of the data that we care about, and then we'll load the data directly from Azure Blob Storage. We'll specify a `modifier` so that we can access the data stored in the Planetary Computer's private Blob Storage Containers. See [Reading from the STAC API](https://planetarycomputer.microsoft.com/docs/quickstarts/reading-stac/) and [Using tokens for data access](https://planetarycomputer.microsoft.com/docs/concepts/sas/) for more.
#
# ### Water measurements
#
# The collection's description provides more information about the water product.

# In[1]:


import planetary_computer
import pystac_client
from IPython.display import display, Markdown

catalog = pystac_client.Client.open(
    "https://planetarycomputer.microsoft.com/api/stac/v1",
    modifier=planetary_computer.sign_inplace,
)
collection = catalog.get_collection("sentinel-3-sral-wat-l2-netcdf")
display(Markdown(f"### {collection.id}\n\n{collection.description}"))


# ### Define the area of interest and search the water collection
#
# We'll search for items in the water collection.
# Since this is an altimetry product, the swaths of data are very narrow, making a point-based search impractical.
# Instead, we search for any item that intersects a polygon covering the Gulf of Mexico.
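#
# A polygon-only search can return many items across the full mission archive. If you want a smaller result set, `pystac-client` also accepts a `datetime` range alongside `intersects`; the sketch below uses the `geometry` defined in the next cell, and the one-month window shown is just an illustrative assumption, not part of the original search.
#
# ```python
# # Optional: restrict the search to a hypothetical one-month window
# search = catalog.search(
#     collections=["sentinel-3-sral-wat-l2-netcdf"],
#     intersects=geometry,
#     datetime="2022-01-01/2022-01-31",
# )
# ```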
# In[2]:


import xarray as xr
import fsspec

geometry = {
    "type": "Polygon",
    "coordinates": [
        [
            [-95.8376336, 28.3457752],
            [-92.3259525, 29.3100131],
            [-89.5198038, 28.8301497],
            [-87.049173, 30.183914],
            [-83.1468269, 28.6421247],
            [-81.6808886, 23.9413637],
            [-85.0055647, 22.0886049],
            [-90.4653539, 21.4938454],
            [-90.8618749, 19.4939949],
            [-92.9054832, 18.7158187],
            [-96.1081528, 19.1774025],
            [-97.6942368, 23.4665839],
            [-95.8376336, 28.3457752],
        ]
    ],
}

search = catalog.search(
    collections=["sentinel-3-sral-wat-l2-netcdf"], intersects=geometry
)
item = next(search.items())


# ### Available Assets and Metadata
#
# Each item includes a handful of assets linking to NetCDF files with the data or additional metadata files.

# In[3]:


import rich.table

t = rich.table.Table("Key", "Value")
for key, asset in item.assets.items():
    t.add_row(key, asset.description)
t


# ### Reading data
#
# We can use xarray to read each NetCDF file directly from Blob Storage.

# In[4]:


dataset = xr.open_dataset(fsspec.open(item.assets["standard-measurement"].href).open())
dataset


# ### Visualizing
#
# There are a huge number of variables here, and going into detail on them is beyond the scope of this example.
# To demonstrate data access and visualization, we'll plot a significant wave height value, `swh_ocean_01_ku`.
# This is 1 Hz data from the Ku band of the altimeter.
# Significant wave height is ["an average measurement of the largest 33% of waves."](https://www.weather.gov/mfl/waves)

# In[5]:


import pandas

data = pandas.DataFrame(
    {
        # Longitude values are between zero and 360, so we remap them to -180 to 180.
        "longitude": (dataset.lon_01.data.ravel() + 180) % 360 - 180,
        "latitude": dataset.lat_01.data.ravel(),
        "swh": dataset.swh_ocean_01_ku.load().data.ravel(),
    }
).dropna()
data


# In[11]:


import shapely.geometry
from cartopy.crs import PlateCarree
import matplotlib.pyplot as plt

fig, ax = plt.subplots(
    subplot_kw=dict(projection=PlateCarree()),
    figsize=(12, 8),
)
# add_geometries expects an iterable of geometries, so wrap the item footprint in a list.
ax.add_geometries(
    [shapely.geometry.shape(item.geometry)],
    crs=PlateCarree(),
    color="coral",
    alpha=0.1,
)
data.plot.scatter(
    x="longitude",
    y="latitude",
    c="swh",
    ax=ax,
    colormap="viridis",
    marker=".",
    alpha=0.8,
)
ax.coastlines();


# Not very interesting.
# Let's try the mean sea surface height stored in `mean_sea_surf_sol1_01`.

# In[13]:


data = pandas.DataFrame(
    {
        # Longitude values are between zero and 360, so we remap them to -180 to 180.
        "longitude": (dataset.lon_01.data.ravel() + 180) % 360 - 180,
        "latitude": dataset.lat_01.data.ravel(),
        "ssh": dataset.mean_sea_surf_sol1_01.load().data.ravel(),
    }
).dropna()

fig, ax = plt.subplots(
    subplot_kw=dict(projection=PlateCarree()),
    figsize=(12, 8),
)
ax.add_geometries(
    [shapely.geometry.shape(item.geometry)],
    crs=PlateCarree(),
    color="coral",
    alpha=0.1,
)
data.plot.scatter(
    x="longitude",
    y="latitude",
    c="ssh",
    ax=ax,
    colormap="viridis",
    marker=".",
    alpha=0.2,
)
ax.coastlines();


# ### Land data
#
# Let's repeat the process for the land product, which is very similar to the water product but is processed by a different centre.
# We'll use the reduced NetCDF this time, which contains the most useful subset of variables.
# We'll plot `mean_dyn_topo_01`, the mean dynamic topography.
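#
# If you aren't sure which variable in the reduced product holds the field you want, you can search the variable metadata once the dataset is opened below. This is an optional sketch, assuming the variables of interest carry a `long_name` attribute:
#
# ```python
# # List variables whose long_name mentions "topography" (optional check)
# [
#     name
#     for name, variable in dataset.variables.items()
#     if "topography" in variable.attrs.get("long_name", "").lower()
# ]
# ```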
# In[14]:


collection = catalog.get_collection("sentinel-3-sral-lan-l2-netcdf")
display(Markdown(f"### {collection.id}\n\n{collection.description}"))


# In[15]:


search = catalog.search(
    collections=["sentinel-3-sral-lan-l2-netcdf"],
    intersects=geometry,
)
item = next(search.items())

t = rich.table.Table("Key", "Value")
for key, asset in item.assets.items():
    t.add_row(key, asset.description)
t


# In[16]:


dataset = xr.open_dataset(fsspec.open(item.assets["reduced-measurement"].href).open())
dataset


# In[17]:


import pandas

data = pandas.DataFrame(
    {
        # Longitude values are between zero and 360, so we remap them to -180 to 180.
        "longitude": (dataset.lon_01.data.ravel() + 180) % 360 - 180,
        "latitude": dataset.lat_01.data.ravel(),
        "topography": dataset.mean_dyn_topo_01.load().data.ravel(),
    }
).dropna()
data


# In[21]:


fig, ax = plt.subplots(
    subplot_kw=dict(projection=PlateCarree()),
    figsize=(12, 8),
)
ax.add_geometries(
    [shapely.geometry.shape(item.geometry)],
    crs=PlateCarree(),
    color="coral",
    alpha=0.1,
)
data.plot.scatter(
    x="longitude",
    y="latitude",
    c="topography",
    ax=ax,
    colormap="viridis",
    marker=".",
    alpha=0.2,
)
ax.coastlines();
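

# As a quick sanity check on the values being plotted, you can summarize the `topography` column with pandas. This is just an optional inspection step, not part of the original workflow:
#
# ```python
# # Summary statistics for the mean dynamic topography samples
# data["topography"].describe()
# ```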