#!/usr/bin/env python
# coding: utf-8

# ## Accessing Sentinel-3 SLSTR data with the Planetary Computer STAC API
#
# The [Sentinel 3 SLSTR instrument](https://sentinel.esa.int/web/sentinel/technical-guides/sentinel-3-slstr) is an Along-Track Scanning Radiometer (ATSR) designed to provide reference land surface and sea surface temperatures.
# There are three SLSTR collections in the Planetary Computer:
#
# - Fire radiative power: `sentinel-3-slstr-frp-l2-netcdf`
# - Land surface temperature: `sentinel-3-slstr-lst-l2-netcdf`
# - Water surface temperature: `sentinel-3-slstr-wst-l2-netcdf`
#
# This notebook demonstrates accessing and visualizing data from all three collections.
#
# ### Data Access
#
# This notebook works with or without an API key, but you will be given more permissive access to the data with an API key. If you are using the [Planetary Computer Hub](https://planetarycomputer.microsoft.com/compute) to run this notebook, then your API key is automatically set to the environment variable `PC_SDK_SUBSCRIPTION_KEY` for you when your server is started. Otherwise, you can view your keys by signing in to the [developer portal](https://planetarycomputer.developer.azure-api.net/). The API key may be manually set via the environment variable `PC_SDK_SUBSCRIPTION_KEY` or the following code:
#
# ```python
# import planetary_computer
# planetary_computer.settings.set_subscription_key()
# ```
#
# The datasets hosted by the Planetary Computer are available in [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/). We'll use [pystac-client](https://pystac-client.readthedocs.io/) to search the Planetary Computer's [STAC API](https://planetarycomputer.microsoft.com/api/stac/v1/docs) for the subset of the data that we care about, and then we'll load the data directly from Azure Blob Storage. We'll specify a `modifier` so that we can access the data stored in the Planetary Computer's private Blob Storage Containers.
# See [Reading from the STAC API](https://planetarycomputer.microsoft.com/docs/quickstarts/reading-stac/)
# and [Using tokens for data access](https://planetarycomputer.microsoft.com/docs/concepts/sas/) for more.
#
# ### Land surface temperature
#
# The collection's description provides more information about the LST product.

# In[1]:

import planetary_computer
import pystac_client
from IPython.display import display, Markdown

# Open the Planetary Computer STAC API; the sign_inplace modifier attaches
# SAS tokens so assets in private Blob Storage containers can be read.
catalog = pystac_client.Client.open(
    "https://planetarycomputer.microsoft.com/api/stac/v1",
    modifier=planetary_computer.sign_inplace,
)

collection = catalog.get_collection("sentinel-3-slstr-lst-l2-netcdf")
display(Markdown(f"### {collection.id}\n\n{collection.description}"))

# ### Define the area of interest and search the land surface temperature collection
#
# We'll search for items over the coordinates `[-105, 40]`.

# In[2]:

import xarray as xr
import fsspec

# Grab the first item whose footprint intersects the point of interest.
search = catalog.search(
    collections=["sentinel-3-slstr-lst-l2-netcdf"],
    intersects={"type": "Point", "coordinates": [-105, 40]},
)
item = next(search.items())

# ### Available Assets and Metadata
#
# Each item includes a handful of assets linking to NetCDF files with the data
# or additional metadata files.

# In[3]:

import rich.table

t = rich.table.Table("Key", "Value")
for key, asset in item.assets.items():
    t.add_row(key, asset.description)
t

# ### Reading data
#
# We can use xarray to read each NetCDF file directly from Blob Storage.

# In[4]:

# fsspec turns the signed HTTPS href into a file-like object xarray can open.
dataset = xr.open_dataset(fsspec.open(item.assets["lst-in"].href).open())
dataset

# ### Geolocating and subsetting the data
#
# To plot the land surface temperature data, we will use the georeferencing
# information contained in the `slstr-geodetic-in` asset. There are so many
# points, and the data have such a large spatial extent, that we only need a
# random sample of the data rather than every point.
# In[5]:

# Load the geodetic grid (per-pixel lon/lat) eagerly so we can ravel it below.
geo = xr.open_dataset(fsspec.open(item.assets["slstr-geodetic-in"].href).open()).load()
geo

# In[6]:

import pandas

# Build a tidy (longitude, latitude, lst) table, dropping pixels with missing
# values. Take at most 10,000 points — clamping avoids a ValueError when fewer
# valid pixels remain after dropna() — and use a fixed seed so the figure is
# reproducible across runs.
data = pandas.DataFrame(
    {
        "longitude": geo.longitude_in.data.ravel(),
        "latitude": geo.latitude_in.data.ravel(),
        "lst": dataset.LST.load().data.ravel(),
    }
).dropna()
data = data.sample(min(10_000, len(data)), random_state=0)
data

# ### Plotting land surface temperature data
#
# We use a scatter plot to visualize the land surface temperature points over the globe.
# We also plot the item geometry, to give us a sense of the satellite's field of view
# and where that field of view crosses the land.

# In[7]:

import shapely.geometry
from cartopy.crs import PlateCarree
from cartopy.feature import BORDERS
import matplotlib.pyplot as plt

fig, ax = plt.subplots(figsize=(12, 12), subplot_kw=dict(projection=PlateCarree()))
# add_geometries expects an iterable of geometries; wrap the single footprint
# in a list (bare shapely geometries are not iterable in shapely >= 2.0).
ax.add_geometries(
    [shapely.geometry.shape(item.geometry)],
    crs=PlateCarree(),
    color="coral",
    alpha=0.1,
)
data.plot.scatter(
    x="longitude",
    y="latitude",
    c="lst",
    ax=ax,
    colormap="viridis",
    marker=".",
    alpha=0.2,
)
ax.set(ybound=(20, 50), xbound=(-120, -90))
ax.coastlines()
ax.add_feature(BORDERS, linestyle="-")
ax.gridlines(draw_labels=True, dms=True, x_inline=False, y_inline=False);

# ### Water surface temperature data
#
# Let's do the same process, but for the water surface temperature product.
# The data structure is a little different.
# In[8]:

collection = catalog.get_collection("sentinel-3-slstr-wst-l2-netcdf")
display(Markdown(f"### {collection.id}\n\n{collection.description}"))

# In[9]:

# Search the WST collection over the same point of interest.
search = catalog.search(
    collections=["sentinel-3-slstr-wst-l2-netcdf"],
    intersects={"type": "Point", "coordinates": [-105, 40]},
)
item = next(search.items())

t = rich.table.Table("Key", "Value")
for key, asset in item.assets.items():
    t.add_row(key, asset.description)
t

# In[10]:

dataset = xr.open_dataset(fsspec.open(item.assets["l2p"].href).open())
dataset

# In[11]:

import pandas

# (longitude, latitude, SST) points with missing pixels dropped. Take at most
# 10,000 points — clamping avoids a ValueError when fewer valid pixels remain
# after dropna() — with a fixed seed for reproducibility.
data = pandas.DataFrame(
    {
        "longitude": dataset.lon.data.ravel(),
        "latitude": dataset.lat.data.ravel(),
        "sea_surface_temperature": dataset.sea_surface_temperature.load().data.ravel(),
    }
).dropna()
data = data.sample(min(10_000, len(data)), random_state=0)
data

# In[12]:

figure, axes = plt.subplots(figsize=(12, 8), subplot_kw=dict(projection=PlateCarree()))
# add_geometries expects an iterable of geometries; wrap the single footprint
# in a list (bare shapely geometries are not iterable in shapely >= 2.0).
axes.add_geometries(
    [shapely.geometry.shape(item.geometry)],
    crs=PlateCarree(),
    color="coral",
    alpha=0.1,
)
data.plot.scatter(
    x="longitude",
    y="latitude",
    c="sea_surface_temperature",
    ax=axes,
    colormap="viridis",
    marker=".",
    alpha=0.8,
)
axes.coastlines();

# ### Fire radiative power
#
# We'll do the same for fire radiative power.
# This product's items are smaller chips.
# Let's use an item that crosses over California to give ourselves the best
# chance of catching data of interest.
# In[13]:

collection = catalog.get_collection("sentinel-3-slstr-frp-l2-netcdf")
display(Markdown(f"### {collection.id}\n\n{collection.description}"))

# In[14]:

# Search near [-121, 40] (northern California) where fire pixels are likely.
search = catalog.search(
    collections=["sentinel-3-slstr-frp-l2-netcdf"],
    intersects={"type": "Point", "coordinates": [-121, 40]},
)
item = next(search.items())

# Summarize the item's assets in a table.
t = rich.table.Table("Key", "Value")
for key, asset in item.assets.items():
    t.add_row(key, asset.description)
t

# In[15]:

# Open the fire-radiative-power NetCDF directly from Blob Storage.
dataset = xr.open_dataset(fsspec.open(item.assets["frp-in"].href).open())
dataset

# In[16]:

import pandas

# Flatten the per-pixel coordinates and F1 fire radiance into a tidy table,
# keeping only pixels where a radiance value is present.
data = pandas.DataFrame(
    {
        "longitude": dataset.longitude.data.ravel(),
        "latitude": dataset.latitude.data.ravel(),
        "radiance": dataset.F1_Fire_pixel_radiance.load().data.ravel(),
    }
).dropna()
data

# In[17]:

from cartopy.feature import RIVERS, COASTLINE

figure, axes = plt.subplots(figsize=(12, 8), subplot_kw=dict(projection=PlateCarree()))
axes.add_feature(RIVERS)
axes.add_feature(COASTLINE)
data.plot.scatter(
    x="longitude",
    y="latitude",
    c="radiance",
    ax=axes,
    colormap="viridis",
    marker=".",
)
axes.set_extent((-125, -120, 37, 42));