#!/usr/bin/env python
# coding: utf-8

# ## Visualizing Hurricane Florence
#
# This example makes a true-color animation of Hurricane Florence by stitching together images from GOES. It builds off this example from [pytroll-examples](https://github.com/pytroll/pytroll-examples/blob/main/satpy/GOES-16%20ABI%20-%20True%20Color%20Animation%20-%20Hurricane%20Florence.ipynb). You can see the output of that example [here](https://twitter.com/PyTrollOrg/status/1039555399433834497).
#
# Here's what our final animation will look like:

# In[1]:


import urllib.request

import contextily as ctx
import geopandas
import matplotlib.animation as animation
import matplotlib.pyplot as plt
import pandas as pd
import planetary_computer
import pystac_client
import rioxarray
import xarray as xr
import odc.stac
import dask.distributed


# ### Find the storm
#
# First, we need to find where on earth the storm was. The NCEI [International Best Track Archive for Climate Stewardship](https://www.ncei.noaa.gov/access/metadata/landing-page/bin/iso?id=gov.noaa.ncdc:C00834) provides files with all the information we need.

# In[2]:


file, _ = urllib.request.urlretrieve(
    "https://www.ncei.noaa.gov/data/international-best-track-archive-for-"
    "climate-stewardship-ibtracs/v04r00/access/netcdf/IBTrACS.NA.v04r00.nc"
)

# The storm ID comes from the text file in
# https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs
# /v04r00/access/netcdf/
# The name of that file changes with the update date, so we can't access it programmatically.
STORM_ID = b"2018242N13343"

ds = xr.open_dataset(file)
storm_loc = (ds.sid == STORM_ID).argmax().item()
data = ds.sel(storm=storm_loc)
geometry = geopandas.points_from_xy(data.lon, data.lat)


# `geometry` is a geopandas `GeometryArray` with points tracking the location of the storm over time. We'll match those up with the timestamps to plot the storm's trajectory. We'll also overlay the time period covered by our animation in red.

# In[3]:


df = (
    geopandas.GeoDataFrame(
        dict(
            time=pd.to_datetime(data.time).tz_localize("UTC"),
            geometry=geopandas.points_from_xy(data.lon, data.lat),
        )
    )
    .set_crs(4326)
    .dropna()
)

start = pd.Timestamp("2018-09-11T13:00:00Z")
stop = pd.Timestamp("2018-09-11T15:40:00Z")


# In[4]:


ax = df.to_crs(epsg=3857).plot(figsize=(12, 12))
subset = df[df.time.dt.date == start.date()]
subset.to_crs(epsg=3857).plot(ax=ax, color="r")
ctx.add_basemap(ax)
ax.set_axis_off()
ax.set(title="Path of Hurricane Florence (animation period in red)");


# Let's save the bounding box for the subset of points we're animating. We'll use it in our query later on.

# In[5]:


bbox = list(subset.total_bounds)


# ### Get the imagery
#
# Now we'll get the GOES imagery using the Planetary Computer's STAC API. We'll use the `goes-cmi` collection, and we'll have the API filter the results down to just the "mesoscale" images (GOES captures imagery with several fields of view).

# In[6]:


catalog = pystac_client.Client.open(
    "https://planetarycomputer.microsoft.com/api/stac/v1/",
    modifier=planetary_computer.sign_inplace,
)

search = catalog.search(
    collections=["goes-cmi"],
    bbox=bbox,
    datetime=[start, stop],
    query={"goes:image-type": {"eq": "MESOSCALE"}},
)
items = sorted(search.item_collection(), key=lambda x: x.datetime)


# Let's load and plot the first item, just to make sure we're on the right track.
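# First, a quick optional sanity check: how many mesoscale frames did the search return, and
# what time span do they cover? This sketch only inspects the `items` list built above; the
# exact count depends on what the STAC API returns for this time range.

# In[ ]:


print(f"Found {len(items)} items from {items[0].datetime} to {items[-1].datetime}")
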
# In[7]:


ds = rioxarray.open_rasterio(items[0].assets["C01_2km"].href).load()
ds[0].plot.imshow(figsize=(16, 9), cmap="Blues");


# We'll use `odc-stac` to load all the bands and time steps into a single datacube.

# In[8]:


# Drop assets in different projections
# https://github.com/opendatacube/odc-stac/issues/70
bands = {"C01_2km", "C02_2km", "C03_2km"}
for item in items:
    item.assets = {k: v for k, v in item.assets.items() if k in bands}


# In[9]:


ds = odc.stac.stac_load(items, bands=bands, chunks={})
key_to_common_name = {
    k: item.assets[k].to_dict()["eo:bands"][0]["common_name"] for k in bands
}
ds = ds.rename(key_to_common_name)
ds


# GOES doesn't have a true green band, which we need for our true-color animation. We'll simulate it with a linear combination of the other bands (see [Bah et al. (2018)](https://agupubs.onlinelibrary.wiley.com/doi/10.1029/2018EA000379) for more on this technique).

# In[10]:


green = 0.45 * ds["red"] + 0.1 * ds["nir09"] + 0.45 * ds["blue"]
green


# Now we'll normalize the data and apply a gamma correction for plotting.

# In[11]:


γ = 2.2
rgb = (
    ds.assign(green=green)
    .to_array(dim="band")
    .transpose("time", "band", "y", "x")
    .sel(band=["red", "green", "blue"])
)
rgb = rgb / rgb.max(dim=["band", "y", "x"])
rgb = (rgb ** (1 / γ)).clip(0, 1)
rgb


# Finally, we're ready to kick off all the computation we've built up. Because this is a relatively small amount of data, we'll use Dask to process it in parallel on a single machine. See [Scale with Dask](https://planetarycomputer.microsoft.com/docs/quickstarts/scale-with-dask/) for more about Dask.

# In[ ]:


client = dask.distributed.Client()
print(client.dashboard_link)


# In[13]:


get_ipython().run_line_magic('time', 'rgb = rgb.compute()')


# Let's check out the first image.

# In[14]:


fig, ax = plt.subplots(figsize=(16, 16))
rgb.isel(time=0).plot.imshow(rgb="band", add_labels=False)
ax.set_axis_off()


# ### Create the animation
#
# We'll use matplotlib's [FuncAnimation](https://matplotlib.org/stable/api/_as_gen/matplotlib.animation.FuncAnimation.html) to create the animation, updating the image data and the timestamp label for each frame. (If ffmpeg isn't available in your environment, see the optional GIF-saving sketch at the end of this notebook.)

# In[15]:


fig, ax = plt.subplots(figsize=(16, 16))
fig.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=None, hspace=None)
ax.set_axis_off()

img = rgb[0].plot.imshow(ax=ax, add_colorbar=False, rgb="band", add_labels=False)
label = ax.text(
    0.4,
    0.03,
    pd.Timestamp(rgb.time.data[0]).isoformat(),
    transform=ax.transAxes,
    color="k",
    size=20,
)


def animate(i):
    img.set_data(rgb[i].transpose("y", "x", "band"))
    label.set_text(pd.Timestamp(rgb.time.data[i]).isoformat())
    return img, label


ani = animation.FuncAnimation(fig, animate, frames=len(rgb), interval=120)
ani.save(
    "goes.mp4",
    fps=15,
    extra_args=["-vcodec", "libx264"],
    savefig_kwargs=dict(pad_inches=0, transparent=True),
)


# And we can display it in this notebook using IPython.

# In[16]:


from IPython.display import Video

Video("goes.mp4")


# ### Next steps
#
# Learn more about GOES and using the Planetary Computer
#
# * [GOES quickstart](../datasets/goes/goes-example.ipynb)
# * [Reading from the STAC API](https://planetarycomputer.microsoft.com/docs/quickstarts/reading-stac/)
# * [Scale with Dask](https://planetarycomputer.microsoft.com/docs/quickstarts/scale-with-dask/)
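# As an optional aside: if ffmpeg/libx264 isn't installed, matplotlib's built-in
# `PillowWriter` can write the same animation as a GIF instead. This is only a
# sketch assuming the `ani` object created above; the GIF will typically be much
# larger than the mp4.

# In[ ]:


from matplotlib.animation import PillowWriter

# Re-render the frames through Pillow instead of ffmpeg.
ani.save("goes.gif", writer=PillowWriter(fps=15))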