#!/usr/bin/env python
# coding: utf-8

# # Image overlay
#
# This notebook shows how you can overlay an image on a Leaflet map and use a slider to walk through a time series. It is similar to the Video overlay example, but offers more control, since you can do whatever you like with the slider, including going backwards.
#
# The following libraries are needed:
# * `tqdm`
# * `rasterio`
# * `matplotlib`
# * `pandas`
# * `ipyleaflet`
#
# The recommended way to install them is with conda: `conda install -c conda-forge tqdm rasterio matplotlib pandas ipyleaflet`.

# In[ ]:


import ftplib
import os
from tqdm import tqdm
import rasterio
from rasterio.warp import reproject, Resampling
from affine import Affine
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import subprocess
import datetime
import asyncio
from time import time
from base64 import b64encode
from ipyleaflet import Map, ImageOverlay, basemaps
from ipywidgets import VBox, Checkbox, SelectionRangeSlider

try:
    from StringIO import StringIO

    py3 = False
except ImportError:
    from io import StringIO, BytesIO

    py3 = True


# Here we download some satellite rainfall estimates ([NASA's Global Precipitation Measurement](https://www.nasa.gov/mission_pages/GPM/main/index.html)). They come in the WGS84 projection and cover the entire globe between latitudes 60N and 60S, with a spatial resolution of 0.1° and a temporal resolution of 30 minutes. We first download each individual file for the day 2017/01/01 (48 files).

# In[ ]:


os.makedirs("tif", exist_ok=True)
ftp = ftplib.FTP("arthurhou.pps.eosdis.nasa.gov")
passwd = "ebwje/cspdibsuAhnbjm/dpn"
login = ""
for c in passwd:
    login += chr(ord(c) - 1)
ftp.login(login, login)
ftp.cwd("gpmdata/2017/01/01/gis")
lst = [
    i
    for i in ftp.nlst()
    if i.startswith("3B-HHR-GIS.MS.MRG.3IMERG.") and i.endswith(".V06B.tif")
]
for filename in tqdm(lst):
    if not os.path.exists("tif/" + filename):
        # download each file only once, and close the local file handle when done
        with open("tif/" + filename, "wb") as f:
            ftp.retrbinary("RETR " + filename, f.write)
ftp.quit()


# When we convert the data into images, we will need to apply a color map. In order to scale this color map, we extract the maximum value present in the whole data set. Since this maximum value appears very rarely, the visual rendering is better if we saturate the images (here the color scale maximum is set to 10% of the true maximum, so larger values saturate).

# In[ ]:


image_vmax = 0
for f in os.listdir("tif"):
    dataset = rasterio.open("tif/" + f)
    data = dataset.read()[0][300:1500]
    data = np.where(data == 9999, np.nan, data) / 10  # in mm/h
    image_vmax = max(image_vmax, np.nanmax(data))
image_vmax *= 0.1  # saturate images


# We reproject our data (originally in WGS84, also known as EPSG:4326) to Web Mercator (also known as EPSG:3857), which is the projection used by Leaflet. After applying a color map (here `plt.cm.jet`), we save each image to a PNG file.
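# Before reprojecting, it can be useful to inspect one of the downloaded GeoTIFFs and check that its grid matches the description above (WGS84, 0.1° resolution). The following cell is an optional sanity check, assuming the files were downloaded into `tif/` as above.

# In[ ]:


# Optional sanity check: open one of the downloaded GeoTIFFs and print its
# CRS, size, resolution and bounds before reprojecting.
fname = sorted(os.listdir("tif"))[0]
dataset = rasterio.open("tif/" + fname)
print("CRS:       ", dataset.crs)  # expected: EPSG:4326 (WGS84)
print("Size:      ", dataset.width, "x", dataset.height)
print("Resolution:", dataset.res)  # expected: (0.1, 0.1) degrees
print("Bounds:    ", dataset.bounds)
dataset.close()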
# In[ ]:


# At this point, if GDAL complains about not being able to open the EPSG support file gcs.csv, try in the terminal:
# export GDAL_DATA=`gdal-config --datadir`


def to_web_mercator(data):
    with rasterio.Env():
        rows, cols = 1200, 3600
        src_transform = Affine(0.1, 0, -180, 0, -0.1, 60)
        src_crs = {"init": "EPSG:4326"}
        source = (
            np.where((data == 9999) | (~np.isfinite(data)), 0, data) / 10
        )  # in mm/h
        dst_crs = {"init": "EPSG:3857"}
        bounds = [-180, -60, 180, 60]
        dst_transform, width, height = rasterio.warp.calculate_default_transform(
            src_crs, dst_crs, cols, rows, *bounds
        )
        dst_shape = height, width
        destination = np.zeros(dst_shape)
        reproject(
            source,
            destination,
            src_transform=src_transform,
            src_crs=src_crs,
            dst_transform=dst_transform,
            dst_crs=dst_crs,
            resampling=Resampling.nearest,
        )
        return destination


def to_png(data, vmax, fname):
    fig, ax = plt.subplots(1, figsize=(36, 12))
    fig.subplots_adjust(left=0, right=1, bottom=0, top=1)
    ax.imshow(data, vmin=0, vmax=vmax, interpolation="nearest", cmap=plt.cm.jet)
    ax.axis("tight")
    ax.axis("off")
    plt.savefig(fname)
    plt.close()


os.makedirs("png", exist_ok=True)
for fname in tqdm(os.listdir("tif")):
    png_name = fname[:-3] + "png"
    if not os.path.exists("png/" + png_name):
        dataset = rasterio.open("tif/" + fname)
        data = dataset.read()[0][300:1500]
        data_web = to_web_mercator(data)
        to_png(data_web, image_vmax, "png/" + png_name)


# Then we create a map, a slider and a check box. The check box lets you walk through the images as snapshots (the left and right boundaries of the slider are tied together). When it is unchecked, you can move the left and right boundaries as you like, and the mean precipitation over that range is displayed.

# In[ ]:


center = [0, 0]
zoom = 2
m = Map(
    center=center,
    zoom=zoom,
    interpolation="nearest",
    basemap=basemaps.CartoDB.DarkMatter,
)
bounds = [(-60, -180), (60, 180)]

start_date = datetime.datetime(2017, 1, 1)
end_date = datetime.datetime(2017, 1, 2)
dates = pd.date_range(start_date, end_date, freq="30min")
options = [(date.strftime(" %d:%H:%M "), date) for date in dates]
index = (0, 1)
date_slider = SelectionRangeSlider(
    options=options,
    index=index,
    description="Time",
    orientation="horizontal",
    layout={"width": "900px"},
)
snapshot = Checkbox(value=True, description="Snapshot", disabled=False)


# Computing the mean precipitation can take some time, so we don't want to trigger it every time the slider moves. The following code implements a debouncer, which ensures that a new computation is triggered only once the user stops moving the slider, and a throttler, which ensures that snapshot images are not displayed too often, giving a smoother animation. A quick illustration of the debouncer follows the implementation below.
# In[ ]:


class Timer:
    def __init__(self, timeout, callback):
        self._timeout = timeout
        self._callback = callback
        self._task = asyncio.ensure_future(self._job())

    async def _job(self):
        await asyncio.sleep(self._timeout)
        self._callback()

    def cancel(self):
        self._task.cancel()


def debounce(wait):
    """Decorator that will postpone a function's execution
    until after `wait` seconds have elapsed since the last
    time it was invoked."""

    def decorator(fn):
        timer = None

        def debounced(*args, **kwargs):
            nonlocal timer

            def call_it():
                fn(*args, **kwargs)

            if timer is not None:
                timer.cancel()
            timer = Timer(wait, call_it)

        return debounced

    return decorator


def throttle(wait):
    """Decorator that prevents a function from being called
    more than once every `wait` period."""

    def decorator(fn):
        time_of_last_call = 0
        scheduled = False
        new_args, new_kwargs = None, None

        def throttled(*args, **kwargs):
            nonlocal new_args, new_kwargs, time_of_last_call, scheduled

            def call_it():
                nonlocal new_args, new_kwargs, time_of_last_call, scheduled
                time_of_last_call = time()
                fn(*new_args, **new_kwargs)
                scheduled = False

            time_since_last_call = time() - time_of_last_call
            new_args = args
            new_kwargs = kwargs
            if not scheduled:
                new_wait = max(0, wait - time_since_last_call)
                Timer(new_wait, call_it)
                scheduled = True

        return throttled

    return decorator


# The following is the logic for displaying a new image when the slider moves. The images are sent to the browser by embedding the PNG data directly into the image URL, as a base64 data URL.

# In[ ]:


snapshot_fname = None
io = None


def get_tif_name(date):
    hour = str(date.hour).zfill(2)
    minu1 = str(date.minute).zfill(2)
    minu2 = str(date.minute + 29).zfill(2)
    minu3 = str(date.hour * 60 + date.minute).zfill(4)
    fname = (
        "3B-HHR-GIS.MS.MRG.3IMERG.20170101-S"
        + hour
        + minu1
        + "00-E"
        + hour
        + minu2
        + "59."
        + minu3
        + ".V06B.tif"
    )
    return fname


def display_png(fname):
    global io
    with open(fname, "rb") as f:
        data = b64encode(f.read())
    if py3:
        data = data.decode("ascii")
    imgurl = "data:image/png;base64," + data
    if io is None:
        io = ImageOverlay(url=imgurl, bounds=bounds, opacity=0.4)
        m.add_layer(io)
    else:
        io.url = imgurl


@debounce(0.2)
def show_mean():
    mean_data = None
    mean_vmax = 0
    nb = 0
    date = date_slider.value[0]
    while date < date_slider.value[1]:
        nb += 1
        fname = get_tif_name(date)
        dataset = rasterio.open("tif/" + fname)
        data = dataset.read()[0][300:1500]
        data = np.where(data == 9999, np.nan, data) / 10  # in mm/h
        mean_vmax = max(mean_vmax, np.nanmax(data))
        if mean_data is None:
            mean_data = data
        else:
            mean_data = mean_data + data
        date += datetime.timedelta(minutes=30)
    mean_data = mean_data / nb
    mean_vmax *= 0.1 / nb  # saturate image
    data_web = to_web_mercator(mean_data)
    to_png(data_web, mean_vmax, "png/mean.png")
    display_png("png/mean.png")


@throttle(0.1)
def show_snapshot(change):
    global snapshot_fname
    if change["new"][0] == end_date:
        date_slider.value = [end_date - datetime.timedelta(minutes=30), end_date]
    elif change["new"][1] == start_date:
        date_slider.value = [start_date, start_date + datetime.timedelta(minutes=30)]
    elif change["new"][0] != change["old"][0]:
        # the left boundary has changed
        date_slider.value = [
            change["new"][0],
            change["new"][0] + datetime.timedelta(minutes=30),
        ]
    else:
        # the right boundary has changed
        date_slider.value = [
            change["new"][1] - datetime.timedelta(minutes=30),
            change["new"][1],
        ]
    new_fname = get_tif_name(date_slider.value[0])[:-3] + "png"
    if new_fname != snapshot_fname:
        display_png("png/" + new_fname)
        snapshot_fname = new_fname


def date_changed(change):
    if snapshot.value:
        show_snapshot(change)
    else:
        show_mean()


date_slider.observe(date_changed, "value")
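# As a quick illustration of the debouncer (this cell is optional and not used by the map), decorating a toy function and calling it in a burst results in a single call, made 0.2 seconds after the burst ends.

# In[ ]:


# Illustration only: in a burst of calls, the debouncer keeps cancelling the
# pending timer, so only the last call actually runs, 0.2 s after the burst.
@debounce(0.2)
def log_value(value):
    print("slider settled at", value)


for i in range(10):
    log_value(i)  # prints a single line: "slider settled at 9"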
# Finally, we display the slider, the check box and the map together.

# In[ ]:


VBox([date_slider, snapshot, m])
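# Instead of dragging the slider by hand, you can also step it programmatically. The following cell is a minimal sketch (optional): it advances the slider every half second, which triggers the same `date_changed` logic as a manual move, and it uses `asyncio.sleep` so that the asyncio timers used above can keep running between steps.

# In[ ]:


# Optional: animate the snapshots by stepping the slider through the day.
async def animate(delay=0.5):
    date = start_date
    while date < end_date:
        date_slider.value = (date, date + datetime.timedelta(minutes=30))
        await asyncio.sleep(delay)
        date += datetime.timedelta(minutes=30)


# Uncomment to start the animation:
# asyncio.ensure_future(animate())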