#!/usr/bin/env python # coding: utf-8 # # Storm Surge Alerts Feeds # # ## Development Notes # # Notes about development of ATOM/RSS feeds for storm surge alerts. # Started in the process of working out the details of how to provide # a feed to Port Metro Vancouver during the 2015/2016 storm surge season. # Consideration is also given to the possibility of providing a wider collection # of feeds in the future. # Port Metro Vancouver presently consumes [Environment Canada feeds](https://weather.gc.ca/business/index_e.html), # so we use that as our pattern. # # Feeds are generically referred to as RSS, # but there are 2 current standards for them: # [ATOM](https://en.wikipedia.org/wiki/Atom_%28standard%29) # and [RSS-2.0](https://en.wikipedia.org/wiki/RSS). # EC uses ATOM, # and it seems to be somewhat more favourable technically, # so that's what we'll focus on initially. # # The IETF standard for ATOM is [RFC 4287](https://tools.ietf.org/html/rfc4287). # # The [Vancouver weather forecast web page](http://weather.gc.ca/city/pages/bc-74_metric_e.html) # has links to the [weather forecast feed](http://weather.gc.ca/rss/city/bc-74_e.xml), # and the [weather alerts feed](http://weather.gc.ca/rss/warning/bc-74_e.xml). # Using the Firefox "View Page Source" function on the page that loads from either of those # feed links lets you see the structure of the feed content. # # Other useful links: # # * A blog post about [creating ATOM ID elements](http://web.archive.org/web/20110514113830/http://diveintomark.org/archives/2004/05/28/howto-atom-id) # # * A blog post (linked from the above one) about [using tag URIs as ATOM IDs](http://web.archive.org/web/20110514113830/http://www.taguri.org/) # # * The Python `feedgen` package [documentation](http://lkiesow.github.io/python-feedgen/), # [Github repository](https://github.com/lkiesow/python-feedgen), # and [PyPI page](https://pypi.python.org/pypi/feedgen/). 
# ### URLs
#
# The storm surge alerts feeds will be located under http://salishsea.eos.ubc.ca/storm-surge/.
#
# The initial ATOM feed for Port Metro Vancouver (PMV) will be http://salishsea.eos.ubc.ca/storm-surge/atom/pmv.xml.
#
# That pattern can be extended to provide other feeds like:
# * One containing alert messages for the entire model domain
# based on Ben's narrative template developed during the Aug-2015 sprint:
# http://salishsea.eos.ubc.ca/storm-surge/atom/alerts.xml
# * Feeds for specific locations: http://salishsea.eos.ubc.ca/storm-surge/atom/PointAtkinson.xml
# * Feeds customized for other stakeholders
#
# This URL structure also allows for the possibility of generating RSS-2.0 feeds in the future
# at locations like:
# * http://salishsea.eos.ubc.ca/storm-surge/rss/pmv.xml
# * http://salishsea.eos.ubc.ca/storm-surge/rss/alerts.xml
# * http://salishsea.eos.ubc.ca/storm-surge/rss/PointAtkinson.xml
# * etc.
#
# Even though the PMV and Point Atkinson feeds are for the same geolocation
# and are expressions of the same model results,
# they are treated separately so that the PMV feed can be customized with
# units,
# threshold levels,
# etc.
# requested by that stakeholder.

# ### Generating the PMV Feed
#
# EC appears to generate a new feed each time the [forecast page](http://weather.gc.ca/city/pages/bc-74_metric_e.html)
# is updated
# (so, hourly, at least)
# rather than appending new feed entries to an existing feed.
# We will adopt the same practice for the PMV feed:
# * A new feed will be generated each day after the forecast and forecast2 runs
# * In contrast to EC, who always have at least new current conditions to report on,
# we will only generate feed entries when the Point Atkinson water level is forecast to
# exceed the risk and extreme risk thresholds
#
# We'll use the [`feedgen` package](http://lkiesow.github.io/python-feedgen/)
# to construct the feed and its entries and store them to disk.
# In[1]: from pprint import pprint import arrow from feedgen.feed import FeedGenerator # In[2]: fg = FeedGenerator() utcnow = arrow.utcnow() fg.title('Salish Sea NEMO Model Storm Surge Alerts for Port Metro Vancouver') fg.id( 'tag:salishsea.eos.ubc.ca,2015-12-12:/storm-surge/atom/pmv/{utcnow}' .format(utcnow=utcnow.format('YYYYMMDDHHmmss'))) fg.language('en-ca') fg.author(name='Salish Sea MEOPAR Project', uri='http://salishsea.eos.ubc.ca/') fg.rights( 'Copyright {this_year}, Salish Sea MEOPAR Project Contributors and The University of British Columbia' .format(this_year=utcnow.year)) fg.link(href='http://salishsea.eos.ubc.ca/storm-surge/atom/pmv.xml', rel='self', type='application/atom+xml') fg.link(href='http://salishsea.eos.ubc.ca/storm-surge/forecast.html', rel='related', type='text/html') pprint(fg.atom_str(pretty=True).decode('ascii')) # Instead of pretty-printing the ASCII version of the feed, # the production code will save it as binary data in a file with: # ``` # fg.atom_file(os.path.join(path_to_storm_surge, 'atom', 'pmv.xml') # ``` # # The only really interesting bit in the code above is the calculation of the `id` element for the feed. # Quoting [Mark Pilgrim's blog post](http://web.archive.org/web/20110514113830/http://diveintomark.org/archives/2004/05/28/howto-atom-id): # # > There are three requirements for an Atom ID: # > # > 1. The ID must be a valid URI, as defined by RFC 2396. # > 2. The ID must be globally unique, across all Atom feeds, everywhere, for all time. This part is actually easier than it sounds. # > 3. The ID must never, ever change. 
#
# We use the [tag URI](http://web.archive.org/web/20110514113830/http://www.taguri.org/)
# technique to create our `id` with the EC tactic of appending the UTC feed create date/time
# as a string of digits:
#
# `tag:salishsea.eos.ubc.ca,2015-12-12:/storm-surge/atom/pmv/20151221020629`
#
# This tag is composed of:
# * Our domain name: `salishsea.eos.ubc.ca`
# * A comma
# * The date on which this scheme was conceived: `2015-12-12`
# * A colon
# * The path to the feed file, excluding the `.xml` extension: `/storm-surge/atom/pmv`
# * The UTC date/time when the feed was created as a string of digits, prepended with a '/': `/20151221020629`

# ### Generating a PMV Feed Entry
#
# The feed above will be generated after every forecast and forecast2 run
# by the `make_feeds` worker between the successful completion of the
# `make_site_page forecast publish` worker and the launch of the `push_to_web` worker.
#
# If the Point Atkinson water level is forecast to exceed the risk and extreme risk thresholds,
# the `make_feeds` worker will also add an entry to the feed.
# Apart from the calculation of the alert text for the entry,
# its creation looks like:

# In[3]:

# Add an entry to the feed; its tag URI id uses the *local* entry creation
# date/time (in contrast to the feed id, which uses UTC).
fe = fg.add_entry()
now = arrow.now()
fe.title('Storm Surge Alert for Point Atkinson')
fe.id(
    'tag:salishsea.eos.ubc.ca,{today}:/storm-surge/atom/pmv/{now}'
    .format(
        today=now.format('YYYY-MM-DD'),
        now=now.format('YYYYMMDDHHmmss')))
fe.author(name='Salish Sea MEOPAR Project', uri='http://salishsea.eos.ubc.ca/')
fe.content('', type='html')
# BUG FIX: the alternate link must be an absolute URI for the Atom feed to
# validate; the original cell omitted the http:// scheme
# (cf. the final feed-generation cell, which uses the absolute form).
fe.link(
    href='http://salishsea.eos.ubc.ca/nemo/results/forecast/publish_20dec15.html',
    rel='alternate', type='text/html')
pprint(fg.atom_str(pretty=True).decode('ascii'))

# Each entry also requires a unique `id`.
# Here we use a similar algorithm to the feed `id` except that the date/time is local rather than UTC: # # `tag:salishsea.eos.ubc.ca,2015-12-20:/storm-surge/atom/pmv/20151220180702` # # This tag is composed of: # * Our domain name: `salishsea.eos.ubc.ca` # * A comma # * The date on which the entry was created: `2015-12-20` # * A colon # * The path to the feed file, excluding the `.xml` extension: `/storm-surge/atom/pmv` # * The local date/time when the entry was created as a string of digits, prepended with a '/': `/20151220180702` # ### Calculation of the Alert Text for the Entry `summary` Element # # The `summary` element of the feed entry for a forecast for which there is # a water level alert condition will be something like: # ``` # STORM SURGE ADVISORY! # Extreme sea levels expected for the marine areas of Vancouver # # Synopsis: # Strong winds over the northeast Pacific Ocean are expected # to produce elevated sea levels near Vancouver early Sunday morning. # These elevated sea levels may present a flood risk to # coastal structures and communities at high tide. # # Point Atkinson # Maximum Water Level: 5.37 m above chart datum # Wind: 33.67 km/hr (18.18 knots) from the W (276°) # Time: Dec 13, 2015 07:22 [PST] # # Wind speed and direction are average over the 4 hours preceding # the maximum water level to give information regarding wave setup # that may augment flood risks. # ``` # # Things that need to be calculated for this message: # * The risk level: risk or extreme risk to reflect the words used in the # storm surge forecast page graphic # * Maximum water level above chart datum in metres # * Average wind speed in km/hr and knots over the 4 hours preceding the maximum water level # * Average wind direction as a compass point (e.g. SW) and a bearing in degrees that the wind is coming from # * Date and time of the maximum water level with the appropriate timezone indicated # # The `summary` element can be either plain text for HTML. 
# We probably need to use the latter to get formatting like above. # That opens the possibility of font faces (bold, italic, ...) # and inclusion of a link to the appropriate forecast page, # although the `rel="alternate"` `link` element may provide that. # # Most of the code that we need to calculate that text is available in the `SalishSeaNowcast` package, # in particular, # in the `nowcast.figures` module. # There is also a Mako template that generates RST in `SalishSeaNowcast/nowcast/www/templates/surgetext.mako`. # In[21]: import datetime import os import netCDF4 as nc import numpy as np from salishsea_tools import ( nc_tools, stormtools, unit_conversions, wind_tools, ) from salishsea_tools.places import PLACES from nowcast import figures # The next cell mounts the `/results` filesystem on `skookum` locally. # It is intended for use if when this notebook is run on a laptop or other # non-Waterhole machine that has `sshfs` installed. # Don't execute the cell if that doesn't describe your situation. 
# In[3]: get_ipython().system('sshfs skookum:/results results') # #### Calculating the Values to use in the Template # # The `nowcast.figures.plot_threshold_website()` has code # (reproduced below) that calculates: # * The maximum sea surface height in a `grid_T` run results file # * The time at which it occurs # In[8]: site_name = 'Point Atkinson' grid_T_15m = nc.Dataset('results/SalishSea/forecast/20dec15/{}.nc'.format(site_name.replace(' ', ''))) tidal_predictions = 'results/nowcast-sys/tools/SalishSeaNowcast/nowcast/tidal_predictions/' weather_path = 'results/forcing/atmospheric/GEM2.5/operational/fcst' ssh_model, t_model = figures.load_model_ssh(grid_T_15m) ttide = figures.get_tides(site_name, tidal_predictions) ssh_corr = figures.correct_model_ssh(ssh_model, t_model, ttide) residual = figures.compute_residual(ssh_corr, t_model, ttide) max_ssh, _, max_ssh_time, _, max_ssh_wind, i_max_ssh_wind = figures.get_maxes( ssh_corr, t_model, residual, figures.SITES[site_name]['lon'], figures.SITES[site_name]['lat'], weather_path) # During development of the `nowcast.workers.make_feeds()` worker # the above code was refactored to: # In[4]: site_name = 'Point Atkinson' grid_T_15m = nc.Dataset('results/SalishSea/forecast/20dec15/{}.nc'.format(site_name.replace(' ', ''))) tidal_predictions = 'results/nowcast-sys/tools/SalishSeaNowcast/nowcast/tidal_predictions/' ssh_model, t_model = nc_tools.ssh_timeseries_at_point(grid_T_15m, 0, 0, datetimes=True) ttide = figures.get_tides(site_name, tidal_predictions) ssh_corr = figures.correct_model_ssh(ssh_model, t_model, ttide) max_ssh = np.max(ssh_corr) + figures.SITES[site_name]['msl'] max_ssh_time = t_model[np.argmax(ssh_corr)] # From those results we can calculate: # * Maximum water level above chart datum in metres # * Date and time of the maximum water level with the appropriate timezone indicated # In[5]: max_ssh, arrow.get(max_ssh_time).to('local') # Formating the date/time would be easy if it weren't for adding the timezone 
# name:

# In[6]:

a = arrow.get(max_ssh_time).to('local')
'{datetime} [{tzname}]'.format(datetime=a.format('ddd MMM DD, YYYY HH:mm'), tzname=a.tzinfo.tzname(a.datetime))

# We also want to "humanize" the date/time into a phrase like "early Monday afternoon".
# There is code in the
# [surge_warning notebook](http://nbviewer.ipython.org/urls/bitbucket.org/salishsea/tools/raw/tip/SalishSeaNowcast/nowcast/notebooks/surge_warning.ipynb)
# that forms the basis for a function to do that.

# In[12]:

def humanize_time_of_day(date_time):
    """Return a phrase like 'early Monday afternoon' describing date_time.

    :arg date_time: Date/time to describe; assumed to be an Arrow object
                    (it must provide `.hour` and an Arrow-style `.format()`).

    :returns: str -- 'early'/'late' + day-of-week + 'morning'/'afternoon'/'evening'
    """
    day_of_week = date_time.format('dddd')
    if date_time.hour < 12:
        part_of_day = 'morning'
        early_late = 'early' if date_time.hour < 8 else 'late'
    # NOTE: hour >= 12 is guaranteed here by the branch above, so the
    # original's redundant `hour >= 12 and` test is dropped.
    elif date_time.hour < 17:
        part_of_day = 'afternoon'
        early_late = 'early' if date_time.hour < 15 else 'late'
    else:
        part_of_day = 'evening'
        early_late = 'early' if date_time.hour < 20 else 'late'
    return ' '.join((early_late, day_of_week, part_of_day))

# In[13]:

humanize_time_of_day(a)

# During development of the `nowcast.workers.make_feeds()` worker
# a slightly different version of that function that uses the same
# time of day descriptions as Environment Canada does in the public
# weather forecasts was implemented as
# `salishsea_tools.unit_conversions.humanize_time_of_day()`:

# In[7]:

unit_conversions.humanize_time_of_day(a)

# Some code adapted from `nowcast.figures.plot_threshold_map()`
# provides the risk level:

# In[14]:

def storm_surge_risk_level(max_ssh, site_msl, max_historic_ssh, ttide):
    """Calculate the storm surge risk level for a tide gauge site.

    The extreme risk threshold is halfway between the site's maximum
    predicted tide and its maximum historic sea surface height.

    :arg max_ssh: Maximum forecast sea surface height [m above chart datum].
    :arg site_msl: Mean sea level at the site [m above chart datum].
    :arg max_historic_ssh: Maximum historic sea surface height at the site [m].
    :arg ttide: Tidal predictions object with a `pred_all` series.

    :returns: None (no alert), 'moderate risk', or 'extreme risk'
    """
    max_tide_ssh = max(ttide.pred_all) + site_msl
    extreme_threshold = max_tide_ssh + (max_historic_ssh - max_tide_ssh) / 2
    risk_level = (
        None if max_ssh < max_tide_ssh
        else 'extreme risk' if max_ssh > extreme_threshold
        else 'moderate risk')
    return risk_level

# The above function has been added to the `stormtools` module.
# In[8]: risk_level = stormtools.storm_surge_risk_level(site_name, max_ssh, ttide) print(risk_level) # There is code in `nowcast.figures.get_model_winds()` that finds the wind component arrays # at the grid point in the weather forcing dataset at the lat-lon-time point that is closest # to the tide gauge site at the time of the maximum water level: # In[9]: weather_path = 'results/forcing/atmospheric/GEM2.5/operational/fcst' weather = nc.Dataset(os.path.join(weather_path, '{:ops_y%Ym%md%d.nc}'.format(max_ssh_time))) weather_lats = weather.variables['nav_lat'][:] weather_lons = weather.variables['nav_lon'][:] - 360 j, i = figures.find_model_point( figures.SITES[site_name]['lon'], figures.SITES[site_name]['lat'], weather_lons, weather_lats) u_wind = weather.variables['u_wind'][:, j, i] v_wind = weather.variables['v_wind'][:, j, i] # Since we regularly need to get wind component values at the weather grid # points closest to tide gauge stations, # we'll put the grid indices in the `salishsea_tools.places.PLACES` data structure. # In[12]: print(j, i) print(PLACES[site_name]['wind grid ji']) # Having done that we can use the `salishsea_tools.nc_tools.uv_wind_timeseries_at_point()` # function to get the wind component time series: # In[13]: wind = nc_tools.uv_wind_timeseries_at_point(weather, *PLACES[site_name]['wind grid ji']) # `nowcast.figures.get_maxes()` above gave us `i_max_ssh_wind`, the index in the wind component arrays # of the hour in which the maximum water level occurs. # In[18]: i_max_ssh_wind = np.asscalar(i_max_ssh_wind) # As a result of the development refactoring we're no longer using # `nowcast.figures.get_maxes()` to provide `i_max_ssh_wind`. 
# Instead we'll just grab the code that calculates it:

# In[19]:

i_max_ssh_wind = np.asscalar(
    np.where(
        wind.time == arrow.get(
            max_ssh_time.year, max_ssh_time.month, max_ssh_time.day,
            max_ssh_time.hour))[0])
print(i_max_ssh_wind)

# We use that to calculate the average wind components in the 4 hours preceding that hour:

# In[20]:

# BUG FIX: the upper slice bound of the u component was the typo
# `i_max_ssh_windax_ssh_wind` (a NameError); both components must average
# over the same 4-hour window preceding the maximum water level hour.
u_wind_4h_avg = np.mean(u_wind[i_max_ssh_wind-4:i_max_ssh_wind])
v_wind_4h_avg = np.mean(v_wind[i_max_ssh_wind-4:i_max_ssh_wind])
u_wind_4h_avg, v_wind_4h_avg

# In[23]:

# Calculating speed and direction arrays from components values or arrays
# should probably be a library function in the planned `salishsea_tools.wind_tools` module

def wind_speed_dir(u_wind, v_wind):
    """Return wind speed and "wind to" direction from component values/arrays.

    :arg u_wind: Eastward wind component(s) [m/s].
    :arg v_wind: Northward wind component(s) [m/s].

    :returns: (wind_speed [m/s], wind_dir [degrees CCW from east, 0-360))
    """
    wind_speed = np.sqrt(u_wind**2 + v_wind**2)
    wind_dir = np.arctan2(v_wind, u_wind)
    # Map arctan2's (-180, 180] range onto [0, 360)
    wind_dir = np.rad2deg(wind_dir + (wind_dir < 0) * 2 * np.pi)
    return wind_speed, wind_dir

# In[24]:

wind_speed_4h_avg, wind_dir_4h_avg = wind_speed_dir(u_wind_4h_avg, v_wind_4h_avg)
wind_speed_4h_avg, wind_dir_4h_avg

# In[22]:

wind_speed_4h_avg, wind_dir_4h_avg = wind_tools.wind_speed_dir(u_wind_4h_avg, v_wind_4h_avg)
wind_speed_4h_avg, wind_dir_4h_avg

# Conversion of the wind speed from m/s to km/hr and knots is easily
# accomplished with some conversion factors and functions that should be
# added to a module in the `SalishSeaTools` package:

# In[25]:

M_PER_S__KM_PER_HR = 3600 / 1000
M_PER_S__KNOTS = 3600 / 1852

def mps_kph(m_per_s):
    """Convert a speed from m/s to km/hr."""
    return m_per_s * M_PER_S__KM_PER_HR

def mps_knots(m_per_s):
    """Convert a speed from m/s to knots."""
    return m_per_s * M_PER_S__KNOTS

# Those conversion factors and functions are implemented in the `salishsea_tools.unit_conversions` module.
# In[26]: unit_conversions.mps_kph(wind_speed_4h_avg), unit_conversions.mps_knots(wind_speed_4h_avg) # We need a function that converts wind bearings from # the "wind to" orientation that physicists use to the # "wind from" orientation that people are used to: # In[17]: def wind_to_from(wind_to): return 270 - wind_to if wind_to <= 270 else 270 - wind_to + 360 # This function is also now implemented as `salishsea_tools.unit_converstions.wind_to_from()`. # In[27]: unit_conversions.wind_to_from(wind_dir_4h_avg) # We also need a function that converts compass bearings to compass headings: # In[19]: def bearing_heading( bearing, headings=['N', 'NNE', 'NE', 'ENE', 'E', 'ESE', 'SE', 'SSE', 'S', 'SSW', 'SW', 'WSW', 'W', 'WNW', 'NW', 'NNW', 'N'], ): return headings[int(round(bearing * (len(headings) - 1) / 360, 0))] # This function is available as `salishsea_tools.unit_conversions.bearing_heading()`: # In[29]: unit_conversions.bearing_heading(unit_conversions.wind_to_from(wind_dir_4h_avg)) # #### Rendering the Template for the `summary` Element # # We'll start with a reStructuredText template based on `SalishSeaNowcast/nowcast/www/templates/surgetext.mako`: # In[21]: import docutils.core import IPython.display import mako.template # In[25]: template = """ **STORM SURGE ADVISORY** ${'Extreme' if 'extreme' in conditions[tide_gauge_stn]['risk_level'] else 'Elevated'} sea levels expected for the marine areas of ${city} **Synopsis**: Strong winds over the northeast Pacific Ocean are expected to produce elevated sea levels near ${city} ${conditions[tide_gauge_stn]['humanized_max_ssh_time']}. These elevated sea levels may present a flood risk to coastal structures and communities at high tide. ${tide_gauge_stn_info(tide_gauge_stn, conditions)} Wind speed and direction are averages over the 4 hours preceding the maximum water level to give information regarding wave setup that may augment flood risks. 
<%def name="tide_gauge_stn_info(stn, conditions)"> **${stn}** **Risk Level:** ${conditions[stn]['risk_level'].title()} **Maximum Water Level:** ${round(conditions[stn]['max_ssh_msl'], 1)} m above chart datum **Wind:** ${int(round(conditions[stn]['wind_speed_4h_avg_kph'], 0))} km/hr (${int(round(conditions[stn]['wind_speed_4h_avg_knots'], 0))} knots) from the ${conditions[stn]['wind_dir_4h_avg_heading']} (${int(round(conditions[stn]['wind_dir_4h_avg_bearing'], 0))}°) **Time:** ${conditions[stn]['max_ssh_time'].format('ddd MMM DD, YYYY HH:mm')} [${conditions[stn]['max_ssh_time_tzname']}] """ # In[26]: max_ssh_time_local = arrow.get(max_ssh_time).to('local') values = { 'city': 'Vancouver', 'tide_gauge_stn': 'Point Atkinson', 'conditions': { 'Point Atkinson': { 'risk_level': risk_level, 'max_ssh_msl': max_ssh_msl, 'wind_speed_4h_avg_kph': mps_kph(wind_speed_4h_avg), 'wind_speed_4h_avg_knots': mps_knots(wind_speed_4h_avg), 'wind_dir_4h_avg_heading': bearing_heading(wind_to_from(wind_dir_4h_avg)), 'wind_dir_4h_avg_bearing': wind_to_from(wind_dir_4h_avg), 'max_ssh_time': max_ssh_time_local, 'max_ssh_time_tzname': max_ssh_time_local.tzinfo.tzname(max_ssh_time_local.datetime), 'humanized_max_ssh_time': humanize_time_of_day(max_ssh_time_local), }, }, } rendered_rst = mako.template.Template(template).render(**values) print(rendered_rst) # Rendering the RST to HTML: # In[27]: parts = docutils.core.publish_parts(rendered_rst, writer_name='html') IPython.display.HTML(parts['body']) # Finally, we have everything we need to generate the feed and an entry if the model # is predicting a storm surge risk: # In[36]: fg = FeedGenerator() utcnow = arrow.utcnow() fg.title('Salish Sea NEMO Model Storm Surge Alerts for Port Metro Vancouver') fg.id( 'tag:salishsea.eos.ubc.ca,2015-12-12:/storm-surge/atom/pmv/{utcnow}' .format(utcnow=utcnow.format('YYYYMMDDHHmmss'))) fg.language('en-ca') fg.author(name='Salish Sea MEOPAR Project', uri='http://salishsea.eos.ubc.ca/') fg.rights( 'Copyright 
{this_year}, Salish Sea MEOPAR Project Contributors and The University of British Columbia' .format(this_year=utcnow.year)) fg.link(href='http://salishsea.eos.ubc.ca/storm-surge/atom/pmv.xml', rel='self', type='application/atom+xml') fg.link(href='http://salishsea.eos.ubc.ca/storm-surge/forecast.html', rel='related', type='text/html') if risk_level is not None: rendered_rst = mako.template.Template(template).render(**values) html = docutils.core.publish_parts(rendered_rst, writer_name='html') now = arrow.now() fe = fg.add_entry() fe.title('Storm Surge Alert for Point Atkinson') fe.id( 'tag:salishsea.eos.ubc.ca,{today}:/storm-surge/atom/pmv/{now}' .format( today=now.format('YYYY-MM-DD'), now=now.format('YYYYMMDDHHmmss'))) fe.author(name='Salish Sea MEOPAR Project', uri='http://salishsea.eos.ubc.ca/') fe.content(html['body'], type='html') fe.link(href='http://salishsea.eos.ubc.ca/nemo/results/forecast/publish_20dec15.html', rel='alternate', type='text/html') pprint(fg.atom_str(pretty=True).decode('utf8')) # To store the feed in a file instead of rendering it to a string use: # ``` # fg.atom_file('pmv.xml') # ``` # # Now the code in this notebook will be used to create a `nowcast.worker` module that # will generate feeds and store them in the appropriate locations after every # forecast and forecast2 run.