#!/usr/bin/env python
# coding: utf-8

# Dependencies | Version
# --- | ---
# SatPy | 0.9.0
# PyResample | 1.10.1
# Trollimage | 1.5.3
# PyKdtree | 1.3.1

# # Quickstart with EARS-NWC

# In December 2012 EUMETSAT extended the EARS services to include a new
# service with the purpose of supporting European users with cloud information
# from polar-orbiting satellites in near real time.


# This EARS-NWC service provides the parameters Cloud Mask, Cloud Type and
# Cloud Top Temperature and Height (CTTH) as derived using the Polar Platform
# System (PPS) software package from the NWC SAF. The products are derived
# from AVHRR data received at the EARS core stations, with a reception
# coverage including Europe and the North Atlantic. Products are disseminated
# on EUMETCast (EUMETSAT data channel 1) with a timeliness better than 30
# minutes, and are available in netCDF4 format. The geolocation information is
# available on a tie-point grid and stored in each product.

# At the moment the satellites contributing to the service are Metop-B and
# NOAA-19.
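
# Before loading anything, it can be useful to check which datasets Satpy can
# read from a single EARS-NWC granule. The sketch below is only illustrative:
# the file name is a made-up placeholder, and it assumes one unpacked cloud
# type granule is available locally.

from satpy import Scene

# Hypothetical path to a single EARS-NWC cloud type granule (placeholder name)
one_granule = "/path/to/ears/S_NWC_CT_metopb_granule_20180314T1005000Z.nc"
scn = Scene(filenames=[one_granule], reader='nc_nwcsaf_pps')
print(scn.available_dataset_names())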

# ## Loading and concatenating the cloud type granules

# In[1]:

from satpy import Scene
from satpy import find_files_and_readers
from datetime import datetime

DATA_DIR = "/home/a000680/laptop/Nordisk/EARS_NWC/data/case_20180314/ears"
myfiles = find_files_and_readers(base_dir=DATA_DIR,
                                 start_time=datetime(2018, 3, 14, 10, 5),
                                 end_time=datetime(2018, 3, 14, 10, 11),
                                 reader='nc_nwcsaf_pps')
scene = Scene(filenames=myfiles)
scene.load(['ct'])

# Now we have loaded and concatenated the ct field of the Cloudtype product
# granules in the time window given by the start and end times above.

# In[2]:

print(scene['ct'].data.shape)
print(scene['ct'].data.compute())
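
# As a quick sanity check, the time coverage of the concatenated granules can
# be read from the scene metadata; these are the same attributes used further
# down when naming the output image.

# Earliest start time and latest end time of the loaded granules
print(scene.attrs['start_time'], scene.attrs['end_time'])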

# Also the geolocation has been unpacked. That is, the full-resolution
# geolocation information has been recreated from the tie-point grid by
# interpolating and extrapolating the longitudes and latitudes on the
# tie-point grid. This is accomplished using the python-geotiepoints tool,
# but it is transparent to the user:
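
# The idea behind this unpacking can be illustrated with a small,
# self-contained sketch: bilinear interpolation of coarse longitude/latitude
# arrays onto the full pixel grid. This is only a conceptual illustration with
# made-up tie-point spacings and smooth dummy coordinates; it ignores the
# dateline wrap-around and the extrapolation beyond the last tie points that
# python-geotiepoints handles properly.

import numpy as np
from scipy.interpolate import RegularGridInterpolator

# Made-up tie-point geometry: one tie point every 40th row and every 24th column
tie_rows = np.arange(0, 401, 40)
tie_cols = np.arange(0, 2041, 24)

# Smooth dummy longitudes/latitudes on the tie-point grid
lons_tie = -10.0 + 0.02 * tie_cols[np.newaxis, :] + 0.001 * tie_rows[:, np.newaxis]
lats_tie = 75.0 - 0.05 * tie_rows[:, np.newaxis] + 0.0 * tie_cols[np.newaxis, :]

# Full-resolution pixel grid covered by the tie points
full_rows = np.arange(tie_rows[-1] + 1)
full_cols = np.arange(tie_cols[-1] + 1)
rr, cc = np.meshgrid(full_rows, full_cols, indexing='ij')
points = np.stack((rr.ravel(), cc.ravel()), axis=-1)

# Bilinear interpolation from the tie-point grid to every pixel
lons_full = RegularGridInterpolator((tie_rows, tie_cols), lons_tie)(points).reshape(rr.shape)
lats_full = RegularGridInterpolator((tie_rows, tie_cols), lats_tie)(points).reshape(rr.shape)
print(lons_full.shape, lats_full.shape)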

# In[3]:

print(scene['ct'].area.lats.shape)

# Now let's visualise the cloudtype data using the Nowcasting SAF palette read
# from the file:

# In[4]:

scene.load(['cloudtype'])
scene.show('cloudtype')

# # Load all CTTH granules, concatenate, reproject, generate a composite image, and save it to disk

# In[5]:

from glob import glob
import os.path

# In[6]:

DATA_DIR = "/home/a000680/laptop/Nordisk/EARS_NWC/data/case_20180314/ears"

# ## Make a list of all CTTH files in the directory, both bzipped and unpacked ones:

# In[7]:

myfiles = glob(os.path.join(DATA_DIR, "*CTTH_C*.nc.bz2"))
myfiles = myfiles + glob(os.path.join(DATA_DIR, "*CTTH_C*.nc"))

# ## Generate the scene object

# In[8]:

scene = Scene(filenames=myfiles, reader='nc_nwcsaf_pps')

# ## Load the cloud top height composite image

# In[9]:

scene.load(['cloud_top_height'])

# ## Resample the data to the pre-defined "euron1" area

# In[10]:

lscn = scene.resample('euron1', radius_of_influence=5000)

# In[11]:

lscn.show('cloud_top_height')

# ## Save the image to file

# In[12]:

dt_start = lscn.attrs['start_time'].strftime('%Y%m%dT%H%MZ')
dt_end = lscn.attrs['end_time'].strftime('%Y%m%dT%H%MZ')
lscn.save_dataset(
    'cloud_top_height',
    filename='./ears_nwc_ctth_{starttime}_{endtime}.png'.format(starttime=dt_start,
                                                                endtime=dt_end))
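
# The "euron1" area used above is a named area definition that has to be
# present in the pyresample/Satpy areas.yaml configuration. If no such
# definition is available, a similar target can be set up on the fly; the
# sketch below uses placeholder projection parameters and extent (it is not
# the real "euron1" definition), and writes the result as GeoTIFF instead of
# PNG (this requires the geotiff writer dependencies, e.g. rasterio or gdal).

from pyresample.geometry import AreaDefinition

# Placeholder polar-stereographic area over Northern Europe, roughly 3 km resolution
my_area = AreaDefinition(
    'my_north_europe', 'Example area over Northern Europe', 'my_north_europe',
    {'proj': 'stere', 'lat_0': 90.0, 'lon_0': 0.0, 'lat_ts': 60.0, 'ellps': 'WGS84'},
    1024, 1024,
    (-1000000.0, -5000000.0, 2000000.0, -2000000.0))

lscn2 = scene.resample(my_area, radius_of_influence=5000)
lscn2.save_dataset('cloud_top_height',
                   filename='./ears_nwc_ctth_custom_area.tif',
                   writer='geotiff')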