#!/usr/bin/env python
# coding: utf-8

# ## Workflow
#
# (for v0.3.0)
# https://github.com/ASFHyP3/hyp3-isce2/releases/tag/v0.3.0
#
# https://github.com/ASFHyP3/hyp3-isce2/tree/v0.3.0
#
#
# 1. Find a burst to process
# 2. Search ASF for scenes
# 3. Select dates to process
# 4. Create processing subfolders to execute burst processing in

# In[1]:

import geopandas as gpd
import asf_search as asf
import requests
import os

# not sure why DEBUG statements is happening in other libraries if imported after hyp3_isce2...
import logging
import hyp3_isce2.burst as hb

# https://github.com/ASFHyP3/hyp3-isce2/issues/53 ?
rootlogs = logging.getLogger()
rootlogs.setLevel('WARNING')

import numpy as np
import xmlschema
# NOTE(fix): `import lxml` alone does not make `lxml.etree` available as an
# attribute unless some other library happens to import the submodule first.
import lxml.etree
import re

# In[2]:

import hyp3_isce2

hyp3_isce2.__version__
# syntax changing rapidly, so stick with 0.3.0

# In[3]:

# Point of interest used to spatially select a burst from the ESA burst map.
# https://geojson.io/#new&map=15.23/47.654452/-122.303174
gfa = gpd.GeoDataFrame.from_features(
    {
        "type": "FeatureCollection",
        "features": [
            {
                "type": "Feature",
                "properties": {},
                "geometry": {
                    "coordinates": [
                        -122.29994319751066,
                        47.657472535245574
                    ],
                    "type": "Point"
                }
            }
        ]
    },
    crs=4326,
)
gfa.explore()

# ## Find a single burst
#
# 1. You need to download the burst database
#
# https://sar-mpc.eu/test-data-sets/
#
# Sentinel-1 Burst ID Map, version 20220530, generated by the SAR-MPC service,
# available on Sentinel-1 MPC Test data sets

# In[4]:

# For now consider selecting a burst covering a point
gf = gpd.read_file('S1_burstid_20220530/IW/sqlite/burst_map_IW_000001_375887.sqlite3',
                   mask=gfa)

# In[5]:

gf.head()

# In[6]:

#gf.explore()

# In[7]:

# Ascending test burst; ESA burst ids are '<relative_orbit>_<subswath>_<burst_id>'
burstId = '137_IW2_292399'
relorb = int(burstId.split('_')[0])
subswath = burstId.split('_')[1]
idnum = int(burstId.split('_')[2])

ind = (gf.relative_orbit_number == relorb) & (gf.subswath_name == subswath) & (gf.burst_id == idnum)
myburst = gf[ind]
myburst

# ## Search ASF Archive

# In[8]:

results = asf.geo_search(platform=[asf.PLATFORM.SENTINEL1],
                         processingLevel=asf.SLC,
                         beamMode=asf.BEAMMODE.IW,
                         relativeOrbit=relorb,
                         intersectsWith=str(myburst.geometry.values[0]),
                         )

gf = gpd.GeoDataFrame.from_features(results.geojson(), crs=4326)
len(gf)

# In[9]:

# Get a summer scene from each year
gf = gf.set_index(gpd.pd.to_datetime(gf.startTime))
gf.head(2)

# In[10]:

# 1st acquisition of every month
#gf.groupby([gf.index.year, gf.index.month]).first()

# 1st acquisition of given month every year
subset = gf[gf.index.month == 5]
subset = subset.groupby(subset.index.year).first()

# In[11]:

subset

# In[12]:

subset.reset_index(drop=True).set_crs(4326).explore(column='startTime',
                                                    style_kwds=dict(fillOpacity=.3))

# In[13]:


def get_ipf(sceneName):
    '''Return the Sentinel-1 IPF processor version string for an ASF scene.

    Scrapes the '(version X.YZ)' token out of the scene's ISO XML metadata
    hosted at datapool.asf.alaska.edu.

    Raises
    ------
    requests.HTTPError
        If the metadata URL cannot be fetched.
    ValueError
        If no '(version ...)' token is present in the metadata.
    '''
    sat = sceneName[0] + sceneName[2]  # e.g. 'S1A_...' -> 'SA'
    url = f'https://datapool.asf.alaska.edu/METADATA_SLC/{sat}/{sceneName}.iso.xml'
    #print(url)
    r = requests.get(url)
    r.raise_for_status()  # fail loudly instead of regex-matching an error page
    match = re.search(r'\(version(.*?)\)', r.text)
    if match is None:
        raise ValueError(f'No IPF version found in metadata for {sceneName}')
    return match.group(1).strip()


# In[14]:

subset['IPF'] = subset.sceneName.apply(get_ipf)

# In[15]:

subset.IPF

# In[16]:

# Only IPF>=3.4 has burstnumbers in the metadata, others require
# reverse-lookup based on TANX (time from ascending node crossing)


def get_metadata_xml(session, params, outfile=None):
    '''Download the consolidated SLC metadata XML via hyp3_isce2 and return its root.'''
    root = hb.download_metadata(session, params, outfile)
    return root


def get_ipf_from_root(root):
    '''For consolidated XML metadata, get the IPF software version string.

    NOTE(fix): originally named `get_ipf`, which shadowed the scene-name based
    `get_ipf()` defined above; renamed to avoid the clashing redefinition.
    '''
    ipfnode = root.find('.//safe:software',
                        {'safe': 'http://www.esa.int/safe/sentinel-1.0'})
    return ipfnode.attrib['version']


def get_burstnumber(session, row, myburst, product_schema='./support/s1-level-1-product.xsd'):
    '''Given an ASF frame, determine relative burst number corresponding to standard ESA burstid.

    Parameters
    ----------
    session : requests.Session
        ASF session from hyp3-isce2 (``hb.get_asf_session()``).
    row : pandas.Series
        Row of asf_search results (needs ``sceneName`` and ``polarization``).
    myburst : pandas.Series / geoseries
        Burst of interest from the ESA burst map (needs ``subswath_name``,
        ``burst_id`` and ``time_from_anx_sec``).
    product_schema : str
        Path to the s1-level-1-product.xsd XML schema (from an SLC SAFE)
        used to parse the annotation metadata.

    Returns
    -------
    int
        Zero-based burst index within the subswath.

    Raises
    ------
    ValueError
        If no product matching the requested polarization/swath is found.
    '''
    # Burst number in BurstParams is a placeholder; we only need the metadata.
    params = hb.BurstParams(row.sceneName, myburst.subswath_name, row.polarization[:2], 1)

    # Get all XML metadata for SLC
    root = get_metadata_xml(session, params)
    IPF = get_ipf_from_root(root)

    # Extract correct section of xml
    for product in root.findall('.//product'):
        prod_pol = product.find('polarisation').text
        prod_swath = product.find('swath').text
        if (prod_pol == params.polarization) and (prod_swath == params.swath):
            node = product.find('content')
            node.tag = 'product'
            string = lxml.etree.tostring(node, encoding='unicode')

            # Convert to python dictionary
            xs = xmlschema.XMLSchema(product_schema)
            parsed = xs.to_dict(string, validation='lax')[0]

            # Zero-padded 'XXX.YZ' versions compare correctly as strings here.
            if IPF >= '003.4':
                burstid = [t.get('burstId').get('$')
                           for t in parsed['swathTiming']['burstList']['burst']]
                burstnum = burstid.index(myburst.burst_id)
            else:
                # Reverse lookup: nearest azimuth ANX time to the burst-map value
                tanx = np.array([t['azimuthAnxTime']
                                 for t in parsed['swathTiming']['burstList']['burst']])
                burstnum = np.argmin(np.abs(tanx - myburst.time_from_anx_sec))
            return burstnum

    # NOTE(fix): originally fell through leaving `burstnum` undefined (NameError)
    raise ValueError(f'No product matching polarization={params.polarization}, '
                     f'swath={params.swath} in metadata for {row.sceneName}')


# In[17]:

myburst  # '137_IW2_292399'

# In[18]:

asf_session = hb.get_asf_session()

# In[19]:

# For small number of granules just iterate over pandas dataframe (slow but works)
Bursts = []
for i, row in subset.iterrows():
    num = get_burstnumber(asf_session, row, myburst.iloc[0])
    Bursts.append(num)

# In[20]:

subset['burst'] = Bursts

# In[21]:

subset['date'] = gpd.pd.to_datetime(subset.startTime).dt.strftime('%Y%m%d')

# In[22]:

with gpd.pd.option_context('display.max_colwidth', None):
    display(subset.loc[:, ['date', 'sceneName', 'IPF', 'burst']])

# In[ ]:

# Write one subfolder + command file per sequential interferogram pair.
# NOTE(fix): os.makedirs(..., exist_ok=True) instead of os.mkdir so the
# notebook can be rerun without crashing on existing directories.
os.makedirs('ascending', exist_ok=True)

for i in range(len(subset) - 1):
    ref = subset.iloc[i]
    sec = subset.iloc[i + 1]
    os.makedirs(f'ascending/{ref.date}_{sec.date}', exist_ok=True)
    cmd = f'''python -m hyp3_isce2 ++process insar_tops_burst --reference-scene {ref.sceneName} --secondary-scene {sec.sceneName} \
--swath-number 2 --polarization VV --reference-burst-number {ref.burst} --secondary-burst-number {sec.burst} --azimuth-looks 4 --range-looks 20\n'''
    with open(f'ascending/{ref.date}_{sec.date}/cmd.txt', 'w') as f:
        f.write(cmd)

# In[ ]: