#!/usr/bin/env python
# coding: utf-8

# # The search for dark vessels: AIS / ship detection fusion workflow
#
# All ships above a certain size are required by law to use an AIS (Automated Identification System) transponder and
# are therefore trackable at all times wherever they go. A ship without such a signal is called a "dark vessel". While
# there are not always sinister reasons for this behaviour, it quite often indicates that something "fishy" is going
# on. Not surprisingly, many organisations are interested in this kind of information.
#
# In the following we show how dark vessels can be identified by combining a Machine Learning-based algorithm working
# on satellite images (provided by Airbus, see the [ship detection block on the UP42 marketplace](https://marketplace.up42.com/block/79e3e48c-d65f-4528-a6d4-e8d20fecc93c)
# for more details) with AIS signals (provided by [ExactEarth on UP42](https://marketplace.up42.com/block/54217695-73f4-4528-a575-a429e9af6568)).
#
# The workflow consists of the following steps:
#
# - Get SPOT imagery for the given AOI
# - Execute the format conversion, CRS conversion, tiling, ship detection and ship identification blocks via parallel jobs
# - Visualize the results
#
# Running the example costs around 8,000 UP42 credits. *Disclaimer:* In a real-world scenario we would use only SPOT
# Streaming data as input for the ship detection and identification. For this analysis we use SPOT Download data so
# that we can download the imagery (which the Streaming data license does not allow) and produce attractive
# visualisations. Using Streaming data would result in a much lower number of credits.

# ## Setup
#
# Import required libraries

# In[258]:


import up42
import geopandas as gpd
import rasterio
from rasterio.plot import show
import matplotlib.pyplot as plt
from shapely.geometry import box
import geojson


# Configure areas of interest

# In[259]:


aoi_sby = {"type": "FeatureCollection",
           "features": [{"type": "Feature", "properties": {},
                         "geometry": {"type": "Polygon",
                                      "coordinates": [[[112.713691, -7.183133],
                                                       [112.73185, -7.183133],
                                                       [112.73185, -7.172643],
                                                       [112.713691, -7.172643],
                                                       [112.713691, -7.183133]]]}}]}

aoi_gib = {"type": "FeatureCollection",
           "features": [{"type": "Feature", "properties": {},
                         "geometry": {"type": "Polygon",
                                      "coordinates": [[[-5.370973, 36.115663],
                                                       [-5.348018, 36.115663],
                                                       [-5.355756, 36.141326],
                                                       [-5.3598, 36.147976],
                                                       [-5.370531, 36.148606],
                                                       [-5.370973, 36.115663]]]}}]}

aois = [{'title': 'SBY', 'geometry': aoi_sby}, {'title': 'GIB', 'geometry': aoi_gib}]


# Authenticate with UP42

# In[260]:


#up42.authenticate(project_id="123", project_api_key="456")
up42.authenticate(cfg_file="config.json")
up42.settings(log=False)


# # Catalog Search
#
# Search for cloud-free SPOT images for the two AOIs and visualise the quicklooks.
# In[261]:


catalog = up42.initialize_catalog()

for aoi in aois:
    print("\n---------" + aoi["title"] + "---------\n")
    search_parameters = catalog.construct_parameters(geometry=aoi['geometry'],
                                                     start_date="2019-01-01",
                                                     end_date="2020-12-31",
                                                     sensors=["spot"],
                                                     max_cloudcover=5,
                                                     sortby="acquisitionDate",
                                                     ascending=False,
                                                     limit=3)
    search_results = catalog.search(search_parameters)

    # Download & visualise the quicklooks
    catalog.download_quicklooks(image_ids=search_results.id.to_list(), sensor="spot")
    display(search_results.head())
    catalog.plot_quicklooks(figsize=(18, 5), titles=search_results.scene_id.to_list())

    # Select the least cloudy scene for the further workflow
    aoi["scene_id"] = search_results.scene_id.to_list()[0]


# In[213]:


# Optional: Select ideal scenes manually
aois[0]["scene_id"] = "DS_SPOT6_202009290220156_FR1_FR1_FR1_FR1_E113S07_01627"
aois[1]["scene_id"] = "DS_SPOT7_202007101047552_FR1_FR1_FR1_FR1_W005N36_03414"


# # Run ship identification workflow on selected images

# In[214]:


up42.settings(log=True)
project = up42.initialize_project()

# Increase the parallel job limit for the project.
#project.update_project_settings(max_concurrent_jobs=10)


# Create or update a workflow for the ship identification

# In[238]:


workflow = project.create_workflow("Ship identification download", use_existing=True)


# Add or update the workflow's tasks

# In[240]:


input_tasks = ['oneatlas-spot-display',
               'data-conversion-dimap',
               'crs-conversion',
               'tiling',
               'ship-detection',
               'ship-identification']
workflow.add_workflow_tasks(input_tasks=input_tasks)
workflow


# ## Run jobs in parallel
#
# Construct workflow input parameters & run jobs

# In[242]:


input_parameters_list = []
for aoi in aois:
    input_parameters = workflow.construct_parameters(geometry=aoi['geometry'],
                                                     geometry_operation="bbox",
                                                     scene_ids=[aoi["scene_id"]])
    input_parameters['crs-conversion:1']['output_epsg_code'] = 3857
    input_parameters_list.append(input_parameters)
input_parameters_list


# In[243]:


jobs = workflow.run_jobs_parallel(input_parameters_list=input_parameters_list)


# # Download & Visualise results

# In[244]:


# Inspect the job tasks of the first job
jobs[0].get_jobtasks()


# In[245]:


data_results_paths, detection_results, identification_results = [], [], []

for job in jobs:
    _, _, data_task, _, detection_task, identification_task = job.get_jobtasks()

    # Converted SPOT image (GeoTIFF)
    data_paths = data_task.download_results()
    data_results_paths.append([p for p in data_paths if p.endswith(".tif")])

    # Ship detection results (GeoJSON)
    detection_results.append(detection_task.get_results_json())

    # Ship identification results (detections matched with AIS signals)
    identification_paths = identification_task.download_results()
    geojson_path = [p for p in identification_paths if p.endswith(".geojson")][0]
    with open(geojson_path) as f:
        gj = geojson.load(f)
    identification_results.append(gj)


# In[251]:


for i, (paths, detection, identification) in enumerate(zip(data_results_paths,
                                                            detection_results,
                                                            identification_results)):
    with rasterio.open(paths[0]) as src:
        fig, ax = plt.subplots(figsize=(13, 13))

        ships = gpd.GeoDataFrame.from_features(detection, crs="epsg:4326")
        ships = ships.to_crs(epsg=3857)

        ships_ais = gpd.GeoDataFrame.from_features(identification, crs="urn:ogc:def:crs:OGC:1.3:CRS84")
        ships_ais = ships_ais.to_crs(epsg=3857)
        # Exclude ships without a vessel name
        ships_ais = ships_ais[ships_ais["vessel_name"].notnull()]

        ships.plot(ax=ax, facecolor=(0, 0, 0, 0), edgecolor='red', linewidth=2)
        ships_ais.plot(ax=ax, facecolor=(0, 0, 0, 0), edgecolor='orange', linewidth=2)
        show(src.read((1, 2, 3)), transform=src.transform, ax=ax,
             title=f"{aois[i]['title']}: {ships.shape[0]} ships detected, {len(ships_ais.index)} identified")
        plt.show()


# # Summary
#
# In the above visualisations, all ships identified via AIS are shown in orange, while unidentified ships are shown in
# red. This does not necessarily indicate that those red ships are doing anything illegal; they might simply be too
# small, or no AIS signal could be matched because the time window used for the search was too narrow. The point is
# that the red ships are candidate dark vessels - any real-world analysis would need to dive deeper. The analyses
# above only showcase how such a workflow is set up, and we use the two ports for just that purpose - we do not want
# to imply at all that something fishy is going on there.

# In[ ]:
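

# A minimal sketch of how the candidate dark vessels could be singled out programmatically rather
# than visually. It assumes the `ships` (detections) and `ships_ais` (AIS-identified ships)
# GeoDataFrames from the visualisation cell above, both already reprojected to EPSG:3857, and a
# reasonably recent geopandas (>= 0.10, for the `predicate` keyword). The 50 m buffer is an
# arbitrary illustrative tolerance, not a recommended value.

def candidate_dark_vessels(ships, ships_ais, buffer_m=50):
    """Return detected ships whose footprint has no AIS-identified ship nearby."""
    # Buffer the AIS-identified footprints slightly to tolerate small positional offsets
    ais_buffered = ships_ais[["geometry"]].copy()
    ais_buffered["geometry"] = ais_buffered.geometry.buffer(buffer_m)

    # Left spatial join: detections without an AIS match keep a NaN join index
    joined = gpd.sjoin(ships, ais_buffered, how="left", predicate="intersects")
    return joined[joined["index_right"].isna()].drop(columns=["index_right"])


# Example usage (inside the visualisation loop above):
# dark = candidate_dark_vessels(ships, ships_ais)
# print(f"{len(dark)} of {len(ships)} detected ships have no matching AIS signal")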