import numpy as np
import xarray as xr
import pandas as pd
from salishsea_tools import viz_tools, places, visualisations
from matplotlib import pyplot as plt, dates
from datetime import datetime, timedelta
from calendar import month_name
from scipy.io import loadmat
from tqdm.notebook import tqdm
from salishsea_tools import nc_tools
from dask.diagnostics import ProgressBar
import cmocean
%matplotlib inline
plt.rcParams.update({'font.size': 12, 'axes.titlesize': 'medium'})
## Temperature data for original cold and warm years
monthly_array_temp_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask = [mask[var].isel(z=slice(None, 27),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['votemper']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
### 2008 (cold year, CY) original temperature
# Original cold year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.votemper.isel(deptht=0, **slc).values
q2 = q[0,:,:]
monthly_array_temp_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['votemper']:
data[var].append(ds.votemper.isel(deptht=0, **slc).values)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
### 2019 (warm year, WY) original temperature
# Original warm year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load temperature
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.votemper.isel(deptht=0, **slc).values
q2 = q[0,:,:]
monthly_array_temp_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['votemper']:
data[var].append(ds.votemper.isel(deptht=0, **slc).values)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
monthly_array_temp_slice[monthly_array_temp_slice == 0 ] = np.nan
monthly_array_temp_slicemean = \
np.nanmean(np.nanmean(monthly_array_temp_slice, axis = 2),axis = 2)
print(np.shape(monthly_array_temp_slicemean))
(14, 12)
/tmp/ipykernel_2066066/391916811.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_temp_slice, axis = 2),axis = 2)
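This load-and-average pattern repeats for every variable and experiment below; a minimal helper along these lines (hypothetical `load_monthly_surface`, reusing the `slc` window defined above and assuming each monthly file holds a single time record) could replace the copy-pasted loops. Note also that `np.nanmean(arr, axis=(2, 3))` collapses both spatial axes in one call, and the "Mean of empty slice" warning simply reflects the 12 of the 14 years that are never filled and so are all-NaN.

def load_monthly_surface(var, prefix_for_month, months=range(1, 13),
                         file_suffix='_grid_T.nc', slc=slc):
    """Return a (12, 50, 50) array of monthly surface fields for one run.

    prefix_for_month(month) gives the file prefix (hypothetical helper);
    months that are not loaded are left as NaN.
    """
    out = np.full((12, 50, 50), np.nan)
    for month in months:
        with xr.open_dataset(prefix_for_month(month) + file_suffix) as ds:
            # deptht=0 is the surface level; [0] drops the single time record
            out[month - 1] = ds[var].isel(deptht=0, **slc).values[0]
    return out

For example, `load_monthly_surface('votemper', lambda m: f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_2008{m:02d}_2008{m:02d}')` would return the 2008 surface temperature fields.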
## Temperature data for Experiments 1 and 2
monthly_array_temp_exp_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask = [mask[var].isel(z=slice(None, 27),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['votemper']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Add experiment year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load temperature
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.votemper.isel(deptht=0, **slc).values
q2 = q[0,:,:]
            monthly_array_temp_exp_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['votemper']:
data[var].append(ds.votemper.isel(deptht=0, **slc).values)
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load temperature
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.votemper.isel(deptht=0, **slc).values
q2 = q[0,:,:]
            monthly_array_temp_exp_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['votemper']:
data[var].append(ds.votemper.isel(deptht=0, **slc).values)
# Add experiment year: 2019 with 2008 (CY) winds
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load temperature
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.votemper.isel(deptht=0, **slc).values
q2 = q[0,:,:]
            monthly_array_temp_exp_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['votemper']:
data[var].append(ds.votemper.isel(deptht=0, **slc).values)
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load temperature
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.votemper.isel(deptht=0, **slc).values
q2 = q[0,:,:]
            monthly_array_temp_exp_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['votemper']:
data[var].append(ds.votemper.isel(deptht=0, **slc).values)
monthly_array_temp_exp_slice[monthly_array_temp_exp_slice == 0 ] = np.nan
monthly_array_temp_exp_slicemean = \
np.nanmean(np.nanmean(monthly_array_temp_exp_slice, axis = 2),axis = 2)
print(np.shape(monthly_array_temp_exp_slicemean))
(14, 12)
/tmp/ipykernel_2066066/2338418733.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_temp_exp_slice, axis = 2),axis = 2)
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_temp_slicemean[12,:],color='r',linestyle='-',label='Original WY')
ax.plot(xticks, monthly_array_temp_exp_slicemean[12,:],color='k',linestyle='-.',label='WY with CY winds')
ax.set_title('WY SST with CY Winds',fontsize=18)
ax.legend(frameon=False,loc=1)
ax.set_ylim(0,25)
ax.set_ylabel('Degrees C')
Text(0, 0.5, 'Degrees C')
monthly_array_temp_exp_slicemean[12,:]
array([ 6.94768824, 5.03149186, 6.92260036, 10.4741395 , 13.69086769, 17.51762955, 18.32682018, 18.80116005, 16.19822578, 10.88716039, 8.96325978, 6.76106302])
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_temp_slicemean[1,:],color='b',linestyle='-',label='Original CY')
ax.plot(xticks, monthly_array_temp_exp_slicemean[1,:],color='k',linestyle='-.',label='CY with WY winds')
ax.set_title('CY SST with WY Winds',fontsize=18)
ax.legend(frameon=False,loc=1)
ax.set_ylim(0,25)
ax.set_ylabel('Degrees C')
Text(0, 0.5, 'Degrees C')
monthly_array_temp_exp_slicemean[1,:]
array([ 5.59662968, 5.90259706, 7.60633249, 9.04497093, 13.09915475, 16.51518926, 20.73906041, 19.60519318, 16.00876067, 11.95839157, 9.37915939, 6.13304258])
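The comparison figures in the rest of the notebook all share this layout. A minimal plotting helper along these lines (hypothetical `plot_monthly_comparison`, with the month labels and styling taken from the cells above) would keep the later PAR, halocline, nutrient and diatom figures consistent; a usage example is given after the final diatom plot.

MONTHS = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']

def plot_monthly_comparison(orig, exp, title, ylabel, orig_label, exp_label,
                            orig_color='r', ylim=None):
    # orig, exp: length-12 arrays of monthly, regionally averaged values
    fig, ax = plt.subplots(figsize=(15, 3))
    ax.plot(MONTHS, orig, color=orig_color, linestyle='-', label=orig_label)
    ax.plot(MONTHS, exp, color='k', linestyle='-.', label=exp_label)
    ax.set_title(title, fontsize=18)
    ax.legend(frameon=False, loc=1)
    if ylim is not None:
        ax.set_ylim(ylim)
    ax.set_ylabel(ylabel)
    return fig, ax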
## PAR data for original years
monthly_array_PAR_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask = [mask[var].isel(z=slice(None, 27),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['PAR']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
### 2008 (CY) original PAR
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
with xr.open_dataset(prefix + '_carp_T.nc') as ds:
q = ds.PAR.isel(deptht=0, **slc).values
q2 = q[0,:,:]
monthly_array_PAR_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['PAR']:
data[var].append(ds.PAR.isel(deptht=0, **slc).values)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
### 2019 (WY) original PAR
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load PAR
with xr.open_dataset(prefix + '_carp_T.nc') as ds:
q = ds.PAR.isel(deptht=0, **slc).values
q2 = q[0,:,:]
monthly_array_PAR_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['PAR']:
data[var].append(ds.PAR.isel(deptht=0, **slc).values)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
monthly_array_PAR_slice[monthly_array_PAR_slice == 0 ] = np.nan
monthly_array_PAR_slicemean = \
np.nanmean(np.nanmean(monthly_array_PAR_slice, axis = 2),axis = 2)
print(np.shape(monthly_array_PAR_slicemean))
(14, 12)
/tmp/ipykernel_2066066/2771304440.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_PAR_slice, axis = 2),axis = 2)
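An alternative to the explicit month loop is to let xarray open all twelve monthly files at once; a minimal sketch, assuming the files concatenate cleanly along time_counter and that zero values mark land as in the loops above:

files = [f'/data/sallen/results/MEOPAR/v201905r/'
         f'SalishSea_1m_2008{m:02d}_2008{m:02d}_carp_T.nc' for m in range(1, 13)]
with xr.open_mfdataset(files, combine='by_coords') as ds:
    par_surface = ds.PAR.isel(deptht=0, **slc)          # (12, 50, 50)
    par_surface = par_surface.where(par_surface != 0)   # mask land/zero cells
    par_monthly_mean = par_surface.mean(dim=['y', 'x']).values  # 12 regional means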
# PAR data for experiments 1 and 2
monthly_array_PAR_exp_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask = [mask[var].isel(z=slice(None, 27),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['PAR']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Add experiment year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load PAR
with xr.open_dataset(prefix + '_carp_T.nc') as ds:
q = ds.PAR.isel(deptht=0, **slc).values
q2 = q[0,:,:]
            monthly_array_PAR_exp_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['PAR']:
data[var].append(ds.PAR.isel(deptht=0, **slc).values)
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load PAR
with xr.open_dataset(prefix + '_carp_T.nc') as ds:
q = ds.PAR.isel(deptht=0, **slc).values
q2 = q[0,:,:]
            monthly_array_PAR_exp_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['PAR']:
data[var].append(ds.PAR.isel(deptht=0, **slc).values)
# Add experiment year: 2019 with 2008 (CY) winds
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load PAR
with xr.open_dataset(prefix + '_carp_T.nc') as ds:
q = ds.PAR.isel(deptht=0, **slc).values
q2 = q[0,:,:]
            monthly_array_PAR_exp_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['PAR']:
data[var].append(ds.PAR.isel(deptht=0, **slc).values)
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load PAR
with xr.open_dataset(prefix + '_carp_T.nc') as ds:
q = ds.PAR.isel(deptht=0, **slc).values
q2 = q[0,:,:]
            monthly_array_PAR_exp_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['PAR']:
data[var].append(ds.PAR.isel(deptht=0, **slc).values)
monthly_array_PAR_exp_slice[monthly_array_PAR_exp_slice == 0 ] = np.nan
monthly_array_PAR_exp_slicemean = \
np.nanmean(np.nanmean(monthly_array_PAR_exp_slice, axis = 2),axis = 2)
print(np.shape(monthly_array_PAR_exp_slicemean))
(14, 12)
/tmp/ipykernel_2066066/178454329.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_PAR_exp_slice, axis = 2),axis = 2)
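The experiment cells split each year into two half-year loops because January-June and July-December come from different run directories. A minimal sketch of an equivalent single loop for the 2008 PAR experiment (directory names copied from the cells above):

exp_runs_2008 = {**{m: '01jan08_W19' for m in range(1, 7)},
                 **{m: '01jul08_W19' for m in range(7, 13)}}
for month in range(1, 13):
    datestr = f'2008{month:02d}'
    prefix = (f'/data/sallen/results/MEOPAR/Karyn/{exp_runs_2008[month]}/'
              f'SalishSea_1m_{datestr}_{datestr}')
    with xr.open_dataset(prefix + '_carp_T.nc') as ds:
        # index 1 corresponds to 2008 (year - 2007)
        monthly_array_PAR_exp_slice[1, month - 1] = ds.PAR.isel(deptht=0, **slc).values[0]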
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_PAR_slicemean[12,:],color='r',linestyle='-',label='Original WY')
ax.plot(xticks, monthly_array_PAR_exp_slicemean[12,:],color='k',linestyle='-.',label='WY with CY winds')
ax.set_title('WY PAR with CY Winds',fontsize=18)
ax.legend(frameon=False,loc=1)
ax.set_ylim(0,150)
ax.set_ylabel('m$^{-2}$')
Text(0, 0.5, 'm$^{-2}$')
monthly_array_PAR_exp_slicemean[12,:]
array([ 15.33108848, 25.43487056, 53.40414242, 62.65396427, 90.50300235, 103.02960905, 93.25852068, 79.68039158, 51.27869687, 27.85621855, 20.32929689, 8.03467567])
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_PAR_slicemean[1,:],color='b',linestyle='-',label='Original CY')
ax.plot(xticks, monthly_array_PAR_exp_slicemean[1,:],color='k',linestyle='-.',label='CY with WY winds')
ax.set_title('CY PAR with WY Winds',fontsize=18)
ax.legend(frameon=False,loc=1)
ax.set_ylim(0,150)
ax.set_ylabel('m$^{-2}$')
Text(0, 0.5, 'm$^{-2}$')
monthly_array_PAR_exp_slicemean[1,:]
array([ 14.22929443, 25.46042814, 40.97263571, 59.96603881, 84.80457427, 94.07934195, 101.42659579, 80.66420632, 60.70089652, 29.308822 , 13.13657237, 10.14639563])
# Halocline Strength data for original years
monthly_array_halocline_depth_orig_SSslice = np.zeros([14,12,50,50])
monthly_array_halocline_strength_orig_SSslice = np.zeros([14,12,50,50])
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask,depth = [mask[var].isel(**slc).values for var in ('e3t_0', 'tmask','gdept_0')]
years, variables = range(2007, 2021), ['halocline','strength']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Original cold year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load salinity
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.vosaline.isel(deptht=0, **slc).values
q2 = q[0,:,:]
monthly_array_halocline_depth_orig_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
sal=ds.vosaline.isel(**slc).values
#get the gradient in salinity
sal_grad = np.zeros_like(sal)
for i in range(0, (np.shape(sal_grad)[1]-1)):
sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
#print(sal_grad)
loc_max = np.argmax(sal_grad,axis=1)
depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
#halocline is halfway between the two cells
halocline = 0.5*(h1+h2)
strength = (s2-s1)/(h2-h1)
data['halocline'].append(halocline)
data['strength'].append(strength)
monthly_array_halocline_depth_orig_SSslice[year-2007,month-1,:,:]=halocline
monthly_array_halocline_strength_orig_SSslice[year-2007,month-1,:,:]=strength
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Original warm year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load salinity
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.vosaline.isel(deptht=0, **slc).values
q2 = q[0,:,:]
monthly_array_halocline_depth_orig_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
sal=ds.vosaline.isel(**slc).values
#get the gradient in salinity
sal_grad = np.zeros_like(sal)
for i in range(0, (np.shape(sal_grad)[1]-1)):
sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
#print(sal_grad)
loc_max = np.argmax(sal_grad,axis=1)
depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
#halocline is halfway between the two cells
halocline = 0.5*(h1+h2)
strength = (s2-s1)/(h2-h1)
data['halocline'].append(halocline)
data['strength'].append(strength)
monthly_array_halocline_depth_orig_SSslice[year-2007,month-1,:,:]=halocline
monthly_array_halocline_strength_orig_SSslice[year-2007,month-1,:,:]=strength
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
monthly_array_halocline_strength_orig_SSslice[monthly_array_halocline_strength_orig_SSslice == 0 ] = np.nan
monthly_array_halocline_strength_orig_SSslicemean = \
np.nanmean(np.nanmean(monthly_array_halocline_strength_orig_SSslice, axis = 2),axis = 2)
print(np.shape(monthly_array_halocline_strength_orig_SSslicemean))
(14, 12)
/tmp/ipykernel_2066066/1288103633.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_halocline_strength_orig_SSslice, axis = 2),axis = 2)
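The same halocline calculation is repeated for each run and half-year below. As a minimal vectorized sketch of the identical logic (hypothetical `halocline_depth_strength`; note that for these single-record monthly files the `np.tile` calls above are no-ops, so `sal` and `depth` can be indexed directly with `np.take_along_axis`):

def halocline_depth_strength(sal, depth):
    """Depth and strength of the maximum vertical salinity gradient.

    sal:   (t, z, y, x) salinity; depth: (1, z, y, x) gdept_0 from the mesh mask.
    Returns (halocline_depth, strength), each (t, 1, y, x), using the same
    cell-interface convention as the loops above.
    """
    # forward-difference salinity gradient between adjacent cells
    sal_grad = (sal[:, :-1] - sal[:, 1:]) / (depth[:, :-1] - depth[:, 1:])
    loc_max = np.argmax(sal_grad, axis=1)      # level index of the strongest gradient
    idx = np.expand_dims(loc_max, axis=1)
    depth_b = np.broadcast_to(depth, sal.shape)
    h1 = np.take_along_axis(depth_b, idx, axis=1)
    h2 = np.take_along_axis(depth_b, idx + 1, axis=1)
    s1 = np.take_along_axis(sal, idx, axis=1)
    s2 = np.take_along_axis(sal, idx + 1, axis=1)
    # halocline is halfway between the two cells; strength is the local gradient
    return 0.5 * (h1 + h2), (s2 - s1) / (h2 - h1)

Inside the month loop it would be called as `halocline, strength = halocline_depth_strength(ds.vosaline.isel(**slc).values, depth)`.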
# Halocline data for Experiments 1 and 2
monthly_array_halocline_depth_SSslice = np.zeros([14,12,50,50])
monthly_array_halocline_strength_SSslice = np.zeros([14,12,50,50])
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask,depth = [mask[var].isel(**slc).values for var in ('e3t_0', 'tmask','gdept_0')]
years, variables = range(2007, 2021), ['halocline','strength']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Add experiment year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load salinity
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.vosaline.isel(deptht=0, **slc).values
q2 = q[0,:,:]
monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
sal=ds.vosaline.isel(**slc).values
#get the gradient in salinity
sal_grad = np.zeros_like(sal)
for i in range(0, (np.shape(sal_grad)[1]-1)):
sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
#print(sal_grad)
loc_max = np.argmax(sal_grad,axis=1)
depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
#halocline is halfway between the two cells
halocline = 0.5*(h1+h2)
strength = (s2-s1)/(h2-h1)
data['halocline'].append(halocline)
data['strength'].append(strength)
monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:]=halocline
monthly_array_halocline_strength_SSslice[year-2007,month-1,:,:]=strength
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load salinity
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.vosaline.isel(deptht=0, **slc).values
q2 = q[0,:,:]
monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
sal=ds.vosaline.isel(**slc).values
#get the gradient in salinity
sal_grad = np.zeros_like(sal)
for i in range(0, (np.shape(sal_grad)[1]-1)):
sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
#print(sal_grad)
loc_max = np.argmax(sal_grad,axis=1)
depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
#halocline is halfway between the two cells
halocline = 0.5*(h1+h2)
strength = (s2-s1)/(h2-h1)
data['halocline'].append(halocline)
data['strength'].append(strength)
monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:]=halocline
monthly_array_halocline_strength_SSslice[year-2007,month-1,:,:]=strength
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load salinity
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.vosaline.isel(deptht=0, **slc).values
q2 = q[0,:,:]
monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
sal=ds.vosaline.isel(**slc).values
#get the gradient in salinity
sal_grad = np.zeros_like(sal)
for i in range(0, (np.shape(sal_grad)[1]-1)):
sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
#print(sal_grad)
loc_max = np.argmax(sal_grad,axis=1)
depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
#halocline is halfway between the two cells
halocline = 0.5*(h1+h2)
strength = (s2-s1)/(h2-h1)
data['halocline'].append(halocline)
data['strength'].append(strength)
monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:]=halocline
monthly_array_halocline_strength_SSslice[year-2007,month-1,:,:]=strength
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load salinity
with xr.open_dataset(prefix + '_grid_T.nc') as ds:
q = ds.vosaline.isel(deptht=0, **slc).values
q2 = q[0,:,:]
monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
sal=ds.vosaline.isel(**slc).values
#get the gradient in salinity
sal_grad = np.zeros_like(sal)
for i in range(0, (np.shape(sal_grad)[1]-1)):
sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
#print(sal_grad)
loc_max = np.argmax(sal_grad,axis=1)
depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
#halocline is halfway between the two cells
halocline = 0.5*(h1+h2)
strength = (s2-s1)/(h2-h1)
data['halocline'].append(halocline)
data['strength'].append(strength)
monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:]=halocline
monthly_array_halocline_strength_SSslice[year-2007,month-1,:,:]=strength
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
monthly_array_halocline_strength_SSslice[monthly_array_halocline_strength_SSslice == 0 ] = np.nan
monthly_array_halocline_strength_SSslicemean = \
np.nanmean(np.nanmean(monthly_array_halocline_strength_SSslice, axis = 2),axis = 2)
print(np.shape(monthly_array_halocline_strength_SSslicemean))
(14, 12)
/tmp/ipykernel_2066066/3661973807.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_halocline_strength_SSslice, axis = 2),axis = 2)
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_halocline_strength_orig_SSslicemean[12,:],color='r',linestyle='-',label='Original 2019')
ax.plot(xticks, monthly_array_halocline_strength_SSslicemean[12,:],color='k',linestyle='-.',label='2019 with 2008 winds')
ax.set_title('WY Halocline Strength with CY Winds',fontsize=18)
ax.legend(frameon=False,loc=1)
ax.set_ylim(0,5)
ax.set_ylabel('g/kg m$^{-1}$')
Text(0, 0.5, 'g/kg m$^{-1}$')
monthly_array_halocline_strength_SSslicemean[12,:]
array([0.27275877, 0.43238446, 0.42162363, 1.29093306, 1.82034273, 2.08958861, 1.36382405, 1.45949038, 1.19413448, 1.26065329, 0.80995226, 0.6541769 ])
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_halocline_strength_orig_SSslicemean[1,:],color='b',linestyle='-',label='Original CY')
ax.plot(xticks, monthly_array_halocline_strength_SSslicemean[1,:],color='k',linestyle='-.',label='CY with WY winds')
ax.set_title('CY Halocline Strength with WY Winds',fontsize=18)
ax.legend(frameon=False,loc=1)
ax.set_ylim(0,5)
ax.set_ylabel('g/kg m$^{-1}$')
Text(0, 0.5, 'g/kg m$^{-1}$')
### Nitrate data for original cold and warm years
monthly_array_nitrate_orig_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask = [mask[var].isel(z=slice(None, 10),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['nitrate']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Original cold year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load nitrate
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_nitrate_orig_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Original warm year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load nitrate
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_nitrate_orig_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
monthly_array_nitrate_orig_slice[monthly_array_nitrate_orig_slice == 0 ] = np.nan
monthly_array_nitrate_orig_slicemean = \
np.nanmean(np.nanmean(monthly_array_nitrate_orig_slice, axis = 2),axis = 2)
print(np.shape(monthly_array_nitrate_orig_slicemean))
(14, 12)
/tmp/ipykernel_2066066/3312634990.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_nitrate_orig_slice, axis = 2),axis = 2)
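For reference, the quantity stored here is a thickness-weighted mean concentration over the top 10 model levels (roughly the upper 10 m), not a depth integral, so its natural units are mmol m$^{-3}$:

$$\overline{C}(y,x) = \frac{\sum_{k=0}^{9} C(k,y,x)\, e3t_0(k,y,x)\, tmask(k,y,x)}{\sum_{k=0}^{9} e3t_0(k,y,x)\, tmask(k,y,x)}$$

or, as a minimal numpy sketch with `conc` standing in for the (1, 10, 50, 50) tracer slab loaded above:

conc_avg = (conc * e3t * tmask).sum(axis=1) / (e3t * tmask).sum(axis=1)  # (1, 50, 50), mmol m-3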
### Silicon data for original cold and warm years
monthly_array_silicon_orig_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask = [mask[var].isel(z=slice(None, 10),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['silicon']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Original cold year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load silicon
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_silicon_orig_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Original warm year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load silicon
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_silicon_orig_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
monthly_array_silicon_orig_slice[monthly_array_silicon_orig_slice == 0 ] = np.nan
monthly_array_silicon_orig_slicemean = \
np.nanmean(np.nanmean(monthly_array_silicon_orig_slice, axis = 2),axis = 2)
print(np.shape(monthly_array_silicon_orig_slicemean))
(14, 12)
/tmp/ipykernel_2066066/241793216.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_silicon_orig_slice, axis = 2),axis = 2)
### Nitrate data for Experiments 1 and 2
monthly_array_nitrate_depthint_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask = [mask[var].isel(z=slice(None, 10),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['nitrate']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Add experiment year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load nitrate
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_nitrate_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load nitrate
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_nitrate_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load nitrate
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_nitrate_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load nitrate
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_nitrate_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
monthly_array_nitrate_depthint_slice[monthly_array_nitrate_depthint_slice == 0 ] = np.nan
monthly_array_nitrate_depthint_slicemean = \
np.nanmean(np.nanmean(monthly_array_nitrate_depthint_slice, axis = 2),axis = 2)
print(np.shape(monthly_array_nitrate_depthint_slicemean))
(14, 12)
/tmp/ipykernel_2066066/231329215.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_nitrate_depthint_slice, axis = 2),axis = 2)
### Silicon data for Experiments 1 and 2
monthly_array_silicon_depthint_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask = [mask[var].isel(z=slice(None, 10),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['silicon']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Add experiment year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load silicon
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_silicon_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load silicon
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_silicon_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load silicon
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_silicon_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load silicon
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
            monthly_array_silicon_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
monthly_array_silicon_depthint_slice[monthly_array_silicon_depthint_slice == 0 ] = np.nan
monthly_array_silicon_depthint_slicemean = \
np.nanmean(np.nanmean(monthly_array_silicon_depthint_slice, axis = 2),axis = 2)
print(np.shape(monthly_array_silicon_depthint_slicemean))
(14, 12)
/tmp/ipykernel_2066066/3737416097.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_silicon_depthint_slice, axis = 2),axis = 2)
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_nitrate_orig_slicemean[12,:],color='r',linestyle='-',label='Original 2019')
ax.plot(xticks, monthly_array_nitrate_depthint_slicemean[12,:],color='k',linestyle='-.',label='2019 with 2008 winds')
ax.set_title('WY Nitrate with CY Winds',fontsize=18)
ax.legend(frameon=False,loc=4)
ax.set_ylim(0,30)
ax.set_ylabel('mmol N m$^{-3}$')
Text(0, 0.5, 'mmol N m$^{-3}$')
monthly_array_nitrate_orig_slicemean[12,:]
array([23.64834195, 22.38691698, 16.99383033, 7.63730902, 4.96273598, 1.56547187, 1.23107567, 1.5230891 , 7.95614177, 16.79074658, 19.52305174, 21.62759183])
monthly_array_nitrate_depthint_slicemean[12,:]
array([23.80685063, 22.63941779, 18.68073904, 6.54336744, 5.49402545, 1.67369885, 2.81440205, 3.81694601, 11.47654414, 19.18559774, 21.74671523, 22.24619177])
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_silicon_orig_slicemean[12,:],color='r',linestyle='-',label='Original 2019')
ax.plot(xticks, monthly_array_silicon_depthint_slicemean[12,:],color='k',linestyle='-.',label='2019 with 2008 winds')
ax.set_title('WY Silicon with CY Winds',fontsize=18)
ax.legend(frameon=False,loc=4)
ax.set_ylim(0,60)
ax.set_ylabel('mmol Si m$^{-3}$')
Text(0, 0.5, 'mmol Si m$^{-3}$')
monthly_array_silicon_orig_slicemean[12,:]
array([48.06238141, 49.17238747, 41.59249494, 18.0018168 , 5.46358173, 12.1107789 , 19.87997362, 28.03714477, 36.00806573, 41.96864256, 44.54149611, 47.0336623 ])
monthly_array_silicon_depthint_slicemean[12,:]
array([47.79922893, 49.19336588, 44.03685091, 16.95528149, 8.04410933, 11.85676838, 23.22130531, 29.99828923, 38.12645109, 42.5020271 , 45.51880128, 47.52354357])
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_nitrate_orig_slicemean[1,:],color='b',linestyle='-',label='Original 2008')
ax.plot(xticks, monthly_array_nitrate_depthint_slicemean[1,:],color='k',linestyle='-.',label='2008 with 2019 winds')
ax.set_title('CY Nitrate with WY Winds',fontsize=18)
ax.legend(frameon=False,loc=4)
ax.set_ylim(0,30)
ax.set_ylabel('mmol N m$^{-3}$')
Text(0, 0.5, 'mmol N m$^{-3}$')
monthly_array_nitrate_depthint_slicemean[1,:]
array([24.13891096, 23.43445999, 20.16081944, 9.53873272, 5.7397893 , 2.06754755, 0.95348431, 2.25127683, 8.106326 , 16.05059715, 20.61628598, 23.32419151])
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_silicon_orig_slicemean[1,:],color='b',linestyle='-',label='Original 2008')
ax.plot(xticks, monthly_array_silicon_depthint_slicemean[1,:],color='k',linestyle='-.',label='2008 with 2019 winds')
ax.set_title('CY Silicon with WY Winds',fontsize=18)
ax.legend(frameon=False,loc=4)
ax.set_ylim(0,60)
ax.set_ylabel('mmol Si m$^{-3}$')
Text(0, 0.5, 'mmol Si m$^{-3}$')
monthly_array_silicon_depthint_slicemean[1,:]
array([50.06105751, 51.39125156, 49.65665279, 23.79865366, 8.11240424, 10.9729251 , 13.07850414, 11.59949357, 21.74708727, 35.04296807, 42.83359454, 47.61766334])
### Diatom data for original years
monthly_array_diatoms_orig_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask = [mask[var].isel(z=slice(None, 27),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['diatoms']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
### 2008 (CY) original diatoms
# Original cold year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load diatoms
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
            monthly_array_diatoms_orig_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
### 2019 (WY) original diatoms
# Original warm year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load diatoms
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
            monthly_array_diatoms_orig_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
monthly_array_diatoms_orig_slice[monthly_array_diatoms_orig_slice == 0 ] = np.nan
monthly_array_diatoms_orig_slicemean = \
np.nanmean(np.nanmean(monthly_array_diatoms_orig_slice, axis = 2),axis = 2)
print(np.shape(monthly_array_diatoms_orig_slicemean))
(14, 12)
/tmp/ipykernel_2066066/3403781678.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_diatoms_orig_slice, axis = 2),axis = 2)
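Unlike the nutrient cells, the diatom field is depth-integrated rather than depth-averaged: concentration times layer thickness is summed over the top 27 model levels (the 0-100 m range quoted in the figure titles below), with land excluded by the masking above, giving a column inventory in mmol N m$^{-2}$:

$$B(y,x) = \sum_{k=0}^{26} D(k,y,x)\, e3t_0(k,y,x)$$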
### Diatom data for Experiments 1 and 2
# array dimensions: years, months, y, x
monthly_array_diatoms_depthint_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
e3t, tmask = [mask[var].isel(z=slice(None, 27),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['diatoms']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Add experiment year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load diatoms
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
            monthly_array_diatoms_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
# Add experiment year
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_W19/SalishSea_1m_{datestr}_{datestr}'
        # Load diatoms
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
            monthly_array_diatoms_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
# Add experiment year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load diatoms
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
            monthly_array_diatoms_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
# Add experiment year
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_W08/SalishSea_1m_{datestr}_{datestr}'
        # Load diatoms
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
            monthly_array_diatoms_depthint_slice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
monthly_array_diatoms_depthint_slice[monthly_array_diatoms_depthint_slice == 0 ] = np.nan
monthly_array_diatoms_depthint_slicemean = \
np.nanmean(np.nanmean(monthly_array_diatoms_depthint_slice, axis = 2),axis = 2)
print(np.shape(monthly_array_diatoms_depthint_slicemean))
(14, 12)
/tmp/ipykernel_2066066/2320522072.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_diatoms_depthint_slice, axis = 2),axis = 2)
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_diatoms_orig_slicemean[12,:],color='r',linestyle='-',label='Original 2019')
ax.plot(xticks, monthly_array_diatoms_depthint_slicemean[12,:],color='k',linestyle='-.',label='2019 with 2008 winds')
ax.set_title('WY Diatoms (0-100 m) with CY Winds',fontsize=18)
ax.legend(frameon=False,loc=1)
ax.set_ylim(0,50)
ax.set_ylabel('mmol N m$^{-2}$')
Text(0, 0.5, 'mmol N m$^{-2}$')
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
ax.plot(xticks, monthly_array_diatoms_orig_slicemean[1,:],color='b',linestyle='-',label='Original 2008')
ax.plot(xticks, monthly_array_diatoms_depthint_slicemean[1,:],color='k',linestyle='-.',label='2008 with 2019 winds')
ax.set_title('CY Diatoms (0-100 m) with WY Winds',fontsize=18)
ax.legend(frameon=False,loc=1)
ax.set_ylim(0,50)
ax.set_ylabel('mmol N m$^{-2}$')
Text(0, 0.5, 'mmol N m$^{-2}$')
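As a usage sketch, this last comparison could equivalently be drawn with the hypothetical plot_monthly_comparison helper outlined after the temperature figures:

plot_monthly_comparison(monthly_array_diatoms_orig_slicemean[1],
                        monthly_array_diatoms_depthint_slicemean[1],
                        'CY Diatoms (0-100 m) with WY Winds', 'mmol N m$^{-2}$',
                        'Original 2008', '2008 with 2019 winds',
                        orig_color='b', ylim=(0, 50))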