import numpy as np
import xarray as xr
import pandas as pd
from salishsea_tools import viz_tools, places, visualisations
from matplotlib import pyplot as plt, dates
from datetime import datetime, timedelta
from calendar import month_name
from scipy.io import loadmat
from tqdm.notebook import tqdm
from salishsea_tools import nc_tools
from dask.diagnostics import ProgressBar
import cmocean
# Render figures inline in the notebook (IPython magic; not valid plain Python).
%matplotlib inline
# Global plot styling: base font size and medium-sized axis titles.
plt.rcParams.update({'font.size': 12, 'axes.titlesize': 'medium'})
## Data for original cold and warm years
# Accumulator: (year index 2007..2020, month, y, x) surface temperature.
monthly_array_temp_slice = np.zeros((14, 12, 50, 50))
# Model mesh mask (grid geometry and land/sea mask).
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
# Sub-domain window in grid indices.
slc = {'y': slice(450, 500), 'x': slice(250, 300)}
e3t = mask['e3t_0'].isel(z=slice(None, 27), **slc).values
tmask = mask['tmask'].isel(z=slice(None, 27), **slc).values
years = range(2007, 2021)
variables = ['votemper']
# Temporary per-month lists, keyed by variable name.
data = {}
# Permanent aggregates, keyed by variable then year.
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
### 2008 original temp
# Monthly-mean surface temperature for the base (v201905r) run, year 2008.
for year in [2008]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(1, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            surface = ds.votemper.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_temp_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['votemper']:
                data[var].append(ds.votemper.isel(deptht=0, **slc).values)
    # Stack the months and average over the time axis.
    for var in variables:
        aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
### 2019 original
# Monthly-mean surface temperature for the base (v201905r) run, year 2019.
for year in [2019]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(1, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            surface = ds.votemper.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_temp_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['votemper']:
                data[var].append(ds.votemper.isel(deptht=0, **slc).values)
    # Stack the months and average over the time axis.
    for var in variables:
        aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Zeros mark land cells / never-filled years; treat them as missing.
monthly_array_temp_slice[monthly_array_temp_slice == 0] = np.nan
# Spatial mean over y then x -> (year, month). Years with no data are
# all-NaN and emit a "Mean of empty slice" RuntimeWarning (expected).
_over_y = np.nanmean(monthly_array_temp_slice, axis=2)
monthly_array_temp_slicemean = np.nanmean(_over_y, axis=2)
print(np.shape(monthly_array_temp_slicemean))
(14, 12)
/tmp/ipykernel_3484198/391916811.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_temp_slice, axis = 2),axis = 2)
## Data for Experiments 1 and 2
# Accumulator: (year index 2007..2020, month, y, x) surface temperature.
monthly_array_temp_exp_slice = np.zeros((14, 12, 50, 50))
# Model mesh mask (grid geometry and land/sea mask).
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
# Sub-domain window in grid indices.
slc = {'y': slice(450, 500), 'x': slice(250, 300)}
e3t = mask['e3t_0'].isel(z=slice(None, 27), **slc).values
tmask = mask['tmask'].isel(z=slice(None, 27), **slc).values
years = range(2007, 2021)
variables = ['votemper']
# Temporary per-month lists, keyed by variable name.
data = {}
# Permanent aggregates, keyed by variable then year.
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Add experiment year
# Experiment run 01jan08_tsc: Jan-Jun 2008 surface temperature.
for year in [2008]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(1, 7):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_tsc/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            surface = ds.votemper.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_temp_exp_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['votemper']:
                data[var].append(ds.votemper.isel(deptht=0, **slc).values)
# Experiment run 01jul08_tsc: Jul-Dec 2008 surface temperature.
for year in [2008]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(7, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_tsc/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            surface = ds.votemper.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_temp_exp_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['votemper']:
                data[var].append(ds.votemper.isel(deptht=0, **slc).values)
###
## Add experiment year
# Experiment run 01jan19_tsc: Jan-Jun 2019 surface temperature.
for year in [2019]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(1, 7):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_tsc/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            surface = ds.votemper.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_temp_exp_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['votemper']:
                data[var].append(ds.votemper.isel(deptht=0, **slc).values)
# Experiment run 01jul19_tsc: Jul-Dec 2019 surface temperature.
for year in [2019]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(7, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_tsc/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            surface = ds.votemper.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_temp_exp_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['votemper']:
                data[var].append(ds.votemper.isel(deptht=0, **slc).values)
# Zeros mark land cells / never-filled years; treat them as missing.
monthly_array_temp_exp_slice[monthly_array_temp_exp_slice == 0] = np.nan
# Spatial mean over y then x -> (year, month). Years with no data are
# all-NaN and emit a "Mean of empty slice" RuntimeWarning (expected).
_over_y = np.nanmean(monthly_array_temp_exp_slice, axis=2)
monthly_array_temp_exp_slicemean = np.nanmean(_over_y, axis=2)
print(np.shape(monthly_array_temp_exp_slicemean))
(14, 12)
/tmp/ipykernel_3484198/2338418733.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_temp_exp_slice, axis = 2),axis = 2)
## WY surface temperature: base run vs. threshold experiment.
# (Removed unused locals bbox, cmap, palette from the original cell.)
fig, ax = plt.subplots(figsize=(15, 3))
month_labels = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
# Row 12 -> year 2019 (the warm year, WY).
ax.plot(month_labels, monthly_array_temp_slicemean[12,:], color='r', linestyle='-', label='Original WY')
ax.plot(month_labels, monthly_array_temp_exp_slicemean[12,:], color='k', linestyle='-.', label='WY with CY increased thresh')
ax.set_title('WY SST with CY Increased Threshold', fontsize=18)
ax.legend(frameon=False, loc=1)
ax.set_ylim(0, 25)
ax.set_ylabel('Degrees C')
Text(0, 0.5, 'Degrees C')
monthly_array_temp_exp_slicemean[12,:]
array([ 6.67567996, 5.16967481, 7.29056174, 10.17487312, 14.0136752 , 17.62500597, 20.65200325, 20.13192256, 16.657524 , 11.60955676, 8.68565119, 6.48617353])
## CY surface temperature: base run vs. threshold experiment.
# (Removed unused locals bbox, cmap, palette from the original cell.)
fig, ax = plt.subplots(figsize=(15, 3))
month_labels = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
# Row 1 -> year 2008 (the cold year, CY).
ax.plot(month_labels, monthly_array_temp_slicemean[1,:], color='b', linestyle='-', label='Original CY')
ax.plot(month_labels, monthly_array_temp_exp_slicemean[1,:], color='k', linestyle='-.', label='CY with WY increased thresh')
ax.set_title('CY SST with WY Increased Threshold', fontsize=18)
ax.legend(frameon=False, loc=1)
ax.set_ylim(0, 25)
ax.set_ylabel('Degrees C')
Text(0, 0.5, 'Degrees C')
monthly_array_temp_exp_slicemean[1,:]
array([ 6.15109604, 5.8556036 , 7.47975049, 9.10203945, 12.70656031, 16.08675342, 18.27222176, 18.2109748 , 15.41443395, 11.9993017 , 9.32771591, 6.42471353])
## PAR data for original years
# Accumulator: (year index 2007..2020, month, y, x) surface PAR.
monthly_array_PAR_slice = np.zeros((14, 12, 50, 50))
# Model mesh mask (grid geometry and land/sea mask).
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
# Sub-domain window in grid indices.
slc = {'y': slice(450, 500), 'x': slice(250, 300)}
e3t = mask['e3t_0'].isel(z=slice(None, 27), **slc).values
tmask = mask['tmask'].isel(z=slice(None, 27), **slc).values
years = range(2007, 2021)
variables = ['PAR']
# Temporary per-month lists, keyed by variable name.
data = {}
# Permanent aggregates, keyed by variable then year.
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
### 2008 original PAR
# Monthly-mean surface PAR for the base (v201905r) run, year 2008.
for year in [2008]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(1, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_carp_T.nc') as ds:
            surface = ds.PAR.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_PAR_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['PAR']:
                data[var].append(ds.PAR.isel(deptht=0, **slc).values)
    # Stack the months and average over the time axis.
    for var in variables:
        aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
### 2019 original
# Monthly-mean surface PAR for the base (v201905r) run, year 2019.
for year in [2019]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(1, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_carp_T.nc') as ds:
            surface = ds.PAR.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_PAR_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['PAR']:
                data[var].append(ds.PAR.isel(deptht=0, **slc).values)
    # Stack the months and average over the time axis.
    for var in variables:
        aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Zeros mark land cells / never-filled years; treat them as missing.
monthly_array_PAR_slice[monthly_array_PAR_slice == 0] = np.nan
# Spatial mean over y then x -> (year, month). Years with no data are
# all-NaN and emit a "Mean of empty slice" RuntimeWarning (expected).
_over_y = np.nanmean(monthly_array_PAR_slice, axis=2)
monthly_array_PAR_slicemean = np.nanmean(_over_y, axis=2)
print(np.shape(monthly_array_PAR_slicemean))
(14, 12)
/tmp/ipykernel_3484198/2771304440.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_PAR_slice, axis = 2),axis = 2)
# PAR data for experiments 1 and 2
# Accumulator: (year index 2007..2020, month, y, x) surface PAR.
monthly_array_PAR_exp_slice = np.zeros((14, 12, 50, 50))
# Model mesh mask (grid geometry and land/sea mask).
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
# Sub-domain window in grid indices.
slc = {'y': slice(450, 500), 'x': slice(250, 300)}
e3t = mask['e3t_0'].isel(z=slice(None, 27), **slc).values
tmask = mask['tmask'].isel(z=slice(None, 27), **slc).values
years = range(2007, 2021)
variables = ['PAR']
# Temporary per-month lists, keyed by variable name.
data = {}
# Permanent aggregates, keyed by variable then year.
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Add experiment year
# Experiment run 01jan08_tsc: Jan-Jun 2008 surface PAR.
for year in [2008]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(1, 7):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_tsc/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_carp_T.nc') as ds:
            surface = ds.PAR.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_PAR_exp_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['PAR']:
                data[var].append(ds.PAR.isel(deptht=0, **slc).values)
# Experiment run 01jul08_tsc: Jul-Dec 2008 surface PAR.
for year in [2008]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(7, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_tsc/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_carp_T.nc') as ds:
            surface = ds.PAR.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_PAR_exp_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['PAR']:
                data[var].append(ds.PAR.isel(deptht=0, **slc).values)
###
# Add experiment year
# Experiment run 01jan19_tsc: Jan-Jun 2019 surface PAR.
for year in [2019]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(1, 7):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_tsc/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_carp_T.nc') as ds:
            surface = ds.PAR.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_PAR_exp_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['PAR']:
                data[var].append(ds.PAR.isel(deptht=0, **slc).values)
# Experiment run 01jul19_tsc: Jul-Dec 2019 surface PAR.
for year in [2019]:
    # Fresh per-month lists for this year.
    for var in variables:
        data[var] = []
    for month in range(7, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_tsc/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_carp_T.nc') as ds:
            surface = ds.PAR.isel(deptht=0, **slc).values
            # Drop the singleton time axis; year 2007 maps to index 0.
            monthly_array_PAR_exp_slice[year - 2007, month - 1, :, :] = surface[0, :, :]
            for var in ['PAR']:
                data[var].append(ds.PAR.isel(deptht=0, **slc).values)
# Zeros mark land cells / never-filled years; treat them as missing.
monthly_array_PAR_exp_slice[monthly_array_PAR_exp_slice == 0] = np.nan
# Spatial mean over y then x -> (year, month). Years with no data are
# all-NaN and emit a "Mean of empty slice" RuntimeWarning (expected).
_over_y = np.nanmean(monthly_array_PAR_exp_slice, axis=2)
monthly_array_PAR_exp_slicemean = np.nanmean(_over_y, axis=2)
print(np.shape(monthly_array_PAR_exp_slicemean))
(14, 12)
/tmp/ipykernel_3484198/178454329.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_PAR_exp_slice, axis = 2),axis = 2)
## WY surface PAR: base run vs. threshold experiment.
# (Removed unused locals bbox, cmap, palette from the original cell.)
fig, ax = plt.subplots(figsize=(15, 3))
month_labels = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
# Row 12 -> year 2019 (the warm year, WY).
ax.plot(month_labels, monthly_array_PAR_slicemean[12,:], color='r', linestyle='-', label='Original WY')
ax.plot(month_labels, monthly_array_PAR_exp_slicemean[12,:], color='k', linestyle='-.', label='WY with CY increased thresh')
ax.set_title('WY PAR with CY Increased Threshold', fontsize=18)
ax.legend(frameon=False, loc=1)
ax.set_ylim(0, 150)
ax.set_ylabel('m$^{-2}$')
Text(0, 0.5, 'm$^{-2}$')
monthly_array_PAR_exp_slicemean[12,:]
array([ 15.32172431, 25.46445341, 53.22065648, 62.60315202, 90.71975931, 103.29945894, 95.21548723, 81.05727718, 51.34878977, 31.48643708, 20.30787763, 8.03411984])
## CY surface PAR: base run vs. threshold experiment.
# (Removed unused locals bbox, cmap, palette from the original cell.)
fig, ax = plt.subplots(figsize=(15, 3))
month_labels = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']
# Row 1 -> year 2008 (the cold year, CY).
ax.plot(month_labels, monthly_array_PAR_slicemean[1,:], color='b', linestyle='-', label='Original CY')
ax.plot(month_labels, monthly_array_PAR_exp_slicemean[1,:], color='k', linestyle='-.', label='CY with WY increased thresh')
ax.set_title('CY PAR with WY Increased Threshold', fontsize=18)
ax.legend(frameon=False, loc=1)
ax.set_ylim(0, 150)
ax.set_ylabel('m$^{-2}$')
Text(0, 0.5, 'm$^{-2}$')
monthly_array_PAR_exp_slicemean[1,:]
array([ 14.23694213, 25.42996031, 41.18085163, 59.76610693, 84.90226449, 94.06975087, 100.70847597, 79.87635294, 60.07499117, 29.22549197, 13.13997812, 10.14688224])
# Halocline Strength data for original years
# Accumulators: (year index 2007..2020, month, y, x).
monthly_array_halocline_depth_orig_SSslice = np.zeros((14, 12, 50, 50))
monthly_array_halocline_strength_orig_SSslice = np.zeros((14, 12, 50, 50))
# Model mesh mask; full depth range needed for the halocline search.
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
# Sub-domain window in grid indices.
slc = {'y': slice(450, 500), 'x': slice(250, 300)}
e3t = mask['e3t_0'].isel(**slc).values
tmask = mask['tmask'].isel(**slc).values
depth = mask['gdept_0'].isel(**slc).values
years = range(2007, 2021)
variables = ['halocline', 'strength']
# Temporary per-month lists, keyed by variable name.
data = {}
# Permanent aggregates, keyed by variable then year.
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Add experiment year
# Base-run (v201905r) halocline depth and strength for 2008 over the sub-domain.
for year in [2008]:
    # Initialize lists
    for var in variables: data[var] = []
    # Load monthly averages
    for month in range(1, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load grazing variables
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            # Surface salinity -- stored here only as a placeholder; it is
            # overwritten with the halocline depth at the end of this iteration.
            q = ds.vosaline.isel(deptht=0, **slc).values
            q2 = q[0,:,:]
            monthly_array_halocline_depth_orig_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
            # Full-depth salinity; presumably (time, depth, y, x) -- TODO confirm.
            sal=ds.vosaline.isel(**slc).values
            #get the gradient in salinity
            # Forward difference between adjacent depth levels; the deepest
            # level is left at zero.
            sal_grad = np.zeros_like(sal)
            for i in range(0, (np.shape(sal_grad)[1]-1)):
                sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
            #print(sal_grad)
            # Depth index of the sharpest salinity gradient at each grid point.
            loc_max = np.argmax(sal_grad,axis=1)
            depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
            # Depths of the two cells bracketing the maximum-gradient interface.
            h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
            h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
            sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
            # Salinities just above and just below the interface.
            s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
            s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
            #halocline is halfway between the two cells
            halocline = 0.5*(h1+h2)
            # Strength: salinity change per metre across the interface.
            strength = (s2-s1)/(h2-h1)
            data['halocline'].append(halocline)
            data['strength'].append(strength)
            monthly_array_halocline_depth_orig_SSslice[year-2007,month-1,:,:]=halocline
            monthly_array_halocline_strength_orig_SSslice[year-2007,month-1,:,:]=strength
    # Concatenate months
    for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
# Base-run (v201905r) halocline depth and strength for 2019 over the sub-domain.
for year in [2019]:
    # Initialize lists
    for var in variables: data[var] = []
    # Load monthly averages
    for month in range(1, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
        # Load grazing variables
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            # Surface salinity -- stored here only as a placeholder; it is
            # overwritten with the halocline depth at the end of this iteration.
            q = ds.vosaline.isel(deptht=0, **slc).values
            q2 = q[0,:,:]
            monthly_array_halocline_depth_orig_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
            # Full-depth salinity; presumably (time, depth, y, x) -- TODO confirm.
            sal=ds.vosaline.isel(**slc).values
            #get the gradient in salinity
            # Forward difference between adjacent depth levels; the deepest
            # level is left at zero.
            sal_grad = np.zeros_like(sal)
            for i in range(0, (np.shape(sal_grad)[1]-1)):
                sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
            #print(sal_grad)
            # Depth index of the sharpest salinity gradient at each grid point.
            loc_max = np.argmax(sal_grad,axis=1)
            depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
            # Depths of the two cells bracketing the maximum-gradient interface.
            h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
            h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
            sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
            # Salinities just above and just below the interface.
            s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
            s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
            #halocline is halfway between the two cells
            halocline = 0.5*(h1+h2)
            # Strength: salinity change per metre across the interface.
            strength = (s2-s1)/(h2-h1)
            data['halocline'].append(halocline)
            data['strength'].append(strength)
            monthly_array_halocline_depth_orig_SSslice[year-2007,month-1,:,:]=halocline
            monthly_array_halocline_strength_orig_SSslice[year-2007,month-1,:,:]=strength
    # Concatenate months
    for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# # Calculate 5 year mean and anomalies
# for var in variables:
#     aggregates[var][‘mean’] = np.concatenate([aggregates[var][year][None, ...] for year in years]).mean(axis=0)
#     for year in years: aggregates[var][year] = aggregates[var][year] - aggregates[var][‘mean’]
# Zeros mark land cells / never-filled years; treat them as missing.
monthly_array_halocline_strength_orig_SSslice[monthly_array_halocline_strength_orig_SSslice == 0] = np.nan
# Spatial mean over y then x -> (year, month). Years with no data are
# all-NaN and emit a "Mean of empty slice" RuntimeWarning (expected).
_over_y = np.nanmean(monthly_array_halocline_strength_orig_SSslice, axis=2)
monthly_array_halocline_strength_orig_SSslicemean = np.nanmean(_over_y, axis=2)
print(np.shape(monthly_array_halocline_strength_orig_SSslicemean))
(14, 12)
/tmp/ipykernel_3484198/1288103633.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_halocline_strength_orig_SSslice, axis = 2),axis = 2)
# Data for Experiments 1 and 2
# Accumulators: (year index 2007..2020, month, y, x).
monthly_array_halocline_depth_SSslice = np.zeros((14, 12, 50, 50))
monthly_array_halocline_strength_SSslice = np.zeros((14, 12, 50, 50))
# Model mesh mask; full depth range needed for the halocline search.
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
# Sub-domain window in grid indices.
slc = {'y': slice(450, 500), 'x': slice(250, 300)}
e3t = mask['e3t_0'].isel(**slc).values
tmask = mask['tmask'].isel(**slc).values
depth = mask['gdept_0'].isel(**slc).values
years = range(2007, 2021)
variables = ['halocline', 'strength']
# Temporary per-month lists, keyed by variable name.
data = {}
# Permanent aggregates, keyed by variable then year.
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Add experiment year
# Experiment run 01jan08_tsc: halocline depth/strength for Jan-Jun 2008.
for year in [2008]:
    # Initialize lists
    for var in variables: data[var] = []
    # Load monthly averages
    for month in range(1, 7):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_tsc/SalishSea_1m_{datestr}_{datestr}'
        # Load grazing variables
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            # Surface salinity -- stored here only as a placeholder; it is
            # overwritten with the halocline depth at the end of this iteration.
            q = ds.vosaline.isel(deptht=0, **slc).values
            q2 = q[0,:,:]
            monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
            # Full-depth salinity; presumably (time, depth, y, x) -- TODO confirm.
            sal=ds.vosaline.isel(**slc).values
            #get the gradient in salinity
            # Forward difference between adjacent depth levels; the deepest
            # level is left at zero.
            sal_grad = np.zeros_like(sal)
            for i in range(0, (np.shape(sal_grad)[1]-1)):
                sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
            #print(sal_grad)
            # Depth index of the sharpest salinity gradient at each grid point.
            loc_max = np.argmax(sal_grad,axis=1)
            depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
            # Depths of the two cells bracketing the maximum-gradient interface.
            h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
            h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
            sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
            # Salinities just above and just below the interface.
            s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
            s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
            #halocline is halfway between the two cells
            halocline = 0.5*(h1+h2)
            # Strength: salinity change per metre across the interface.
            strength = (s2-s1)/(h2-h1)
            data['halocline'].append(halocline)
            data['strength'].append(strength)
            monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:]=halocline
            monthly_array_halocline_strength_SSslice[year-2007,month-1,:,:]=strength
    # Concatenate months
    for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
# Experiment run 01jul08_tsc: halocline depth/strength for Jul-Dec 2008.
for year in [2008]:
    # Initialize lists
    for var in variables: data[var] = []
    # Load monthly averages
    for month in range(7, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_tsc/SalishSea_1m_{datestr}_{datestr}'
        # Load grazing variables
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            # Surface salinity -- stored here only as a placeholder; it is
            # overwritten with the halocline depth at the end of this iteration.
            q = ds.vosaline.isel(deptht=0, **slc).values
            q2 = q[0,:,:]
            monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
            # Full-depth salinity; presumably (time, depth, y, x) -- TODO confirm.
            sal=ds.vosaline.isel(**slc).values
            #get the gradient in salinity
            # Forward difference between adjacent depth levels; the deepest
            # level is left at zero.
            sal_grad = np.zeros_like(sal)
            for i in range(0, (np.shape(sal_grad)[1]-1)):
                sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
            #print(sal_grad)
            # Depth index of the sharpest salinity gradient at each grid point.
            loc_max = np.argmax(sal_grad,axis=1)
            depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
            # Depths of the two cells bracketing the maximum-gradient interface.
            h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
            h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
            sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
            # Salinities just above and just below the interface.
            s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
            s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
            #halocline is halfway between the two cells
            halocline = 0.5*(h1+h2)
            # Strength: salinity change per metre across the interface.
            strength = (s2-s1)/(h2-h1)
            data['halocline'].append(halocline)
            data['strength'].append(strength)
            monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:]=halocline
            monthly_array_halocline_strength_SSslice[year-2007,month-1,:,:]=strength
    # Concatenate months
    for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Add experiment year
# Experiment run 01jan19_tsc: halocline depth/strength for Jan-Jun 2019.
for year in [2019]:
    # Initialize lists
    for var in variables: data[var] = []
    # Load monthly averages
    for month in range(1, 7):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_tsc/SalishSea_1m_{datestr}_{datestr}'
        # Load grazing variables
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            # Surface salinity -- stored here only as a placeholder; it is
            # overwritten with the halocline depth at the end of this iteration.
            q = ds.vosaline.isel(deptht=0, **slc).values
            q2 = q[0,:,:]
            monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:] = q2 #year2007 is index 0 along 1st dimension
            # Full-depth salinity; presumably (time, depth, y, x) -- TODO confirm.
            sal=ds.vosaline.isel(**slc).values
            #get the gradient in salinity
            # Forward difference between adjacent depth levels; the deepest
            # level is left at zero.
            sal_grad = np.zeros_like(sal)
            for i in range(0, (np.shape(sal_grad)[1]-1)):
                sal_grad[:,i,:,:] =(sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
            #print(sal_grad)
            # Depth index of the sharpest salinity gradient at each grid point.
            loc_max = np.argmax(sal_grad,axis=1)
            depths=np.tile(depth,[np.shape(sal)[0],1,1,1])
            # Depths of the two cells bracketing the maximum-gradient interface.
            h1=np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
            h2=np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
            sals=np.tile(sal,[np.shape(sal)[0],1,1,1])
            # Salinities just above and just below the interface.
            s1=np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
            s2=np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
            #halocline is halfway between the two cells
            halocline = 0.5*(h1+h2)
            # Strength: salinity change per metre across the interface.
            strength = (s2-s1)/(h2-h1)
            data['halocline'].append(halocline)
            data['strength'].append(strength)
            monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:]=halocline
            monthly_array_halocline_strength_SSslice[year-2007,month-1,:,:]=strength
    # Concatenate months
    for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
### 2019 experiment: Jul-Dec run with the modified threshold (01jul19_tsc)
# Computes halocline depth/strength from the monthly-mean salinity fields.
# NOTE(review): removed a stray copy-paste block (q = surface vosaline; q2 = q[0])
# that stored surface salinity into monthly_array_halocline_depth_SSslice —
# the slot was unconditionally overwritten below with the real halocline depth.
for year in [2019]:
    # Initialize lists (assumes variables == ['halocline', 'strength'] here — confirm)
    for var in variables:
        data[var] = []
    # Load monthly averages, second half of the year only (Jul-Dec)
    for month in range(7, 13):
        datestr = f'{year}{month:02d}'
        prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_tsc/SalishSea_1m_{datestr}_{datestr}'
        with xr.open_dataset(prefix + '_grid_T.nc') as ds:
            # Full-depth salinity in the 50x50 sub-domain: (time, depth, y, x)
            sal = ds.vosaline.isel(**slc).values
            # Vertical salinity gradient between adjacent cells; salinity and depth
            # both increase downward, so the gradient is positive in the halocline.
            sal_grad = np.zeros_like(sal)
            for i in range(0, (np.shape(sal_grad)[1]-1)):
                sal_grad[:,i,:,:] = (sal[:,i,:,:]-sal[:,i+1,:,:])/(depth[:,i,:,:]-depth[:,i+1,:,:])
            # Index of the strongest vertical gradient at each (time, y, x)
            loc_max = np.argmax(sal_grad, axis=1)
            # Depth/salinity of the two cells bracketing the maximum gradient
            depths = np.tile(depth, [np.shape(sal)[0],1,1,1])
            h1 = np.take_along_axis(depths, np.expand_dims(loc_max, axis=1), axis=1)
            h2 = np.take_along_axis(depths, np.expand_dims(loc_max+1, axis=1), axis=1)
            sals = np.tile(sal, [np.shape(sal)[0],1,1,1])
            s1 = np.take_along_axis(sals, np.expand_dims(loc_max, axis=1), axis=1)
            s2 = np.take_along_axis(sals, np.expand_dims(loc_max+1, axis=1), axis=1)
            # Halocline is halfway between the two cells
            halocline = 0.5*(h1+h2)
            strength = (s2-s1)/(h2-h1)
            data['halocline'].append(halocline)
            data['strength'].append(strength)
            # year 2007 is index 0 along the first (year) dimension
            monthly_array_halocline_depth_SSslice[year-2007,month-1,:,:] = halocline
            monthly_array_halocline_strength_SSslice[year-2007,month-1,:,:] = strength
    # Concatenate months into one per-year mean field
    for var in variables:
        aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# # Calculate 5 year mean and anomalies
# for var in variables:
# aggregates[var]['mean'] = np.concatenate([aggregates[var][year][None, ...] for year in years]).mean(axis=0)
# for year in years: aggregates[var][year] = aggregates[var][year] - aggregates[var]['mean']
# Zeros mark never-filled slots (land cells / months with no run); convert them
# to NaN so they drop out of the spatial averages below.
monthly_array_halocline_strength_SSslice[monthly_array_halocline_strength_SSslice == 0] = np.nan
# Spatial mean over the 50x50 box: y axis first, then x (nanmean of nanmean,
# matching the original cell's ordering).
_ymean = np.nanmean(monthly_array_halocline_strength_SSslice, axis=2)
monthly_array_halocline_strength_SSslicemean = np.nanmean(_ymean, axis=2)
print(monthly_array_halocline_strength_SSslicemean.shape)
(14, 12)
/tmp/ipykernel_3484198/3661973807.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_halocline_strength_SSslice, axis = 2),axis = 2)
# Warm-year (2019) monthly-mean halocline strength: original run vs the
# experiment applying the cold-year (2008) increased threshold.
fig, ax = plt.subplots(figsize=(15, 3))
bbox = dict(boxstyle='round', facecolor='w', alpha=0.9)
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
          'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# Row 12 on the year axis is 2019 (row 0 is 2007).
ax.plot(xticks, monthly_array_halocline_strength_orig_SSslicemean[12], 'r-',
        label='Original 2019')
ax.plot(xticks, monthly_array_halocline_strength_SSslicemean[12], 'k-.',
        label='2019 with 2008 increased thresh')
ax.set_title('WY Halocline with CY Increased Threshold', fontsize=18)
ax.legend(frameon=False, loc=1)
ax.set_ylim(0, 5)
ax.set_ylabel('g/kg m$^{-1}$')
Text(0, 0.5, 'g/kg m$^{-1}$')
monthly_array_halocline_strength_SSslicemean[12,:]
array([0.5430552 , 0.31373746, 0.71585734, 1.15099531, 1.94217265, 2.61285887, 2.93682089, 2.14007747, 1.58125968, 0.66524009, 1.22184692, 0.94281413])
# Cold-year (2008) monthly-mean halocline strength: original run vs the
# experiment applying the warm-year (2019) increased threshold.
fig, ax = plt.subplots(figsize=(15, 3))
bbox = dict(boxstyle='round', facecolor='w', alpha=0.9)
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
          'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# Row 1 on the year axis is 2008 (row 0 is 2007).
ax.plot(xticks, monthly_array_halocline_strength_orig_SSslicemean[1], 'b-',
        label='Original CY')
ax.plot(xticks, monthly_array_halocline_strength_SSslicemean[1], 'k-.',
        label='CY with WY increase thresh')
ax.set_title('CY Halocline with WY Increased Threshold', fontsize=18)
ax.legend(frameon=False, loc=1)
ax.set_ylim(0, 5)
ax.set_ylabel('g/kg m$^{-1}$')
Text(0, 0.5, 'g/kg m$^{-1}$')
### Depth-averaged Nutrients (0-10m)
### Nitrate data for original cold and warm years
# Monthly holder indexed (year, month, y, x); year index 0 corresponds to 2007.
monthly_array_nitrate_orig_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
# Cell thickness (e3t_0) and land mask restricted to the top 10 model levels (~0-10 m)
e3t, tmask = [mask[var].isel(z=slice(None, 10),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['nitrate']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Original (v201905r) run, cold year 2008
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
# e3t-weighted depth average over the top 10 levels, land-masked
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_nitrate_orig_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Original (v201905r) run, warm year 2019
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_nitrate_orig_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# # Calculate 5 year mean and anomalies
# for var in variables:
# aggregates[var]['mean'] = np.concatenate([aggregates[var][year][None, ...] for year in years]).mean(axis=0)
# for year in years: aggregates[var][year] = aggregates[var][year] - aggregates[var]['mean']
# Zeros mark never-filled slots (land / months with no run); turn them into NaN
# so they are excluded from the averages.
monthly_array_nitrate_orig_slice[monthly_array_nitrate_orig_slice == 0] = np.nan
# Spatial mean over the 50x50 box: y axis first, then x (nanmean of nanmean,
# matching the original cell's ordering).
_ymean = np.nanmean(monthly_array_nitrate_orig_slice, axis=2)
monthly_array_nitrate_orig_slicemean = np.nanmean(_ymean, axis=2)
print(monthly_array_nitrate_orig_slicemean.shape)
(14, 12)
/tmp/ipykernel_3484198/3312634990.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_nitrate_orig_slice, axis = 2),axis = 2)
### Silicon data for original cold and warm years
# Monthly holder indexed (year, month, y, x); year index 0 corresponds to 2007.
monthly_array_silicon_orig_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
# Cell thickness (e3t_0) and land mask restricted to the top 10 model levels (~0-10 m)
e3t, tmask = [mask[var].isel(z=slice(None, 10),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['silicon']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Original (v201905r) run, cold year 2008
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
# e3t-weighted depth average over the top 10 levels, land-masked
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_silicon_orig_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
###
## Original (v201905r) run, warm year 2019 (header previously said
## "Experimental Year", but the paths below are the original run — confirm)
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_silicon_orig_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Zeros mark never-filled slots (land / months with no run); turn them into NaN
# so they are excluded from the averages.
monthly_array_silicon_orig_slice[monthly_array_silicon_orig_slice == 0] = np.nan
# Spatial mean over the 50x50 box: y axis first, then x (nanmean of nanmean,
# matching the original cell's ordering).
_ymean = np.nanmean(monthly_array_silicon_orig_slice, axis=2)
monthly_array_silicon_orig_slicemean = np.nanmean(_ymean, axis=2)
print(monthly_array_silicon_orig_slicemean.shape)
(14, 12)
/tmp/ipykernel_3484198/241793216.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_silicon_orig_slice, axis = 2),axis = 2)
### Nitrate data for Experiments 1 and 2
# Monthly holder indexed (year, month, y, x); year index 0 corresponds to 2007.
# Each experiment year is built from two half-year runs (Jan-Jun and Jul-Dec).
monthly_array_nitrate_depthint_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
# Cell thickness (e3t_0) and land mask restricted to the top 10 model levels (~0-10 m)
e3t, tmask = [mask[var].isel(z=slice(None, 10),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['nitrate']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Experiment, 2008 Jan-Jun half (01jan08_tsc)
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
# e3t-weighted depth average over the top 10 levels, land-masked
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_nitrate_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Experiment, 2008 Jul-Dec half (01jul08_tsc)
# NOTE(review): this loop re-initializes data[var] and then overwrites
# aggregates[var][2008], so the aggregate holds the Jul-Dec mean only — confirm.
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_nitrate_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Experiment, 2019 Jan-Jun half (01jan19_tsc)
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_nitrate_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Experiment, 2019 Jul-Dec half (01jul19_tsc); overwrites aggregates[var][2019]
# with the Jul-Dec mean, as for 2008 above.
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_nitrate_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['nitrate']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# # Calculate 5 year mean and anomalies
# for var in variables:
# aggregates[var]['mean'] = np.concatenate([aggregates[var][year][None, ...] for year in years]).mean(axis=0)
# for year in years: aggregates[var][year] = aggregates[var][year] - aggregates[var]['mean']
# Zeros mark never-filled slots (land / months with no run); turn them into NaN
# so they are excluded from the averages.
monthly_array_nitrate_depthint_slice[monthly_array_nitrate_depthint_slice == 0] = np.nan
# Spatial mean over the 50x50 box: y axis first, then x (nanmean of nanmean,
# matching the original cell's ordering).
_ymean = np.nanmean(monthly_array_nitrate_depthint_slice, axis=2)
monthly_array_nitrate_depthint_slicemean = np.nanmean(_ymean, axis=2)
print(monthly_array_nitrate_depthint_slicemean.shape)
(14, 12)
/tmp/ipykernel_3484198/231329215.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_nitrate_depthint_slice, axis = 2),axis = 2)
### Silicon data for Experiments 1 and 2
# Monthly holder indexed (year, month, y, x); year index 0 corresponds to 2007.
# Each experiment year is built from two half-year runs (Jan-Jun and Jul-Dec).
monthly_array_silicon_depthint_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
# Cell thickness (e3t_0) and land mask restricted to the top 10 model levels (~0-10 m)
e3t, tmask = [mask[var].isel(z=slice(None, 10),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['silicon']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Experiment, 2008 Jan-Jun half (01jan08_tsc)
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
# e3t-weighted depth average over the top 10 levels, land-masked
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_silicon_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Experiment, 2008 Jul-Dec half (01jul08_tsc)
# NOTE(review): re-initializes data[var] and overwrites aggregates[var][2008],
# so the aggregate holds the Jul-Dec mean only — confirm intended.
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_silicon_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Experiment, 2019 Jan-Jun half (01jan19_tsc)
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_silicon_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# Experiment, 2019 Jul-Dec half (01jul19_tsc); overwrites aggregates[var][2019]
# with the Jul-Dec mean, as for 2008 above.
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data
q2 = q[0,:,:]
monthly_array_silicon_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['silicon']:
data[var].append((ds[var].isel(deptht=slice(None, 10),**slc)*e3t*tmask).sum(axis=1)/((e3t*tmask).sum(axis=1)).data)
# Concatenate months
for var in variables: aggregates[var][year] = np.concatenate(data[var]).mean(axis=0)
# # Calculate 5 year mean and anomalies
# for var in variables:
# aggregates[var]['mean'] = np.concatenate([aggregates[var][year][None, ...] for year in years]).mean(axis=0)
# for year in years: aggregates[var][year] = aggregates[var][year] - aggregates[var]['mean']
# Zeros mark never-filled slots (land / months with no run); turn them into NaN
# so they are excluded from the averages.
monthly_array_silicon_depthint_slice[monthly_array_silicon_depthint_slice == 0] = np.nan
# Spatial mean over the 50x50 box: y axis first, then x (nanmean of nanmean,
# matching the original cell's ordering).
_ymean = np.nanmean(monthly_array_silicon_depthint_slice, axis=2)
monthly_array_silicon_depthint_slicemean = np.nanmean(_ymean, axis=2)
print(monthly_array_silicon_depthint_slicemean.shape)
(14, 12)
/tmp/ipykernel_3484198/3737416097.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_silicon_depthint_slice, axis = 2),axis = 2)
# Warm-year (2019) 0-10 m nitrate: original run vs the experiment applying
# the cold-year (2008) increased threshold.
fig, ax = plt.subplots(figsize=(15, 3))
bbox = dict(boxstyle='round', facecolor='w', alpha=0.9)
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
          'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# Row 12 on the year axis is 2019 (row 0 is 2007).
ax.plot(xticks, monthly_array_nitrate_orig_slicemean[12], 'r-',
        label='Original 2019')
ax.plot(xticks, monthly_array_nitrate_depthint_slicemean[12], 'k-.',
        label='2019 with 2008 increased thresh')
ax.set_title('WY Nitrate with CY Increased Threshold', fontsize=18)
ax.legend(frameon=False, loc=4)
ax.set_ylim(0, 30)
ax.set_ylabel('mmol N m$^{-2}$')
Text(0, 0.5, 'mmol N m$^{-2}$')
monthly_array_nitrate_orig_slicemean[12,:]
array([23.64834195, 22.38691698, 16.99383033, 7.63730902, 4.96273598, 1.56547187, 1.23107567, 1.5230891 , 7.95614177, 16.79074658, 19.52305174, 21.62759183])
monthly_array_nitrate_depthint_slicemean[12,:]
array([23.67644305, 22.37273059, 17.09940305, 7.25002158, 4.93789725, 1.24039805, 0.80265601, 1.60089322, 6.53129732, 14.65755052, 18.79863427, 21.41577999])
# Warm-year (2019) 0-10 m silicon: original run vs the experiment applying
# the cold-year (2008) increased threshold.
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
# Row 12 on the year axis is 2019 (row 0 is 2007).
ax.plot(xticks, monthly_array_silicon_orig_slicemean[12,:],color='r',linestyle='-',label='Original 2019')
ax.plot(xticks, monthly_array_silicon_depthint_slicemean[12,:],color='k',linestyle='-.',label='2019 with 2008 increased thresh')
ax.set_title('WY Silicon with CY Increased Threshold',fontsize=18)
ax.legend(frameon=False,loc=4)
ax.set_ylim(0,60)
# Fixed copy-paste from the nitrate plots: this panel shows silicon, not nitrogen.
ax.set_ylabel('mmol Si m$^{-2}$')
Text(0, 0.5, 'mmol N m$^{-2}$')
monthly_array_silicon_orig_slicemean[12,:]
array([48.06238141, 49.17238747, 41.59249494, 18.0018168 , 5.46358173, 12.1107789 , 19.87997362, 28.03714477, 36.00806573, 41.96864256, 44.54149611, 47.0336623 ])
monthly_array_silicon_depthint_slicemean[12,:]
array([48.08720697, 49.17021756, 41.74573903, 17.27394181, 5.18894447, 8.35421668, 9.00828493, 6.48977421, 10.8493533 , 26.04362057, 36.47614259, 43.36377397])
# Cold-year (2008) 0-10 m nitrate: original run vs the experiment applying
# the warm-year (2019) increased threshold.
fig, ax = plt.subplots(figsize=(15, 3))
bbox = dict(boxstyle='round', facecolor='w', alpha=0.9)
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
          'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# Row 1 on the year axis is 2008 (row 0 is 2007).
ax.plot(xticks, monthly_array_nitrate_orig_slicemean[1], 'b-',
        label='Original 2008')
ax.plot(xticks, monthly_array_nitrate_depthint_slicemean[1], 'k-.',
        label='2008 with 2019 increased thresh')
ax.set_title('CY Nitrate with WY Increased Threshold', fontsize=18)
ax.legend(frameon=False, loc=4)
ax.set_ylim(0, 30)
ax.set_ylabel('mmol N m$^{-2}$')
Text(0, 0.5, 'mmol N m$^{-2}$')
monthly_array_nitrate_depthint_slicemean[1,:]
array([24.77422039, 23.29999955, 21.7438187 , 9.87203474, 6.32937779, 2.56494218, 2.649082 , 4.58757776, 7.08316269, 15.02847707, 21.88930665, 23.35885963])
# Cold-year (2008) 0-10 m silicon: original run vs the experiment applying
# the warm-year (2019) increased threshold.
fig, ax = plt.subplots(figsize=(15, 3))
bbox = {'boxstyle': 'round', 'facecolor': 'w', 'alpha': 0.9}
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov',"Dec"]
# Row 1 on the year axis is 2008 (row 0 is 2007).
ax.plot(xticks, monthly_array_silicon_orig_slicemean[1,:],color='b',linestyle='-',label='Original 2008')
ax.plot(xticks, monthly_array_silicon_depthint_slicemean[1,:],color='k',linestyle='-.',label='2008 with 2019 increased thresh')
ax.set_title('CY Silicon with WY Increased Threshold',fontsize=18)
ax.legend(frameon=False,loc=4)
ax.set_ylim(0,60)
# Fixed copy-paste from the nitrate plots: this panel shows silicon, not nitrogen.
ax.set_ylabel('mmol Si m$^{-2}$')
Text(0, 0.5, 'mmol N m$^{-2}$')
monthly_array_silicon_depthint_slicemean[1,:]
array([50.13888235, 51.47213852, 50.01017314, 25.05295036, 9.56076795, 6.89618133, 3.9125294 , 4.51631545, 7.42875662, 19.71116328, 38.33231021, 45.14306696])
### Diatom data for original years
# Monthly holder indexed (year, month, y, x); year index 0 corresponds to 2007.
monthly_array_diatoms_orig_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
# Cell thickness (e3t_0) and land mask for the top 27 model levels (~0-100 m)
e3t, tmask = [mask[var].isel(z=slice(None, 27),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['diatoms']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
### 2008 original run (NOTE(review): header previously said "using higher
### temperature threshold", but the paths below are the v201905r originals — confirm)
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
# Depth integral (sum of diatoms*e3t over the top 27 levels), not an average
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
monthly_array_diatoms_orig_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
### 2019 original run (same NOTE as above re the former "higher threshold" header)
# NOTE(review): unlike the nutrient cells, these diatom cells never concatenate
# data[var] into aggregates — confirm whether the aggregate step was intended.
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/v201905r/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
monthly_array_diatoms_orig_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
# Zeros mark never-filled slots (land / months with no run); turn them into NaN
# so they are excluded from the averages.
monthly_array_diatoms_orig_slice[monthly_array_diatoms_orig_slice == 0] = np.nan
# Spatial mean over the 50x50 box: y axis first, then x (nanmean of nanmean,
# matching the original cell's ordering).
_ymean = np.nanmean(monthly_array_diatoms_orig_slice, axis=2)
monthly_array_diatoms_orig_slicemean = np.nanmean(_ymean, axis=2)
print(monthly_array_diatoms_orig_slicemean.shape)
(14, 12)
/tmp/ipykernel_3484198/3403781678.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_diatoms_orig_slice, axis = 2),axis = 2)
# Depth-integrated diatoms for Experiments 1 and 2; holder indexed
# (year, month, y, x), year index 0 corresponds to 2007.
monthly_array_diatoms_depthint_slice = np.zeros([14,12,50,50])
# Load monthly averages
mask = xr.open_dataset('/data/eolson/results/MEOPAR/NEMO-forcing-new/grid/mesh_mask201702.nc')
slc = {'y': slice(450,500), 'x': slice(250,300)}
# Cell thickness (e3t_0) and land mask for the top 27 model levels (~0-100 m)
e3t, tmask = [mask[var].isel(z=slice(None, 27),**slc).values for var in ('e3t_0', 'tmask')]
years, variables = range(2007, 2021), ['diatoms']
# Temporary list dict
data = {}
# Permanent aggregate dict
aggregates = {var: {} for var in variables}
monthlydat = {var: {} for var in variables}
# Experiment, 2008 Jan-Jun half (01jan08_tsc)
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan08_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
# Depth integral (sum of diatoms*e3t over the top 27 levels), not an average
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
monthly_array_diatoms_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
# Experiment, 2008 Jul-Dec half (01jul08_tsc)
# NOTE(review): as in the original-run diatom cells, data[var] is never
# concatenated into aggregates here — confirm whether that step was intended.
for year in [2008]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul08_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
monthly_array_diatoms_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
# Experiment, 2019 Jan-Jun half (01jan19_tsc)
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(1, 7):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jan19_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
monthly_array_diatoms_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
# Experiment, 2019 Jul-Dec half (01jul19_tsc)
for year in [2019]:
# Initialize lists
for var in variables: data[var] = []
# Load monthly averages
for month in range(7, 13):
datestr = f'{year}{month:02d}'
prefix = f'/data/sallen/results/MEOPAR/Karyn/01jul19_tsc/SalishSea_1m_{datestr}_{datestr}'
# Open the monthly-mean tracer file
with xr.open_dataset(prefix + '_ptrc_T.nc') as ds:
q = np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data
q2 = q[0,:,:]
monthly_array_diatoms_depthint_slice[year-2007,month-1,:,:] = q2 #year 2007 is index 0 along 1st dimension
for var in ['diatoms']:
data[var].append(np.ma.masked_where(tmask == 0, ds[var].isel(deptht=slice(None, 27), **slc).values * e3t).sum(axis=1).data)
# # Calculate 5 year mean and anomalies
# for var in variables:
# aggregates[var]['mean'] = np.concatenate([aggregates[var][year][None, ...] for year in years]).mean(axis=0)
# for year in years: aggregates[var][year] = aggregates[var][year] - aggregates[var]['mean']
# Zeros mark never-filled slots (land / months with no run); turn them into NaN
# so they are excluded from the averages.
monthly_array_diatoms_depthint_slice[monthly_array_diatoms_depthint_slice == 0] = np.nan
# Spatial mean over the 50x50 box: y axis first, then x (nanmean of nanmean,
# matching the original cell's ordering).
_ymean = np.nanmean(monthly_array_diatoms_depthint_slice, axis=2)
monthly_array_diatoms_depthint_slicemean = np.nanmean(_ymean, axis=2)
print(monthly_array_diatoms_depthint_slicemean.shape)
(14, 12)
/tmp/ipykernel_3484198/2320522072.py:3: RuntimeWarning: Mean of empty slice np.nanmean(np.nanmean(monthly_array_diatoms_depthint_slice, axis = 2),axis = 2)
# Warm-year (2019) depth-integrated diatoms (0-100 m): original run vs the
# experiment applying the cold-year (2008) increased threshold.
fig, ax = plt.subplots(figsize=(15, 3))
bbox = dict(boxstyle='round', facecolor='w', alpha=0.9)
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
          'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# Row 12 on the year axis is 2019 (row 0 is 2007).
ax.plot(xticks, monthly_array_diatoms_orig_slicemean[12], 'r-',
        label='Original 2019')
ax.plot(xticks, monthly_array_diatoms_depthint_slicemean[12], 'k-.',
        label='2019 with 2008 increased thresh')
ax.set_title('WY Diatoms (0-100 m) with CY Increased Threshold', fontsize=18)
ax.legend(frameon=False, loc=1)
ax.set_ylim(0, 50)
ax.set_ylabel('mmol N m$^{-2}$')
Text(0, 0.5, 'mmol N m$^{-2}$')
# Cold-year (2008) depth-integrated diatoms (0-100 m): original run vs the
# experiment applying the warm-year (2019) increased threshold.
fig, ax = plt.subplots(figsize=(15, 3))
bbox = dict(boxstyle='round', facecolor='w', alpha=0.9)
cmap = plt.get_cmap('tab10')
palette = [cmap(0), cmap(0.2), 'k', cmap(0.1), cmap(0.3)]
xticks = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
          'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
# Row 1 on the year axis is 2008 (row 0 is 2007).
ax.plot(xticks, monthly_array_diatoms_orig_slicemean[1], 'b-',
        label='Original 2008')
ax.plot(xticks, monthly_array_diatoms_depthint_slicemean[1], 'k-.',
        label='2008 with 2019 increased thresh')
ax.set_title('CY Diatoms (0-100 m) with WY Increased Threshold', fontsize=18)
ax.legend(frameon=False, loc=1)
ax.set_ylim(0, 50)
ax.set_ylabel('mmol N m$^{-2}$')
Text(0, 0.5, 'mmol N m$^{-2}$')