This notebook compares the surges at Neah Bay and Tofino to determine whether the timing of the surge events at these two locations is in sync. Toke Point is also examined.
It also includes a quick check of the contribution to SSH from the inverse barometer effect.
Tidal predictions and water level observations are taken from the NOAA website. http://tidesandcurrents.noaa.gov/waterlevels.html?id=9443090&units=metric&bdate=20060201&edate=20060207&timezone=GMT&datum=MLLW&interval=h&action=data
The surge is calculated as the difference between observations and predictions.
These data were downloaded by visiting the website manually. We would need something more automated if we decided to use Neah Bay conditions in the forcing.
Note: It was not completely obvious how to collect historical surge data from the National Weather Service website. http://www.nws.noaa.gov/mdl/etsurge/index.php?page=stn&region=wc&datum=msl&list=&map=0-48&type=both&stn=waneah
stormtools.get_NOAA_wlev() and stormtools.get_NOAA_predictions() automate retrieving the NOAA data.
Toke Point data were also retrieved from NOAA.
Water level observations are taken from DFO. Tidal predictions are calculated using t_tide with tidal harmonics from a 2005 time series. The 2005 mean sea level from observations is added to the tidal predictions.
The surge is calculated as the difference between observations and predictions.
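In outline, the Tofino calculation is just the following (a minimal sketch with placeholder arrays; the real harmonic prediction comes from t_tide, and msl_2005 stands in for the observed 2005 mean sea level):
import numpy as np

# Placeholder hourly series; in the real workflow tide_pred comes from
# t_tide using harmonics fit to the 2005 record, and obs from DFO.
tide_pred = np.zeros(24)  # harmonic prediction (m, about mean sea level)
obs = np.zeros(24)        # observed water level (m, chart datum)
msl_2005 = 2.0            # hypothetical 2005 mean sea level (m)

# surge anomaly: observations minus (prediction referenced to chart datum)
surge = obs - (tide_pred + msl_2005)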
%matplotlib inline
from matplotlib import pylab
import matplotlib.pyplot as plt
import netCDF4 as NC
import numpy as np
import arrow
import datetime
import pandas as pd
import pytz
import requests
from salishsea_tools import tidetools
from salishsea_tools import stormtools
stns = {'NeahBay': 9443090, 'TokePoint': 9440910}
st = '01-JAN-2006'
ed = '31-DEC-2006'
for key in stns:
    stormtools.get_NOAA_predictions(stns[key], st, ed)
    stormtools.get_NOAA_wlev(stns[key], st, ed)
def dateParserMeasured(s):
    # convert the string to a datetime object
    unaware = datetime.datetime.strptime(s, "%Y-%m-%d %H:%M")
    # attach the UTC time zone (NOAA data were requested in GMT)
    aware = unaware.replace(tzinfo=pytz.timezone('UTC'))
    return aware
Define some functions so that the plots are easy to make and we minimize copying and pasting. They load the surge data and compare the Tofino and Neah Bay surges in a plot.
def get_surges(start, end):
    # loads data from files and calculates the surges; returns anomaly and time
    meas = {}; pred = {}; anom = {}; time = {}
    for key in stns:
        filename_wl = 'wlev_' + str(stns[key]) + '_' + start + '_' + end + '.csv'
        filename_pr = 'predictions_' + str(stns[key]) + '_' + start + '_' + end + '.csv'
        meas[key] = pd.read_csv(filename_wl, skiprows=0, parse_dates=[0], date_parser=dateParserMeasured)
        meas[key] = meas[key].rename(columns={'Date Time': 'time', ' Water Level': 'slev',
                                              ' Sigma': 'sigma', 'I': 'I', 'L': 'L'})
        pred[key] = pd.read_csv(filename_pr, skiprows=0, parse_dates=[0], date_parser=dateParserMeasured)
        pred[key] = pred[key].rename(columns={'Date Time': 'time', ' Prediction': 'pred'})
        # anomaly: observation minus the prediction at the matching time
        anom[key] = np.zeros(len(meas[key].time))
        for i in np.arange(0, len(meas[key].time)):
            anom[key][i] = (meas[key].slev[i] - pred[key].pred[pred[key].time == meas[key].time[i]])
            # flag missing matches as NaN (note: a true zero anomaly is also caught here)
            if not anom[key][i]:
                anom[key][i] = float('NaN')
        time[key] = meas[key].time
    return anom, time
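The per-row matching loop above is quadratic in the record length. A rough alternative (a sketch only, assuming the two CSVs use identical hourly timestamps, with meas/pred DataFrames read the same way as inside get_surges) is to let pandas align the two series in one shot:
# Sketch: vectorized anomaly via a merge on the timestamp column
merged = pd.merge(meas['NeahBay'], pred['NeahBay'], on='time', how='left')
anom_vec = (merged['slev'] - merged['pred']).values  # NaN where no prediction exists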
anom,time=get_surges(st,ed)
def compare_surge(date_start, date_end):
    # compare Tofino surges with NOAA surges during the time frame indicated
    # Loading Tofino surge from forcing files
    ssh_Tofino = '/data/nsoontie/MEOPAR/NEMO-forcing/open_boundaries/west/ssh/ssh'
    arr_start = arrow.Arrow.strptime(date_start, '%d-%b-%Y')
    arr_end = arrow.Arrow.strptime(date_end, '%d-%b-%Y')
    yr = arr_start.year
    m = arr_start.month
    m = "%02d" % (m,)
    string = '_y' + str(yr) + 'm' + str(m) + '.nc'
    # get the data
    fT = NC.Dataset(ssh_Tofino + string)
    Tof_anom = fT.variables['sossheig']
    tss = fT.variables['time_counter'][:]
    l = tss.shape[0]; t = np.linspace(0, l - 1, l)  # time array
    Tof_time = stormtools.convert_date_hours(t, date_start)  # use the argument, not the global start
    # Plotting
    plt.figure(figsize=(18, 5))
    ax1 = pylab.subplot(1, 1, 1)
    ax1.plot(Tof_time[:], Tof_anom[:, 0, 0], 'r', label='Tofino')
    for key in stns:
        ax1.plot(time[key], anom[key], label=key)
    ax1.set_ylim([-1.5, 1.5])
    ax1.set_xlim([datetime.datetime(arr_start.year, arr_start.month, arr_start.day),
                  datetime.datetime(arr_end.year, arr_end.month, arr_end.day)])
    pylab.xlabel('Time')
    pylab.ylabel('SSH anomaly (m)')
    pylab.grid()
    pylab.legend(loc=0)
    pylab.title('SSH Anomaly for ' + date_start)
    return Tof_time, Tof_anom
def get_statistics(ssh1, ssh2, t1, t2, sdt, edt):
    """
    Calculates several statistics, such as mean error, maximum value, etc. for two SSHs in a given time period.
    :returns: max1, max2, tmax1, tmax2, mean_diff, mean_abs_diff, rms_diff, correlation
    """
    # truncate ssh1
    trun1, trun_t1 = stormtools.truncate(ssh1, t1, sdt, edt)
    # truncate ssh2
    trun2, trun_t2 = stormtools.truncate(ssh2, t2, sdt, edt)
    # difference between the two series
    diff = trun1 - trun2
    # calculate statistics
    mean_diff = np.nanmean(diff)
    mean_abs_diff = np.nanmean(np.abs(diff))
    rms_diff = stormtools._rmse(diff)
    corr = np.corrcoef(trun1, trun2)
    max1, tmax1 = stormtools._find_max(trun1, trun_t1)
    max2, tmax2 = stormtools._find_max(trun2, trun_t2)
    return max1, max2, tmax1, tmax2, mean_diff, mean_abs_diff, rms_diff, corr
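One caveat on the correlation (a note about numpy, not about stormtools): np.corrcoef propagates NaNs, so the NaN gaps inserted by get_surges will turn the whole correlation matrix into NaNs. A minimal guard, assuming the two truncated series are the same length, is to correlate only the co-valid points:
# Sketch: mask out NaNs before computing the correlation
valid = ~np.isnan(trun1) & ~np.isnan(trun2)
corr = np.corrcoef(trun1[valid], trun2[valid])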
start='01-Feb-2006'
end = '28-Feb-2006'
[Tof_time, Tof_anom]=compare_surge(start,end)
start='02-Feb-2006'
end = '05-Feb-2006'
sd =datetime.datetime.strptime(start,'%d-%b-%Y')
sd = sd.replace(tzinfo=pytz.timezone('UTC'))
ed =datetime.datetime.strptime(end,'%d-%b-%Y')
ed = ed.replace(tzinfo=pytz.timezone('UTC'))
[maxTof,maxNB,tmaxTof,tmaxNB,mean_diff, mean_abs_diff, rms_diff,corr] = get_statistics(
np.array(Tof_anom[:,0,0]),np.array(anom['NeahBay']),np.array(Tof_time),np.array(time['NeahBay']),sd,ed)
print 'From', start, 'to', end
print 'Tofino Max/Time', maxTof, tmaxTof
print 'Neah Bay Max/Time', maxNB, tmaxNB
print 'Mean (Tofino -Neah Bay)', mean_diff
From 02-Feb-2006 to 05-Feb-2006
Tofino Max/Time 0.882743 2006-02-04 14:00:00+00:00
Neah Bay Max/Time 0.636 2006-02-04 16:00:00+00:00
Mean (Tofino -Neah Bay) 0.10972174032
The Tofino surge is significantly higher than Neah Bay (by about 25 cm) on Feb 4, and the Neah Bay peak is about 2 hours later.
The Toke Point anomaly is much larger; its timing is about the same.
start='01-Feb-2006'
end = '28-Feb-2006'
sd =datetime.datetime.strptime(start,'%d-%b-%Y')
sd = sd.replace(tzinfo=pytz.timezone('UTC'))
ed =datetime.datetime.strptime(end,'%d-%b-%Y')
ed = ed.replace(tzinfo=pytz.timezone('UTC'))
[maxTof,maxNB,tmaxTof,tmaxNB,mean_diff, mean_abs_diff, rms_diff,corr] = get_statistics(
np.array(Tof_anom[:,0,0]),np.array(anom['NeahBay']),np.array(Tof_time),np.array(time['NeahBay']),sd,ed)
print 'From', start, 'to', end
print 'Mean (Tofino -Neah Bay)', mean_diff
From 01-Feb-2006 to 28-Feb-2006
Mean (Tofino -Neah Bay) 0.0812378998059
start='01-Dec-2006'
end = '31-Dec-2006'
[Tof_time, Tof_anom]=compare_surge(start,end)
start='14-Dec-2006'
end = '18-Dec-2006'
sd =datetime.datetime.strptime(start,'%d-%b-%Y')
sd = sd.replace(tzinfo=pytz.timezone('UTC'))
ed =datetime.datetime.strptime(end,'%d-%b-%Y')
ed = ed.replace(tzinfo=pytz.timezone('UTC'))
[maxTof,maxNB,tmaxTof,tmaxNB,mean_diff, mean_abs_diff, rms_diff,corr] = get_statistics(
np.array(Tof_anom[:,0,0]),np.array(anom['NeahBay']),np.array(Tof_time),np.array(time['NeahBay']),sd,ed)
print 'From', start, 'to', end
print 'Tofino Max/Time', maxTof, tmaxTof
print 'Neah Bay Max/Time', maxNB, tmaxNB
print 'Mean (Tofino -Neah Bay)', mean_diff
From 14-Dec-2006 to 18-Dec-2006
Tofino Max/Time 0.554225 2006-12-15 06:00:00+00:00
Neah Bay Max/Time 0.65 2006-12-15 06:00:00+00:00
Mean (Tofino -Neah Bay) -0.0902090209924
On Dec 15, the Neah Bay surge is slightly higher (about 10 cm) than Tofino, but the timing of the maximum is a good match.
start='01-Dec-2006'
end = '30-Dec-2006'
sd =datetime.datetime.strptime(start,'%d-%b-%Y')
sd = sd.replace(tzinfo=pytz.timezone('UTC'))
ed =datetime.datetime.strptime(end,'%d-%b-%Y')
ed = ed.replace(tzinfo=pytz.timezone('UTC'))
[maxTof,maxNB,tmaxTof,tmaxNB,mean_diff, mean_abs_diff, rms_diff,corr] = get_statistics(
np.array(Tof_anom[:,0,0]),np.array(anom['NeahBay']),np.array(Tof_time),np.array(time['NeahBay']),sd,ed)
print 'From', start, 'to', end
print 'Mean (Tofino -Neah Bay)', mean_diff
From 01-Dec-2006 to 30-Dec-2006
Mean (Tofino -Neah Bay) 0.0410648408411
start='01-Nov-2006'
end = '30-Nov-2006'
[Tof_time, Tof_anom]=compare_surge(start,end)
start='14-Nov-2006'
end = '18-Nov-2006'
sd =datetime.datetime.strptime(start,'%d-%b-%Y')
sd = sd.replace(tzinfo=pytz.timezone('UTC'))
ed =datetime.datetime.strptime(end,'%d-%b-%Y')
ed = ed.replace(tzinfo=pytz.timezone('UTC'))
[maxTof,maxNB,tmaxTof,tmaxNB,mean_diff, mean_abs_diff, rms_diff,corr] = get_statistics(
np.array(Tof_anom[:,0,0]),np.array(anom['NeahBay']),np.array(Tof_time),np.array(time['NeahBay']),sd,ed)
print 'From', start, 'to', end
print 'Tofino Max/Time', maxTof, tmaxTof
print 'Neah Bay Max/Time', maxNB, tmaxNB
print 'Mean (Tofino -Neah Bay)', mean_diff
From 14-Nov-2006 to 18-Nov-2006
Tofino Max/Time 0.725153 2006-11-15 19:00:00+00:00
Neah Bay Max/Time 0.63 2006-11-16 01:00:00+00:00
Mean (Tofino -Neah Bay) -0.0594614228353
The Neah Bay surge is several hours later and 9 cm lower on Nov 15/16.
start='01-Nov-2006'
end = '30-Nov-2006'
sd =datetime.datetime.strptime(start,'%d-%b-%Y')
sd = sd.replace(tzinfo=pytz.timezone('UTC'))
ed =datetime.datetime.strptime(end,'%d-%b-%Y')
ed = ed.replace(tzinfo=pytz.timezone('UTC'))
[maxTof,maxNB,tmaxTof,tmaxNB,mean_diff, mean_abs_diff, rms_diff,corr] = get_statistics(
np.array(Tof_anom[:,0,0]),np.array(anom['NeahBay']),np.array(Tof_time),np.array(time['NeahBay']),sd,ed)
print 'From', start, 'to', end
print 'Mean (Tofino -Neah Bay)', mean_diff
From 01-Nov-2006 to 30-Nov-2006
Mean (Tofino -Neah Bay) 0.00765545827363
Generally, it looks like the Tofino surges are slightly higher than the Neah Bay surges. There are a few exceptions, though, such as the Dec 15, 2006 storm surge and also Nov 13, 2006.
NEMO can correct the sea surface height to account for the force applied by the atmospheric pressure: if the atmosphere is at low pressure, the SSH is elevated slightly.
The correction factor given by the NEMO documentation is:
$ \eta_{ib} = -\frac{1}{g\rho_0}\left(P_{atm}-P_0\right) $
Questions:
How significant is this at the mouth of Juan de Fuca?
Are we double counting this?
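Before looking at real pressures, a back-of-envelope check on the first question (constants as defined in the next cell): each hPa of pressure deficit raises the sea surface by about a centimetre, so a deep low, say 40 hPa below reference, is worth several tens of centimetres.
# Rule of thumb for the inverse barometer response
g = 9.81; rho0 = 1035.
print 100. / (g * rho0)  # m of SSH per hPa of pressure deficit; ~0.0098 m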
#define constants (from NEMO book)
rho0=1035
P0=101000
g=9.81
NB_lat=48.37
NB_lon=-124.45
Find a CGRF point close to Neah Bay
CGRF_path = '/ocean/dlatorne/MEOPAR/CGRF/NEMO-atmos/'
spr = NC.Dataset(CGRF_path+'slp_y2006m02d04.nc')
print spr
bathy_path='/data/nsoontie/MEOPAR/NEMO-forcing/grid/bathy_meter_SalishSea2.nc'
b=NC.Dataset(bathy_path)
bathy=b.variables['Bathymetry']
X=b.variables['nav_lon']
Y=b.variables['nav_lat']
lat=spr.variables['nav_lat']
lon=spr.variables['nav_lon']
# Can't use this for some reason :(
# [i, j] = tidetools.find_closest_model_point(NB_lon, NB_lat, X, Y, bathy[:])
i = 365
j = 28
print X[i,j], Y[i,j]
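Since find_closest_model_point is misbehaving here, a crude stand-in would be the following (a sketch only: plain Euclidean distance in lon/lat space, with no land masking, so not equivalent to the tidetools routine):
# Hypothetical fallback: indices of the grid point nearest (NB_lon, NB_lat)
d2 = (X[:] - NB_lon)**2 + (Y[:] - NB_lat)**2
i_near, j_near = np.unravel_index(np.argmin(d2), d2.shape)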
plt.figure(figsize=(8,8))
pylab.subplot(1,1,1)
pylab.pcolormesh(X[:],Y[:],bathy[:])
pylab.plot(X[i,j], Y[i,j],'or',label='Neah Bay')
Ci=461
Cj=523
pylab.plot(lon[Ci,Cj]-360, lat[Ci,Cj],'og',label='CGRF point')
pylab.legend(loc=0)
press_CGRF=spr.variables['atmpres'][:,Ci,Cj]
time_CGRF=spr.variables['time_counter']
<type 'netCDF4.Dataset'>
root group (NETCDF4 data model, file format UNDEFINED):
    NCO: 4.0.9
    nco_openmp_thread_number: 1
    Conventions: CF-1.6
    title: CGRF sea-level atmospheric pressure forcing dataset for 2006-02-04
    institution: Dept of Earth, Ocean & Atmospheric Sciences, University of British Columbia
    source: https://bitbucket.org/salishsea/tools/raw/tip/SalishSeaCmd/salishsea_cmd/get_cgrf_processor.py
    references: /ocean/dlatorne/MEOPAR/CGRF/NEMO-atmos/slp_y2006m02d04.nc
    comment: Processed from goapp.ocean.dal.ca::canadian_GDPS_reforecasts_v1 files.
    history: Tue May 27 11:29:47 2014: ncks -4 -L4 -O -d time_counter,18,23 /ocean/dlatorne/MEOPAR/CGRF/rsync-mirror/2006-02-03/2006020300_slp.nc tmp1.nc
        Tue May 27 11:29:50 2014: ncrcat -O tmp1.nc tmp2.nc /ocean/dlatorne/MEOPAR/CGRF/NEMO-atmos/slp_y2006m02d04.nc
        Tue 27 May 2014 11:29:53 AM : Adjust time_counter values and clean up metadata.
    dimensions(sizes): time_counter(24), y(600), x(801)
    variables(dimensions): float32 atmpres(time_counter,y,x), float32 nav_lat(y,x), float32 nav_lon(y,x), float64 time_counter(time_counter)
    groups:
-124.467224121 48.3496665955
Close enough...
Now calculate the ssh correction.
print press_CGRF.shape
print 'Max CGRF pressure', max(press_CGRF)
print 'Min CGRF pressure', min(press_CGRF)
eta= -1/g/rho0*(press_CGRF-P0)
print max(eta)
pylab.plot(eta)
pylab.xlabel('Hour since Feb 4, 2006 00:00')
pylab.ylabel('Eta correction due to inverse barometer (m)')
(24,)
Max CGRF pressure 98661.5
Min CGRF pressure 96697.2
0.423781005776
This seems like a large contribution due to the inverse barometer effect. Can this be right?
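A quick cross-check of the arithmetic, using the minimum pressure printed above:
# ~4300 Pa below the reference pressure P0
print -1 / g / rho0 * (96697.2 - P0)  # ~0.42 m, consistent with max(eta)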
Note: digging into the namelists, there is a flag for turning the inverse barometer effect on/off for the OBC SSH data: ln_apr_obc. I am currently using .true., but I am considering changing this to see if there is an effect...
To do:
path = '/data/nsoontie/MEOPAR/SalishSea/results/storm-surges/tide_fix/feb2006/'
runs = {'all_forcing','obc_false'}
fUs={}; fVs={}; fTs={};
for key in runs:
    fUs[key] = NC.Dataset(path + key +'/SalishSea_4h_20060201_20060207_grid_U.nc','r');
    fVs[key] = NC.Dataset(path + key +'/SalishSea_4h_20060201_20060207_grid_V.nc','r');
    fTs[key] = NC.Dataset(path + key +'/SalishSea_4h_20060201_20060207_grid_T.nc','r');
---------------------------------------------------------------------------
RuntimeError                              Traceback (most recent call last)
<ipython-input-20-01886e1c2471> in <module>()
      7
      8 for key in runs:
----> 9     fUs[key] = NC.Dataset(path + key +'/SalishSea_4h_20060201_20060207_grid_U.nc','r');
     10     fVs[key] = NC.Dataset(path + key +'/SalishSea_4h_20060201_20060207_grid_V.nc','r');
     11     fTs[key] = NC.Dataset(path + key +'/SalishSea_4h_20060201_20060207_grid_T.nc','r');

/home/nsoontie/anaconda/lib/python2.7/site-packages/netCDF4.so in netCDF4.Dataset.__init__ (netCDF4.c:19478)()

RuntimeError: No such file or directory
start='01-Feb-2006'
stations = {'PointAtkinson': 7795, 'Victoria': 7120, 'PatriciaBay': 7277, 'CampbellRiver': 8074,
'CrescentBeach': 1, 'WhiteRock': 1, 'BoundaryBay': 1}
datums = {'PointAtkinson': 3.09, 'Victoria': 1.881, 'PatriciaBay': 2.256, 'CampbellRiver': 2.916,
'CrescentBeach': 2.44, 'WhiteRock': 2.85, 'BoundaryBay': 1 }
thalwegs = ['Thalweg1','Thalweg2', 'Thalweg3', 'Thalweg4', 'Thalweg5', 'Thalweg6']
allstations = {'PointAtkinson': 7795, 'Victoria': 7120, 'PatriciaBay': 7277, 'CampbellRiver': 8074,
'CrescentBeach': 2.44, 'WhiteRock': 2.85 , 'BoundaryBay': 1,
'Thalweg1': [], 'Thalweg2': [], 'Thalweg3': [], 'Thalweg4': [], 'Thalweg5': [], 'Thalweg6': [],
'Plume': 1}
#datums from t_xtide
run_stations={}
us={}; vs={}; lats={}; lons={}; tmps={}; sals={}; sshs={}; ts={};
for run in runs:
    for key in allstations:
        string = path + run + '/1h_' + key + '.nc'
        run_stations[key] = NC.Dataset(string, 'r')
        t = run_stations[key].variables['time_counter']
        ts[run] = stormtools.convert_date_seconds(t, start)
    [us[run], vs[run], lats[run], lons[run], tmps[run], sals[run], sshs[run]] = stormtools.combine_data(run_stations)
run_stations = {}
start='01-Feb-2006'
end='07-Feb-2006'
emin=-3
emax=3
plt.figure(figsize=(20,12))
wlev_pred_xtide={}
wlev_meas={}
i = 1
for key in stations:
    for run in runs:
        ax = pylab.subplot(4, 2, i)
        pylab.plot(ts[run], sshs[run][key][:, 0, 0], label=run)
    pylab.title(key)
    ax.set_ylim([emin, emax])
    if key == 'PointAtkinson' or key == 'Victoria' or key == 'PatriciaBay' or key == 'CampbellRiver':
        # plotting observations
        statID_PA = stations[key]
        filename = 'wlev_' + str(statID_PA) + '_' + start + '_' + end + '.csv'
        tidetools.get_dfo_wlev(statID_PA, start, end)

        def dateParserMeasured(s):
            # convert the string to a datetime object
            unaware = datetime.datetime.strptime(s, "%Y/%m/%d %H:%M")
            # localize to Canada/Pacific; localize() picks the correct DST
            # offset, unlike datetime.replace with a pytz time zone
            aware = pytz.timezone('Canada/Pacific').localize(unaware)
            # convert to UTC
            return aware.astimezone(pytz.timezone('UTC'))

        wlev_meas[key] = pd.read_csv(filename, skiprows=7, parse_dates=[0], date_parser=dateParserMeasured)
        wlev_meas[key] = wlev_meas[key].rename(columns={'Obs_date': 'time', 'SLEV(metres)': 'slev'})
        #pylab.plot(wlev_meas[key].time, wlev_meas[key].slev-datums[key],'k*',label='observed') #subtracted off the datum from t_xtide
    if key != 'BoundaryBay':
        # plotting t_xtide predictions
        filename = '/data/nsoontie/MEOPAR/analysis/storm_surges/data/' + key + ' (2)_xtide_prediction_' + start + '_' + end + '.csv'

        def dateParserMeasured2(s):
            # convert the string to a datetime object
            unaware = datetime.datetime.strptime(s, "%d-%b-%Y %H:%M:%S ")
            # localize to Canada/Pacific, then convert to UTC
            aware = pytz.timezone('Canada/Pacific').localize(unaware)
            return aware.astimezone(pytz.timezone('UTC'))

        wlev_pred_xtide[key] = pd.read_csv(filename, delimiter='\t', parse_dates=[0], date_parser=dateParserMeasured2)
        wlev_pred_xtide[key] = wlev_pred_xtide[key].rename(columns={'Time_Local ': 'time', ' wlev_pred ': 'slev'})
        #pylab.plot(wlev_pred_xtide[key].time, wlev_pred_xtide[key].slev-datums[key], label='t_xtide')
    pylab.grid()
    i = i + 1
pylab.legend(bbox_to_anchor=(1.05, 1), loc=2)
print 'Sea Surface Height'
Us = {}; Vs = {}; Es = {}; Ss = {}; Ts = {}
timestamp = 25  # model output time
depthlevel = 0  # model level to plot
for key in runs:
    [Us[key], Vs[key], Es[key], Ss[key],
     Ts[key]] = stormtools.get_variables(fUs[key], fVs[key], fTs[key], timestamp, depthlevel)
    # define plot parameters
    # time, depth, colormap etc
    times = fVs[key].variables['time_counter']  # array of output times
    time = times[timestamp]  # physical time in seconds
    depths = fVs[key].variables['depthv']  # array of depths
    depth = depths[depthlevel]  # physical depth in meters
    cmap = plt.get_cmap('jet')
    u_lat = fUs[key].variables['nav_lat']
    u_lon = fUs[key].variables['nav_lon']
    T_lat = fTs[key].variables['nav_lat']
    T_lon = fTs[key].variables['nav_lon']
    v_lat = fVs[key].variables['nav_lat']
    v_lon = fVs[key].variables['nav_lon']
diff = Es['all_forcing']-Es['obc_false']
pylab.pcolormesh(diff,vmin=-0.004,vmax=0.004)
pylab.colorbar()
print (Es['all_forcing']==Es['obc_false']).all()
These are identical! I should double-check that I actually used ln_apr_obc=false.
import commands
str1=path+'all_forcing/namelist'
str2=path+'obc_false/namelist'
cmd = 'diff '+ str1 +' '+ str2
print cmd
out = commands.getoutput(cmd)
print out
**Conclusion**
This flag ln_apr_obc does not affect our results. Note that this flag was not found in the 3.4 documentation, so it may not be in use anymore.
Does that mean we have to remove the inverse barometer ourselves? This would make preparing forcing files more difficult since we would need the weather...
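If we did have to remove it ourselves, the core operation would look something like this sketch (placeholder arrays: ssh_anom and press stand in for a boundary-point surge anomaly and sea-level pressure series interpolated to common times; this is not an existing forcing-file workflow):
import numpy as np

g = 9.81; rho0 = 1035.; P0 = 101000.
# hypothetical hourly series at the boundary point (placeholders)
ssh_anom = np.zeros(24)       # observed surge anomaly (m)
press = np.full(24, 100000.)  # sea-level pressure (Pa)

# subtract the inverse barometer signal, leaving the wind/remote part
ib = -1. / (g * rho0) * (press - P0)
ssh_no_ib = ssh_anom - ib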
def get_NOAA_wlev(station_no, start_date, end_date):
    """Download water level data from NOAA site for one NOAA station
    for specified period.

    :arg station_no: Station number e.g. 9443090.
    :type station_no: int

    :arg start_date: Start date; e.g. '01-JAN-2010'.
    :type start_date: str

    :arg end_date: End date; e.g. '31-JAN-2010'
    :type end_date: str

    :returns: Saves text file with water level data at one station
    """
    # Name the output file
    outfile = 'wlev_' + str(station_no) + '_' + str(start_date) + '_' + str(end_date) + '.csv'
    # Form urls and html information
    st_ar = arrow.Arrow.strptime(start_date, '%d-%b-%Y')
    end_ar = arrow.Arrow.strptime(end_date, '%d-%b-%Y')
    base_url = 'http://tidesandcurrents.noaa.gov'
    form_handler = (
        '/stationhome.html?id='
        + str(station_no))
    data_provider = (
        '/api/datagetter?product=hourly_height&application=NOS.COOPS.TAC.WL'
        + '&begin_date=' + st_ar.format('YYYYMMDD') + '&end_date=' + end_ar.format('YYYYMMDD')
        + '&datum=MLLW&station=' + str(station_no)
        + '&time_zone=GMT&units=metric&interval=h&format=csv')
    # Go get the data from the NOAA site
    with requests.Session() as s:
        s.post(base_url)
        r = s.get(base_url + data_provider)
    # Write the data to a text file
    with open(outfile, 'w') as f:
        f.write(r.text)
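For reference, a call matching the download loop near the top of this notebook:
get_NOAA_wlev(9443090, '01-JAN-2006', '31-DEC-2006')
# writes wlev_9443090_01-JAN-2006_31-DEC-2006.csv in the working directory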