#!/usr/bin/env python
# coding: utf-8

# In[1]:


import sqlalchemy
from sqlalchemy import create_engine, Column, String, Boolean, Integer, Float, MetaData, Table, type_coerce, ForeignKey, case, update
from sqlalchemy.orm import mapper, create_session, relationship, aliased, Session
from sqlalchemy.ext.declarative import declarative_base
import csv
import numpy as np
from sqlalchemy.ext.automap import automap_base
import matplotlib.pyplot as plt
import sqlalchemy.types as types
import numbers
from sqlalchemy.sql import and_, or_, not_, func
from sqlalchemy.sql import select
import os
import glob
import re
from os.path import isfile
import gsw


# In[2]:


basedir = '/ocean/shared/SalishSeaCastData/DFO/CTD/'
dbname = 'DFO_CTD.sqlite'
datelims = ()
engine = create_engine('sqlite:///' + basedir + dbname, echo=False)
Base = automap_base()
# reflect the tables in DFO_CTD.sqlite:
Base.prepare(engine, reflect=True)
# mapped classes have been created for the existing tables:
StationTBL = Base.classes.StationTBL
ObsTBL = Base.classes.ObsTBL
CalcsTBL = Base.classes.CalcsTBL
session = create_session(bind=engine, autocommit=False, autoflush=True)

# case expressions that pick the first non-null value in priority order
# (T0/C0 sensor pair, then T1/C1, then the unlabelled column):
SA = case([(CalcsTBL.Salinity_T0_C0_SA.isnot(None), CalcsTBL.Salinity_T0_C0_SA)], else_=
     case([(CalcsTBL.Salinity_T1_C1_SA.isnot(None), CalcsTBL.Salinity_T1_C1_SA)], else_=
     case([(CalcsTBL.Salinity_SA.isnot(None), CalcsTBL.Salinity_SA)], else_=None)))
CT = case([(CalcsTBL.Temperature_Primary_CT.isnot(None), CalcsTBL.Temperature_Primary_CT)], else_=
     case([(CalcsTBL.Temperature_Secondary_CT.isnot(None), CalcsTBL.Temperature_Secondary_CT)],
          else_=CalcsTBL.Temperature_CT))
# depth: use the observed depth where present, otherwise the calculated Z
ZD = case([(ObsTBL.Depth.isnot(None), ObsTBL.Depth)], else_=CalcsTBL.Z)


# In[3]:


# find source files with suspiciously warm (CT > 11) casts dated February 2015
# inside the region bounded by the two Lat/Lon lines:
qry = session.query(StationTBL.sourceFile, ObsTBL.sourceFile).\
    select_from(StationTBL).join(ObsTBL, ObsTBL.StationTBLID == StationTBL.ID).\
    join(CalcsTBL, CalcsTBL.ObsTBLID == ObsTBL.ID).\
    filter(and_(StationTBL.Lat > 47 - 3/2.5*(StationTBL.Lon + 123.5),
                StationTBL.Lat < 47 - 3/2.5*(StationTBL.Lon + 121),
                StationTBL.Include == True, ObsTBL.Include == True, CalcsTBL.Include == True,
                StationTBL.StartYear == 2015, StationTBL.StartMonth == 2, CT > 11))


# In[4]:


for row in qry.distinct():
    print(row)


# Add the following to processDFO_CTD.py in case the zip archive is re-extracted and the
# file reverts to its original (incorrect) start date; names follow that script's
# session0/StationTBL0 variables:
#
#     fnameErr='req20181116/EO UBC November 16, 2018 (2015 data)/2015-29-0002.ctd'
#     if session0.query(StationTBL0).filter(and_(StationTBL0.sourceFile==fnameErr,StationTBL0.StartMonth==2)).count():
#         session0.query(StationTBL0).filter(StationTBL0.sourceFile==fnameErr).update({"StartMonth":8,"StartDay":2})

# In[5]:


# correct the mis-dated cast in this database: the file header month (February) is wrong,
# so set StartMonth=8, StartDay=2
fnameErr = 'req20181116/EO UBC November 16, 2018 (2015 data)/2015-29-0002.ctd'
for row in session.query(StationTBL.ID, StationTBL.StartMonth).filter(StationTBL.sourceFile == fnameErr).one():
    print(row)
session.query(StationTBL).filter(StationTBL.sourceFile == fnameErr).update({"StartMonth": 8, "StartDay": 2})


# In[6]:


session.commit()


# In[7]:


session.close()
engine.dispose()


# In[ ]:
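
# A minimal follow-up sketch (not part of the original run): re-open the database with a
# fresh engine/session and confirm that no station from the corrected source file still
# reports StartMonth==2. The check* names below are illustrative, not from processDFO_CTD.py.
checkEngine = create_engine('sqlite:///' + basedir + dbname, echo=False)
checkBase = automap_base()
checkBase.prepare(checkEngine, reflect=True)
checkStationTBL = checkBase.classes.StationTBL
checkSession = create_session(bind=checkEngine, autocommit=False, autoflush=True)
nWrong = checkSession.query(checkStationTBL).filter(
    and_(checkStationTBL.sourceFile == fnameErr, checkStationTBL.StartMonth == 2)).count()
print('stations still reporting StartMonth==2 for the corrected file:', nWrong)  # expect 0
checkSession.close()
checkEngine.dispose()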