# West Coast (Barbados) water-quality survey post-processing script:
# reads RBR/JFE/GPS logger exports per deployment, geolocates samples,
# pulls GFS met and offshore wave data, and writes summary tables and figures.
#%% Project Setup
|
|
|
|
import os
|
|
|
|
import pandas as pd
|
|
import geopandas as gp
|
|
from scipy.signal import argrelextrema
|
|
import numpy as np
|
|
import math
|
|
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
|
|
import matplotlib.pyplot as plt
|
|
import matplotlib.font_manager as fm
|
|
import matplotlib as mpl
|
|
|
|
import cartopy.crs as ccrs
|
|
import contextily as ctx
|
|
import cmocean.cm as cmo
|
|
from shapely.geometry import Point, LineString
|
|
|
|
from xarray.backends import NetCDF4DataStore
|
|
import xarray as xr
|
|
|
|
from datetime import datetime, timedelta
|
|
from netCDF4 import num2date
|
|
from metpy.units import units
|
|
|
|
import matplotlib.pyplot as plt
|
|
import cartopy.crs as ccrs
|
|
import cartopy.feature as cfeature
|
|
from metpy.plots import ctables
|
|
|
|
from siphon.catalog import TDSCatalog
|
|
|
|
#%% Helper function for finding proper time variable
|
|
|
|
def find_time_var(var, time_basename='time'):
    """Return the first coordinate of *var* whose name starts with *time_basename*.

    Parameters
    ----------
    var : xarray.DataArray
        Variable whose coordinates are searched.
    time_basename : str
        Prefix identifying the time coordinate (default ``'time'``).

    Raises
    ------
    ValueError
        If no coordinate name starts with *time_basename*.
    """
    time_name = next(
        (name for name in var.coords if name.startswith(time_basename)), None)
    if time_name is None:
        raise ValueError('No time variable found for ' + var.name)
    return var.coords[time_name]
|
|
|
|
|
|
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
|
|
|
|
# Deployment folders: one per (campaign phase, site) pair, phase-major order,
# so indices 0-2 are Pre_Construction, 3-5 Construction, 6-8 Post_Construction
# and 9-11 Monitoring_Nov.
_wc_root = 'C:/Users/arey/files/Projects/West Coast/'
_wc_phases = ['Pre_Construction', 'Construction', 'Post_Construction', 'Monitoring_Nov']
_wc_sites = ['Great House', 'Greensleeves', 'Old Queens Fort']
importPaths = [_wc_root + phase + '/' + site + '/'
               for phase in _wc_phases for site in _wc_sites]
|
|
|
|
# Site name for each entry of importPaths: the same three sites repeat for
# each of the four campaign phases.
siteNames = ['Great House', 'Greensleeves', 'Old Queens Fort'] * 4
|
|
|
|
# Campaign-phase label for each entry of importPaths (three sites per phase).
timeLabels = (['Before Construction'] * 3
              + ['During Construction'] * 3
              + ['After Construction'] * 3
              + ['Monitoring'] * 3)
|
|
|
|
# Offshore wave time-series file matched to each deployment (None for the
# Monitoring deployments, which have no wave nowcast).  The 'holetown' entries
# intentionally carry no '.bts' extension, exactly as in the source data set.
_wave_root = 'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/'
wave_bts_file = ([_wave_root + 'spawnee_mid_27m.bts',
                  _wave_root + 'spawnee_mid_27m.bts',
                  _wave_root + 'holetown_mid_15m'] * 3
                 + [None] * 3)
|
|
|
|
|
|
|
|
#%% Per-deployment processing
# One pass per deployment folder: read the RBR/JFE/GPS logger files, geolocate
# the samples, pull matching GFS met data and offshore wave statistics, then
# write summary tables, time-series figures and map figures into the folder.
#
# The parallel lists above hold 12 entries (indices 0-11); this pass covers
# the Monitoring_Nov deployments (9-11).  BUGFIX: the original upper bound of
# 13 made importPaths[12] raise IndexError, so the range is clamped to 12.
for s in range(9, 12):
    ## Define master import path
    importPath = importPaths[s]
    siteName = siteNames[s]
    timeLabel = timeLabels[s]
    importFiles = os.listdir(importPath)

    # Initialize import variables (a folder may lack some of the three loggers)
    RBR_File = None
    JFE_File = None
    GPS_File = None

    # Classify folder contents.  Order matters: the JFE '_A.csv' pattern must
    # be tested before the generic '.csv' used by the GPS export.
    for i in range(0, len(importFiles)):
        if '.xlsx' in importFiles[i] and 'Summary' not in importFiles[i]:
            RBR_File = importFiles[i]
        elif '_A.csv' in importFiles[i] and 'Summary' not in importFiles[i]:
            JFE_File = importFiles[i]
        elif '.csv' in importFiles[i] and 'Summary' not in importFiles[i]:
            GPS_File = importFiles[i]

    #%% RBR Import Data
    if RBR_File is not None:
        RBR_Obs = pd.read_excel(importPath + RBR_File,
                                sheet_name='Data', skiprows=0, header=1)

    #%% JFE Import Data
    if JFE_File is not None:
        JFE_Obs = pd.read_csv(importPath + JFE_File, skiprows=30)

    #%% GPS Import Data
    if GPS_File is not None:
        GPS = pd.read_csv(importPath + GPS_File,
                          header=None, names=['Index', 'Date1', 'Time1', 'Date2', 'Time2', 'Northing', 'North', 'Easting', 'East', 'Var1', 'Var2'])
        # Convert GPS data to geodataframe; eastings are logged unsigned, so
        # negate to get west longitude.
        GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))

        GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))

        GPS_gdf.set_index('DateTime', inplace=True)

        # Convert to UTM zone 21N
        GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32621")

    else:
        # Synthesize GPS data for Great House: distribute the digitized survey
        # path evenly over the RBR record's time span.
        GPS_gdf = gp.read_file('C:/Users/arey/files/Projects/West Coast/GreatHousePath_R3.shp')
        GPS_gdf['DateTime'] = pd.date_range(pd.to_datetime(RBR_Obs['Time'].iloc[0]),
                                            pd.to_datetime(RBR_Obs['Time'].iloc[-1]),
                                            periods=len(GPS_gdf)).values
        GPS_gdf.set_index('DateTime', inplace=True)

    #%% Read in site shapefile (polygons of the three survey areas)
    siteShp = gp.read_file('C:/Users/arey/files/Projects/West Coast/SitePolygons.shp')
    siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")

    #%% Merge GPS to RBR
    # Process RBR into datetime
    if RBR_File is not None:
        RBR_Obs['DateTime'] = pd.to_datetime(RBR_Obs['Time'])
        RBR_Obs.set_index('DateTime', inplace=True)

        # Nearest-in-time join (within 300 s) of logger samples to GPS fixes
        RBR_Obs_geo = pd.merge_asof(RBR_Obs, GPS_gdf,
                                    left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('300s'))
        RBR_Obs_geo = gp.GeoDataFrame(RBR_Obs_geo, geometry=RBR_Obs_geo.geometry, crs="EPSG:32621")

    #%% Merge GPS to JFE
    if JFE_File is not None:
        # Process JFE into datetime
        JFE_Obs['DateTime'] = pd.to_datetime(JFE_Obs['Date'])
        JFE_Obs.set_index('DateTime', inplace=True)

        # JFE samples every 15 s, so a tighter 60 s match tolerance is used
        JFE_Obs_geo = pd.merge_asof(JFE_Obs, GPS_gdf, left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('60s'))
        JFE_Obs_geo = gp.GeoDataFrame(JFE_Obs_geo, geometry=JFE_Obs_geo.geometry, crs="EPSG:32621")

    #%% Find and setup key plotting details

    # Shaded Water basemap (Mapbox custom style)
    mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
    # Sat water
    # mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckekcw3pn08am19qmqbhtq8sb/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'

    # Per-site map extents (UTM 21N), GFS sample point, and a flag marking the
    # RBR samples that fall inside that site's polygon.
    if siteName == 'Great House':
        axXlim = (213210.7529575412, 213562.64172686986)
        axYlim = (1464769.2243017585, 1465135.2219089477)
        GFS_Lon = -59.6441
        GFS_Lat = 13.2372
        RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[2, 1])
    elif siteName == 'Greensleeves':
        axXlim = (213269.99233348924, 213648.1643157148)
        # axYlim = (1463378.1020314451, 1463843.5442048472)
        axYlim = (1463378.1020314451, 1463950.5442048472)
        GFS_Lon = -59.6428
        GFS_Lat = 13.2289
        RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[1, 1])
    elif siteName == 'Old Queens Fort':
        axXlim = (213368.59866770002, 213745.6997016811)
        axYlim = (1460192.707288096, 1460672.371780407)
        GFS_Lon = -59.6419
        GFS_Lat = 13.1960
        RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[0, 1])

    # Observation window: first/last sample inside the site polygon, else trim
    # 20 samples off each end of the record (column 0 holds the raw 'Time').
    if RBR_Obs_geo['inArea'].any():
        # First and last times from area in shapefile
        minTime = RBR_Obs_geo[RBR_Obs_geo['inArea']==True].iloc[0, 0]
        maxTime = RBR_Obs_geo[RBR_Obs_geo['inArea']==True].iloc[-1, 0]
    else:
        # First and last times if no GPS data
        minTime = RBR_Obs_geo.iloc[20, 0]
        maxTime = RBR_Obs_geo.iloc[-20, 0]

    # Round minTime down to the preceding 6-hourly GFS analysis time
    metDate = minTime - timedelta(
        hours=minTime.hour % 6,
        minutes=minTime.minute,
        seconds=minTime.second,
        microseconds=minTime.microsecond)

    #%% GFS Met Import
    var = ['Temperature_surface', 'Wind_speed_gust_surface',
           'u-component_of_wind_height_above_ground', 'v-component_of_wind_height_above_ground']
    var_precp = ['Total_precipitation_surface_6_Hour_Accumulation']

    temp_1d = []
    gust_1d = []
    wndu_1d = []
    wndv_1d = []
    prep_1d = []
    time_1d = []

    # Set times to download: five 6-hourly analyses bracketing the survey
    startdate = metDate - timedelta(hours=18)
    enddate = metDate + timedelta(hours=6)
    date_list = pd.date_range(startdate, enddate, freq='6H')

    # Loop through dates
    for date in date_list:
        # Base THREDDS catalog for 0.5 degree GFS analysis data
        best_gfs = TDSCatalog('https://www.ncei.noaa.gov/thredds/catalog/model-gfs-g4-anl-files/' +
                              date.strftime('%Y%m') + '/' + date.strftime('%Y%m%d') + '/' + 'catalog.xml')

        # Analysis grib for the instantaneous fields; the +6 h forecast grib
        # carries the 6-hour precipitation accumulation.
        best_ds = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_000.grb2']
        best_ds_precp = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_006.grb2']

        # Format the query parameters
        ncss = best_ds.subset()
        query = ncss.query()

        ncss_precp = best_ds_precp.subset()
        query_precp = ncss_precp.query()

        # Extract data from the site's GFS point (10 m winds)
        query.lonlat_point(GFS_Lon, GFS_Lat).time(date)
        query.accept('netcdf')
        query.variables(var[0], var[1], var[2], var[3])
        query.vertical_level(10)

        data = ncss.get_data(query)
        data = xr.open_dataset(NetCDF4DataStore(data), drop_variables='height_above_ground4')

        query_precp.lonlat_point(GFS_Lon, GFS_Lat).time(date + timedelta(hours=6))
        query_precp.accept('netcdf')
        query_precp.variables(var_precp[0])

        data_precp = ncss_precp.get_data(query_precp)
        data_precp = xr.open_dataset(NetCDF4DataStore(data_precp))

        temp_3d = data[var[0]]
        gust_3d = data[var[1]]
        wndu_3d = data[var[2]]
        wndv_3d = data[var[3]]
        prep_3d = data_precp[var_precp[0]]

        # Read the individual point (with units) and append to the list
        temp_1d.append(temp_3d.metpy.unit_array.squeeze())
        gust_1d.append(gust_3d.metpy.unit_array.squeeze())
        wndu_1d.append(wndu_3d.metpy.unit_array.squeeze())
        wndv_1d.append(wndv_3d.metpy.unit_array.squeeze())
        prep_1d.append(prep_3d.metpy.unit_array.squeeze())
        time_1d.append(find_time_var(temp_3d))

    #%% Process Met Data
    # 24h precipitation total: sum of the four 6-hour accumulations
    met_prep = prep_1d[0] + prep_1d[1] + prep_1d[2] + prep_1d[3]

    # Linear interpolation between the two analyses bracketing minTime:
    # list index 3 is the analysis at metDate, index 4 is metDate + 6 h.
    timeWeight1 = minTime-metDate
    timeWeight2 = (metDate+timedelta(hours=6))-minTime

    # Normalize to fractions of the 6 h (21600 s) interval; the two sum to 1
    timeWeight1 = timeWeight1.seconds/21600
    timeWeight2 = timeWeight2.seconds/21600

    # BUGFIX: the interpolation weights were applied swapped (the analysis at
    # metDate was weighted by the time elapsed *since* metDate).  The value at
    # index 3 must be weighted by timeWeight2 = (metDate + 6h - minTime)/6h.
    met_gust = gust_1d[3] * timeWeight2 + gust_1d[4] * timeWeight1
    met_temp = temp_1d[3] * timeWeight2 + temp_1d[4] * timeWeight1
    met_wind = math.sqrt((wndv_1d[3].m.item(0) * timeWeight2 + wndv_1d[4].m.item(0)* timeWeight1) ** 2 +
                         (wndu_1d[3].m.item(0) * timeWeight2 + wndu_1d[4].m.item(0)* timeWeight1) **2 )
    # NOTE(review): atan2(v, u) yields the mathematical vector heading (CCW
    # from east, direction the wind blows toward), not the meteorological
    # "direction from" convention -- confirm the intended convention.
    met_wdir = math.degrees(math.atan2(
        wndv_1d[3].m.item(0) * timeWeight2 + wndv_1d[4].m.item(0)* timeWeight1,
        wndu_1d[3].m.item(0) * timeWeight2 + wndu_1d[4].m.item(0)* timeWeight1)) % 360

    #%% Read in wave conditions from BTS file
    if wave_bts_file[s] is not None:
        if siteName == 'Great House':
            wave_bts = pd.read_csv(wave_bts_file[s],
                                   names=['date', 'time', 'HM0', 'TP', 'TM', 'MWD', 'DPK', 'HSWL', 'TSWL', 'DPSWL', 'HSEA', 'TSEA', 'DPSEA'],
                                   header=0, skiprows=22, delim_whitespace=True)
            wave_bts['datetime'] = pd.to_datetime(wave_bts['date'] + ' ' + wave_bts['time'])
            wave_bts.set_index('datetime', inplace=True)

        # Wave statistics at the record closest to the survey start.
        # NOTE(review): for non-Great-House sites this reuses the wave_bts
        # frame read on an earlier Great House pass -- confirm intended.
        met_hmo = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].HM0
        met_tp = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].TP
        met_mwd = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].MWD
    else:
        # Sentinel for "no wave data"; replaced by NaN in the summary merge
        met_hmo = -999
        met_tp = -999
        met_mwd = -999

    #%% Plot time series for Geo data
    fontprops = fm.FontProperties(size=25)

    if JFE_File is None:
        # Turbidity-only layout: one panel and a 9-row stats table
        fig, axesTMP = plt.subplots(nrows=1, ncols=1, figsize=(19, 5), constrained_layout=True)

        RBRparam = ['Turbidity ']  # note the trailing space in the column name
        RBRparamName = ['Turbidity [NTU]']
        RBRparmCmap = [cmo.turbid]
        RBRparamMin = [0.0]
        RBRparamMax = [60.0]

        dataTable = np.zeros([9, 4])
        roundIDX = [1, 1, 0, 1, 1, 1, 1, 1]
        axes = []
        axes.append(axesTMP)
    else:
        # Full layout: 4 RBR panels + 3 JFE panels and a 15-row stats table
        fig, axes = plt.subplots(nrows=7, ncols=1, figsize=(19, 25), constrained_layout=True)
        dataTable = np.zeros([15, 4])
        roundIDX = [1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1]

        RBRparam = ['Depth ', 'Salinity ', 'Dissolved O₂ saturation ', 'Temperature ']
        RBRparamName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ saturation [%]', 'Temperature [degC]']
        RBRparmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal]
        RBRparamMin = [0.0, 34.0, 32.5, 29.0]
        RBRparamMax = [1.0, 36.0, 34.0, 31.0]

        JFEparam = ['Turb. -M[FTU]', 'Chl-Flu.[ppb]', 'Chl-a[ug/l]']
        JFEparamName = ['Turbidity [FTU]', 'Chl-Flu. [ppb]', 'Chl-a [ug/l]']
        JFEparamMin = [0.0, 0.0, 0.0]
        JFEparamMax = [20.0, 1.0, 1.0]

    fig.patch.set_facecolor('white')

    fontprops = fm.FontProperties(size=25)

    # Met/wave rows of the stats table (column 0 holds the value).
    # BUGFIX: Kelvin-to-Celsius offset is 273.15 (was 272.15).
    dataTable[0, 0] = met_temp.m.item(0)-273.15
    dataTable[1, 0] = met_wind
    dataTable[2, 0] = met_wdir
    dataTable[3, 0] = met_prep.m.item(0)
    dataTable[4, 0] = met_hmo
    dataTable[5, 0] = met_tp
    dataTable[6, 0] = met_mwd

    # Per-parameter turbidity local-maximum indices (None for other params)
    ilocs_max = []
    ilocs_max_pts = []
    RBR_mask = []
    JFE_mask = []

    for paramIDX, param in enumerate(RBRparam):
        RBR_Obs_geo.loc[minTime:maxTime, param].plot(
            ax=axes[paramIDX], label='1 Second Observations', color='lightgrey')

        # Create mask for RBR data based on time and parameter minimum
        RBR_mask.append(((RBR_Obs_geo.index>minTime) &
                         (RBR_Obs_geo.index<maxTime) &
                         (RBR_Obs_geo[param]>RBRparamMin[paramIDX])))

        # 60-sample (~1 min at 1 Hz) Nuttall-window rolling mean
        RBR_smoothed = RBR_Obs_geo.loc[RBR_mask[paramIDX], param].rolling(
            60, win_type='nuttall',center=True).mean()

        RBR_smoothed.plot(
            ax=axes[paramIDX], label='1 Minute Average', color='black',
            linewidth=3)

        # Find the local maximums for Turbidity
        if param == 'Turbidity ':
            ilocs_max.append(argrelextrema(RBR_smoothed.values,
                                           np.greater_equal, order=40, mode='wrap')[0])
            ilocs_max_pts.append(RBR_smoothed.iloc[ilocs_max[paramIDX]].index.values)

            # Add numbered labels if GPS data is available
            if GPS_File is not None:
                axes[paramIDX].scatter(RBR_smoothed.iloc[
                    ilocs_max[paramIDX]].index, np.ones(len(ilocs_max[paramIDX])) * 30, 75,
                    color='blue')
                for a in range(0, len(ilocs_max[paramIDX])):
                    axes[paramIDX].annotate(str(a+1), (ilocs_max_pts[paramIDX][a], 30), fontsize=30)
        else:
            ilocs_max.append(None)
            ilocs_max_pts.append(None)

        # Summary stats computed from the smoothed series (min clipped at 0)
        dataTable[paramIDX+7, 0] = RBR_smoothed.mean(skipna=True)
        dataTable[paramIDX+7, 1] = RBR_smoothed.std(skipna=True)
        dataTable[paramIDX+7, 2] = max(RBR_smoothed.min(skipna=True), 0)
        dataTable[paramIDX+7, 3] = RBR_smoothed.max(skipna=True)

        axes[paramIDX].set_ylabel(RBRparamName[paramIDX])
        axes[paramIDX].set_title('RBR: ' + RBRparamName[paramIDX])
        axes[paramIDX].set_xlabel('')
        axes[paramIDX].set_ylim(RBRparamMin[paramIDX], RBRparamMax[paramIDX])
        axes[paramIDX].legend(loc='upper right')

    if JFE_File is not None:
        for paramIDX, param in enumerate(JFEparam):
            JFE_Obs_geo.loc[minTime:maxTime, param].plot(
                ax=axes[paramIDX+4], label='15 Second Observations', color='lightgrey')

            JFE_mask.append(((JFE_Obs_geo.index > minTime) &
                             (JFE_Obs_geo.index < maxTime) &
                             (JFE_Obs_geo[param] > JFEparamMin[paramIDX])))

            # 20-sample rolling mean (~5 min at 15 s sampling; the legend says
            # 1 minute -- TODO confirm the intended window)
            JFE_smoothed = JFE_Obs_geo.loc[JFE_mask[paramIDX], param].rolling(
                20, win_type='nuttall', center=True).mean()

            JFE_smoothed.plot(
                ax=axes[paramIDX+4], label='1 Minute Average', color='black',
                linewidth=3)

            # Find the local maximums for Turbidity
            if param == 'Turb. -M[FTU]':
                ilocs_max.append(argrelextrema(JFE_smoothed.values,
                                               np.greater_equal, order=60, mode='wrap')[0])
                ilocs_max_pts.append(JFE_smoothed.iloc[ilocs_max[paramIDX+4]].index.values)

                # Add labels if GPS data is available
                if GPS_File is not None:
                    axes[paramIDX+4].scatter(JFE_smoothed.iloc[
                        ilocs_max[paramIDX+4]].index, np.ones(len(ilocs_max[paramIDX+4])) * 10, 75,
                        color='blue')
                    for a in range(0, len(ilocs_max[paramIDX+4])):
                        axes[paramIDX+4].annotate(str(a + 1), (ilocs_max_pts[paramIDX+4][a], 10), fontsize=30)
            else:
                ilocs_max.append(None)
                ilocs_max_pts.append(None)

            dataTable[paramIDX+4+7, 0] = JFE_smoothed.mean(skipna=True)
            dataTable[paramIDX+4+7, 1] = JFE_smoothed.std(skipna=True)
            dataTable[paramIDX+4+7, 2] = max(JFE_smoothed.min(skipna=True), 0)
            dataTable[paramIDX+4+7, 3] = JFE_smoothed.max(skipna=True)

            axes[paramIDX+4].set_ylabel(JFEparamName[paramIDX])
            axes[paramIDX+4].set_title('JFE: ' + JFEparamName[paramIDX])
            axes[paramIDX+4].set_xlabel('')

            axes[paramIDX+4].set_ylim(JFEparamMin[paramIDX], JFEparamMax[paramIDX])
            axes[paramIDX+4].legend(loc='upper right')

        # Date label on the bottom panel ('%#d' is Windows-specific strftime)
        axes[paramIDX+4].set_xlabel(minTime.strftime('%B %#d, %Y'))
    else:
        axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))

    # Format Data Table: rounded mean column plus a 'min / max' string column
    dataTableFormat_mean = []
    dataTableFormat_maxmin = []
    for d in range(0, len(roundIDX)):
        dataTableFormat_mean.append(round(dataTable[d, 0], roundIDX[d]))
        if dataTable[d, 3] == 0:
            # A zero max means the row was never filled -> no data marker
            dataTableFormat_maxmin.append('--')
        else:
            dataTableFormat_maxmin.append(str(round(dataTable[d, 2], roundIDX[d])) + ' / ' + str(round(dataTable[d, 3], roundIDX[d])))

    dfOut = pd.DataFrame(dataTable[:, :])
    dfOutFormat = pd.DataFrame([dataTableFormat_mean, dataTableFormat_maxmin]).transpose()

    # Change the column names (formatted table gets a two-level header keyed
    # by observation date, which the summary-merge step joins on)
    dfOut.columns =['Mean', 'Standard Deviation', 'Min', 'Max']
    dfOutFormat.columns = [np.array([minTime.strftime('%B %#d, %Y'), minTime.strftime('%B %#d, %Y')]),
                           np.array(['Mean', 'Min / Max'])]
    # Change the row indexes (last dfOut row stores the observation date)
    if JFE_File is not None:
        dfOut.iloc[14, 0] = minTime.strftime('%B %#d, %Y')
        rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
                    'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
                    'Mean Wave Direction Offshore [deg]', 'Depth [m]', 'Salinity [PSU]',
                    'Dissolved O₂ saturation [%]', 'Temperature [degC]',
                    'Turbidity [FTU]', 'Chl-Flu. [ppb]', 'Chl-a [ug/l]', 'Observation Date']
        dfOut.index = rowNames
        dfOutFormat.index = rowNames[0:-1]
    else:
        dfOut.iloc[8, 0] = minTime.strftime('%B %#d, %Y')
        rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
                    'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
                    'Mean Wave Direction Offshore [deg]',
                    'Turbidity [NTU]', 'Observation Date']
        dfOut.index = rowNames
        dfOutFormat.index = rowNames[0:-1]

    fig.suptitle(siteName + ', ' +minTime.strftime('%B %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)

    plt.show()

    # BUGFIX: make sure the Figures folder exists before the first savefig
    # (the original only created it just before the map figure below, so the
    # time-series savefig failed on a fresh deployment folder).
    if not os.path.exists(importPath + '/Figures'):
        os.mkdir(importPath + '/Figures')

    dfOut.to_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
    dfOutFormat.to_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx')

    dfOut.to_csv(importPath + '/Summary_Stats_' + siteName + '.csv')

    fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.pdf',
                bbox_inches='tight')
    fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.png',
                bbox_inches='tight', dpi=500)

    #%% Plot Maps
    if JFE_File is None:
        # Only Turbidity Data
        fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(9, 9), constrained_layout=True)
        ax = []
        ax.append(axes)
    else:
        fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(19, 25), constrained_layout=True)
        ax = axes.flat

    fig.patch.set_facecolor('white')

    fontprops = fm.FontProperties(size=25)
    # North-arrow anchor position (axes fraction) and arrow length
    x, y, arrow_length = 0.95, 0.93, 0.20
    plt.rcParams.update({'font.size': 22})

    # Data-driven extents (currently unused; fixed per-site extents applied below)
    axXlimTT = (RBR_Obs_geo.loc[minTime:maxTime].geometry.x.min()-100,
                RBR_Obs_geo.loc[minTime:maxTime].geometry.x.max()+100)
    axYlimTT = (RBR_Obs_geo.loc[minTime:maxTime].geometry.y.min()-100,
                RBR_Obs_geo.loc[minTime:maxTime].geometry.y.max()+100)

    plt.setp(axes, xlim=axXlim, ylim=axYlim)
    # plt.setp(axes, xlim=axXlimTT, ylim=axYlimTT)

    # Plot the RBR observations
    for paramIDX, param in enumerate(RBRparam):
        if RBR_File is not None:

            # Draw thick black markers for the approximate (synthesized) path
            if GPS_File is None:
                ax[paramIDX].scatter(RBR_Obs_geo.loc[minTime:maxTime].geometry.x,
                                     RBR_Obs_geo.loc[minTime:maxTime].geometry.y,
                                     150, marker='o', color='black', label='Approximate Path')
                plt.legend(loc='upper left')

            RBR_Obs_geo.loc[minTime:maxTime].plot(
                column=param, ax=ax[paramIDX], vmin=RBRparamMin[paramIDX], vmax=RBRparamMax[paramIDX],
                legend=True, legend_kwds={'label': RBRparamName[paramIDX]},
                cmap=RBRparmCmap[paramIDX], markersize=20)

            ctx.add_basemap(ax[paramIDX], source=mapbox, crs='EPSG:32621')

            # Number the turbidity maxima on the map to match the time series
            if (not RBR_Obs_geo.geometry.isnull().all()) & (GPS_File is not None) & (param == 'Turbidity '):
                for a in range(0, len(ilocs_max[paramIDX])):
                    ax[paramIDX].annotate(str(a + 1), (RBR_Obs_geo.loc[RBR_mask[paramIDX]].iloc[
                        ilocs_max[paramIDX][a], :].geometry.x,
                        RBR_Obs_geo.loc[RBR_mask[paramIDX]].iloc[
                            ilocs_max[paramIDX][a], :].geometry.y), fontsize=30)

        ax[paramIDX].set_title(RBRparamName[paramIDX])
        ax[paramIDX].locator_params(axis='y', nbins=3)
        ax[paramIDX].ticklabel_format(useOffset=False, style='plain', axis='both')

        ax[paramIDX].get_xaxis().set_ticks([])
        ax[paramIDX].get_yaxis().set_ticks([])

        # Add scale-bar and north arrow
        scalebar = AnchoredSizeBar(ax[paramIDX].transData,
                                   100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
        ax[paramIDX].add_artist(scalebar)
        ax[paramIDX].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
                              arrowprops=dict(facecolor='black', width=6, headwidth=30),
                              ha='center', va='center', fontsize=35,
                              xycoords=ax[paramIDX].transAxes)

    # Plot JFE points (turbidity and chlorophyll fluorescence only on the maps)
    JFEparam = ['Turb. -M[FTU]', 'Chl-Flu.[ppb]']
    JFEparamName = ['Turbidity [FTU]', 'Chl-Flu. [ppb]']
    JFEparamCmp = [cmo.turbid, cmo.algae]
    JFEparamMin = [0.0, 0.0]
    JFEparamMax = [10.0, 1.0]
    if JFE_File is not None:
        for paramIDX, param in enumerate(JFEparam):
            # (redundant inner JFE_File check removed)
            JFE_Obs_geo.loc[minTime:maxTime].plot(
                column=param, ax=ax[paramIDX+4], vmin=JFEparamMin[paramIDX], vmax=JFEparamMax[paramIDX],
                legend=True, legend_kwds={'label': JFEparamName[paramIDX]},
                cmap=JFEparamCmp[paramIDX], markersize=20)
            ctx.add_basemap(ax[paramIDX+4], source=mapbox, crs='EPSG:32621')

            # Add time labels
            if (not JFE_Obs_geo.geometry.isnull().all()) & (GPS_File is not None) & (param == 'Turb. -M[FTU]'):
                for a in range(0, len(ilocs_max[paramIDX+4])):
                    ax[paramIDX+4].annotate(str(a + 1), (JFE_Obs_geo.loc[JFE_mask[paramIDX]].iloc[
                        ilocs_max[paramIDX+4][a], :].geometry.x,
                        JFE_Obs_geo.loc[JFE_mask[paramIDX]].iloc[
                            ilocs_max[paramIDX+4][a], :].geometry.y), fontsize=30)

            ax[paramIDX+4].set_title(JFEparamName[paramIDX])
            ax[paramIDX+4].locator_params(axis='y', nbins=3)
            ax[paramIDX+4].ticklabel_format(useOffset=False, style='plain', axis='both')
            ax[paramIDX+4].get_xaxis().set_ticks([])
            ax[paramIDX+4].get_yaxis().set_ticks([])

            # Add scale-bar and north arrow
            scalebar = AnchoredSizeBar(ax[paramIDX+4].transData,
                                       100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
            ax[paramIDX+4].add_artist(scalebar)
            ax[paramIDX+4].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
                                    arrowprops=dict(facecolor='black', width=6, headwidth=30),
                                    ha='center', va='center', fontsize=25,
                                    xycoords=ax[paramIDX+4].transAxes)

    fig.suptitle(siteName + ', ' + minTime.strftime('%b %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
    plt.show()

    if not os.path.exists(importPath + '/Figures'):
        os.mkdir(importPath + '/Figures')

    fig.savefig(importPath + '/Figures/SummaryMap_' + siteName + '.pdf',
                bbox_inches='tight')

    fig.savefig(importPath + '/Figures/SummaryMap_' + siteName + '.png',
                bbox_inches='tight', dpi=500)
|
|
|
|
#%% Summary Sheet
# Merge the per-deployment formatted stats workbooks into one table per site
# (the multi-index columns are keyed by observation date) and write each merged
# table to the network share.
plotIDXsLoop = [[0, 3, 6, 9],    # Great House deployments
                [1, 4, 7, 10],   # Greensleeves deployments
                [2, 5, 8, 11]]   # Old Queens Fort deployments

for i in range(0, 3):
    summTable = None
    plotIDXs = plotIDXsLoop[i]

    for s, plotIDX in enumerate(plotIDXs):
        ## Define master import path
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]

        obsStatsIN = pd.read_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx', header=[0,1], index_col=0)
        # Monitoring deployments (9-11) report turbidity in FTU; relabel the
        # row so the join lines up with the other phases.
        if plotIDX in (9, 10, 11):
            obsStatsIN.rename({'Turbidity [NTU]': 'Turbidity [FTU]'}, inplace=True)

        # First workbook seeds the table; later ones join on the row index
        summTable = obsStatsIN if s == 0 else summTable.join(obsStatsIN)

    # Replace the -999 "no data" sentinel with NaN
    summTable.replace(-999, np.nan, inplace=True)

    summTable.to_excel('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/Summary_StatsMerge_' + siteName + '.xlsx')
|
|
|
|
|
|
#%% Summary Plot
# One figure per site: a 5x2 grid of scatter panels, each showing one
# met/water-quality statistic against deployment date.

# Row labels of plotTable, in order
plotvars = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
            'Significant Wave Height [m]', 'Salinity [PSU]',
            'Dissolved O₂ [%]', 'Temperature [degC]',
            'Turbidity [FTU]', 'Chl-Flu. [ppb]']

plotIDXsLoop = []
plotIDXsLoop.append([0, 3, 6, 9])    # Great House deployments
plotIDXsLoop.append([1, 4, 7, 10])   # Greensleeves deployments
plotIDXsLoop.append([2, 5, 8, 11])   # Old Queens Fort deployments

for i in range(0, 3):
    summTable = None  # vestigial; kept for parity with the merge loop above
    plotIDXs = plotIDXsLoop[i]
    plotDates = []
    # Rows = the 10 plotvars, columns = deployments for this site.
    # BUGFIX: initialized to NaN -- np.empty left uninitialized garbage in the
    # rows the Monitoring branch below never fills, and that garbage was
    # plotted whenever it happened to exceed the 0.00001 NaN threshold.
    plotTable = np.full([10, len(plotIDXs)], np.nan)

    for s, plotIDX in enumerate(plotIDXs):
        ## Define master import path
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]

        obsStatsIN = pd.read_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
        if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
            # Monitoring stats sheets carry only the met rows (0-2), turbidity
            # at row 7, and the observation date at row 8.
            plotTable[0:3, s] = obsStatsIN.iloc[0:3, 1]
            plotTable[8, s] = obsStatsIN.iloc[7, 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[8, 1], '%B %d, %Y'))
        else:
            plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 12], 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[14, 1], '%B %d, %Y'))

    fig, axes = plt.subplots(nrows=5, ncols=2, figsize=(19, 25), sharex=True)
    fig.patch.set_facecolor('white')
    fig.tight_layout(pad=3)
    ax = axes.flat

    # Replace zero/near-zero placeholder values with NaN so they don't plot
    plotTable[plotTable < 0.00001] = np.nan

    for v in range(0, 10):
        ax[v].scatter(plotDates, plotTable[v, :], 250)
        ax[v].set_ylabel(plotvars[v])

    fig.suptitle(siteName, fontsize=35)
    plt.gcf().autofmt_xdate()
    plt.gcf().align_ylabels()
    plt.show()

    fig.savefig('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/' + siteName + '.pdf',
                bbox_inches='tight')
    fig.savefig('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/' + siteName + '.png',
                bbox_inches='tight', dpi=500)
|