AJMR-Python-Baird/Mustique/WestCoastDataTemplate_V5.py

1084 lines
44 KiB
Python

#%% Project Setup
import os
import pandas as pd
import geopandas as gp
from scipy.signal import argrelextrema
import numpy as np
import math
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
import matplotlib.dates as mdates
import matplotlib as mpl
import cartopy.crs as ccrs
import contextily as ctx
import cmocean.cm as cmo
from shapely.geometry import Point, LineString
from xarray.backends import NetCDF4DataStore
import xarray as xr
from datetime import datetime, timedelta
from netCDF4 import num2date
from metpy.units import units
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from metpy.plots import ctables
from siphon.catalog import TDSCatalog
import gsw as gsw
#%% Helper function for finding proper time variable
def find_time_var(var, time_basename='time'):
    """Return the coordinate of ``var`` whose name starts with ``time_basename``.

    Scans ``var.coords`` in insertion order and returns the first coordinate
    whose name begins with the given prefix (default ``'time'``).

    Raises
    ------
    ValueError
        If no coordinate name starts with ``time_basename``.
    """
    match = next(
        (name for name in var.coords if name.startswith(time_basename)), None)
    if match is None:
        raise ValueError('No time variable found for ' + var.name)
    return var.coords[match]
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
importPaths = ['//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211006 WQ Pre_Construction/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211006 WQ Pre_Construction/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211006 WQ Pre_Construction/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211021 WQ During Construction/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211021 WQ During Construction/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211021 WQ During Construction/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211104 WQ Post Construction Monitoring/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211104 WQ Post Construction Monitoring/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211104 WQ Post Construction Monitoring/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211125 WQ Post Construction Monitoring/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211125 WQ Post Construction Monitoring/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211125 WQ Post Construction Monitoring/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220208 WQ February/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220208 WQ February/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220208 WQ February/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220318 WQ sampling by LNW with WiMo/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220318 WQ sampling by LNW with WiMo/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220318 WQ sampling by LNW with WiMo/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220414 WQ sampling/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220414 WQ sampling/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220414 WQ sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220530 WQ sampling/Great House',
None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220530 WQ sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220709 WQ sampling/Great House',
None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220709 WQ sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220812 WQ sampling/Great House',
None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220812 WQ sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221115 WQ sampling/Great House',
None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221115 WQ sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221127 WQ Sampling/Crane',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Great House',
None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Crane',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Caribee',
'//srv-ott3.baird.com/Projects/13711.101 Caribee (Indigo) Hotel, Barbados/03_Data/03_Field/20220613_EnvData']
siteNames = ['Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
None,
'Old Queens Fort',
'Great House',
None,
'Old Queens Fort',
'Great House',
None,
'Old Queens Fort',
'Great House',
None,
'Old Queens Fort',
'Crane',
'Great House',
None,
'Old Queens Fort',
'Crane',
'Caribee',
'Caribee']
timeLabels= ['Before Construction',
'Before Construction',
'Before Construction',
'During Construction',
'During Construction',
'During Construction',
'After Construction',
'After Construction',
'After Construction',
'Monitoring',
'Monitoring',
'Monitoring',
'February',
'February',
'February',
'March',
'March',
'March',
'April',
'April',
'April',
'May',
None,
'May',
'July',
None,
'July',
'August',
None,
'August',
'November',
None,
'November',
'November',
'February',
None,
'February',
'February',
'February',
'June 2022']
wave_bts_file = [
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None]
# %% Read in site shapefile
# Polygons outlining each monitoring site; reprojected to UTM zone 21N
# (EPSG:32621) so they can be intersected with the GPS tracks below.
siteShp = gp.read_file('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/SitePolygons_Crane_Caribee.shp')
siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")
# Switch to save figures/ sheets
figureSave = True
# Main processing loop: one iteration per sample folder.  The range is a
# manually-edited subset of the parallel lists above.
for s in range(22, 40): #range(15, 27):
## Define master import path
importPath = importPaths[s]
siteName = siteNames[s]
timeLabel = timeLabels[s]
# NOTE(review): this listdir runs before the None guard below; when
# importPath is None, os.listdir(None) silently lists the current working
# directory instead of failing — the `if importPath is None` check should
# come first.
importFiles = os.listdir(importPath)
# Skip if no importPath
if importPath is None:
print('Skipping site ' + str(s + 1))
continue
# Monitor progress
print('Processing site ' + str(s+1) + ' of ' + str(len(importPaths)) + ' (' + siteName + '_' + timeLabel + ')')
# Initialize import variables
OBS_File = None
GPS_File = None
RBR_File = None
JFE_File = None
# Classify every file in the folder by naming convention:
#   *_A.csv (not Summary)              -> JFE logger
#   other *.csv (not Summary/'export') -> OBS logger
#   *.csv containing 'export'          -> GPS track (CSV flavour)
#   *.xlsx (not Summary)               -> RBR logger
#   *.xls (not Summary)                -> GPS track (xls flavour)
# The '.xlsx' test must precede '.xls' because '.xls' is a substring of '.xlsx'.
for i in range(0, len(importFiles)):
if '_A.csv' in importFiles[i] and 'Summary' not in importFiles[i]:
JFE_File = importFiles[i]
elif '.csv' in importFiles[i] and 'Summary' not in importFiles[i] and 'export' not in importFiles[i]:
OBS_File = importFiles[i]
elif '.csv' in importFiles[i] and 'export' in importFiles[i]:
GPS_File = importFiles[i]
GPS_Type = 'CSV'
elif '.xlsx' in importFiles[i] and 'Summary' not in importFiles[i]:
RBR_File = importFiles[i]
elif '.xls' in importFiles[i] and 'Summary' not in importFiles[i]:
GPS_File = importFiles[i]
GPS_Type = 'xls'
#%% RBR Import Data
# RBR logger: Excel export with a 'Data' sheet; the 'Time' column becomes the
# DateTime index.
if RBR_File is not None:
RBR_Obs = pd.read_excel(importPath + '/' + RBR_File,
sheet_name='Data', skiprows=0, header=1)
RBR_Obs['DateTime'] = pd.to_datetime(RBR_Obs['Time'])
RBR_Obs.set_index('DateTime', inplace=True)
#%% JFE Import Data
# JFE logger: CSV with 30 header rows.  Timestamps are supplemented with a
# seconds offset derived from the row index — assumes 1 Hz sampling in
# minute-aligned blocks of 60 rows (TODO confirm against a raw file).
if JFE_File is not None:
JFE_Obs = pd.read_csv(importPath + '/' + JFE_File, skiprows=30)
JFE_Obs['DateTime'] = pd.to_datetime(JFE_Obs['Date'])
# Add seconds to time based on mod 60 of index
JFE_Obs['DateTime'] = JFE_Obs['DateTime'] + pd.to_timedelta(JFE_Obs.index % 60, unit='s')
JFE_Obs.set_index('DateTime', inplace=True)
#%% Obs Import Data
if OBS_File is not None:
Obs_dat = pd.read_csv(importPath + '/' + OBS_File, skiprows=0, header=0)
# Drop rows with metadata
Obs_dat =Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()]
# Set Time Zone for Sensor. Sometimes it is set to UTC, sometimes it is set to EST
# Samples 0-14 and 30+ were logged in local Barbados time and are converted
# to UTC; samples 15-29 were already logged in UTC.
if s < 15 or s >= 30:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados').dt.tz_convert('UTC')
else:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('UTC')
# Set Index
Obs_dat.set_index('DateTime', inplace=True)
else:
# Merge RBR and JFS obs into one dataframe
# (outer join on the DateTime index keeps samples from both loggers)
if JFE_File is not None:
Obs_dat = pd.merge(RBR_Obs, JFE_Obs, how='outer', on='DateTime')
else:
Obs_dat = RBR_Obs
# Sort index by DateTime
Obs_dat.sort_index(inplace=True)
# Rename columns to match Obs_dat
# Map the RBR/JFE column names onto the OBS channel naming used downstream.
Obs_dat = Obs_dat.rename(columns={'Pressure ': 'CH0:Pressure(dbar)', 'Temperature ': 'CH1:Temperature(degC)',
'Chl-a[ug/l]': 'CH2:Chlorophyll_a(ppb)', 'Conductivity ': 'CH3:Conductivity(mS/cm)',
'Temp.[deg C]': 'CH4:Temperature(degC)', 'Turb. -M[FTU]': 'CH6:Turbidity(NTU)',
'Dissolved O₂ saturation ': 'CH8:Oxygen_Saturation(%)'})
# Set Time Zone for Sensor. Sometimes it is set to UTC, sometimes it is set to EST
Obs_dat.index = Obs_dat.index.tz_localize('UTC')
#%% GPS Import Data
if GPS_File is not None:
if GPS_Type == 'CSV':
GPS = pd.read_csv(importPath + '/' + GPS_File,
header=None, names=['Index', 'Date1', 'Time1', 'Date2', 'Time2', 'Northing', 'North', 'Easting', 'East', 'Var1', 'Var2'])
#convert GPS data to geodataframe
# Easting is negated: the column appears to hold unsigned west longitude
# in degrees — TODO confirm against the raw export.
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))
# Later samples (s >= 30) use the first date/time pair; earlier ones the second.
# NOTE(review): this DateTime stays tz-naive while Obs_dat's index is
# UTC-aware — confirm merge_asof below accepts the mix on the pinned
# pandas version.
if s >= 30:
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date1'].astype(str) + ' ' + GPS_gdf['Time1'].astype(str))
else:
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))
elif GPS_Type == 'xls':
GPS = pd.read_excel(importPath + '/' + GPS_File, header=0)
#convert GPS data to geodataframe
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(GPS.x, GPS.y, crs="EPSG:4326"))
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['time']).dt.tz_localize('UTC')
else:
# Synthesize GPS data for great house
# No GPS file: load a fixed survey path and spread its vertices uniformly
# across the RBR record's time span.
GPS_gdf = gp.read_file('C:/Users/arey/files/Projects/West Coast/GreatHousePath_R3.shp')
GPS_gdf['DateTime'] = pd.date_range(pd.to_datetime(RBR_Obs['Time'].iloc[0]),
pd.to_datetime(RBR_Obs['Time'].iloc[-1]),
periods=len(GPS_gdf)).values
# Set Datetime as index
GPS_gdf.set_index('DateTime', inplace=True)
# Sort by time
GPS_gdf.sort_index(inplace=True)
# Convert to UTM
GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32621")
#%% Merge Obs to RBR
# Process Obs into datetime
# Merge with GPS as dataframe
# Attach the nearest GPS fix (within 5 minutes) to every observation row.
Obs_geo = pd.merge_asof(Obs_dat, GPS_gdf,
left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('300s'))
Obs_geo = gp.GeoDataFrame(Obs_geo, geometry=Obs_geo.geometry, crs="EPSG:32621")
#%% Find and setup key plotting details
# Shaded Water
# Mapbox basemap tile URL (access token embedded in the URL).
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Sat water
# mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckekcw3pn08am19qmqbhtq8sb/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Per-site configuration: map extents (UTM 21N), the GFS grid point to sample,
# and the siteShp row whose polygon defines the 'inArea' flag.
if siteName == 'Great House':
axXlim = (213210.7529575412, 213562.64172686986)
axYlim = (1464769.2243017585, 1465135.2219089477)
GFS_Lon = -59.6441
GFS_Lat = 13.2372
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[2, 1])
elif siteName == 'Greensleeves':
axXlim = (213269.99233348924, 213648.1643157148)
# axYlim = (1463378.1020314451, 1463843.5442048472)
axYlim = (1463378.1020314451, 1463950.5442048472)
GFS_Lon = -59.6428
GFS_Lat = 13.2289
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[1, 1])
elif siteName == 'Old Queens Fort':
axXlim = (213368.59866770002, 213745.6997016811)
axYlim = (1460192.707288096, 1460672.371780407)
GFS_Lon = -59.6419
GFS_Lat = 13.1960
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[0, 1])
elif siteName == 'Crane':
axXlim = (234835, 235507)
axYlim = (1449966, 1450580)
GFS_Lon = -59.442
GFS_Lat = 13.1075
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[3, 1])
elif siteName == 'Caribee':
axXlim = (217929, 218345)
axYlim = (1446528, 1446818)
GFS_Lon = -59.442
GFS_Lat = 13.1075
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[4, 1])
# Set min and max times using shape
if Obs_geo['inArea'].any():
# First and last times from area in shapefile
minTime = Obs_geo[Obs_geo['inArea']==True].index[0]
maxTime = Obs_geo[Obs_geo['inArea']==True].index[-1]
else:
# First and last times if no GPS data
# Fall back to trimming 20 samples off each end of the record.
minTime = Obs_geo.index[20]
maxTime = Obs_geo.index[-20]
# Round minTime down to the previous 6-hourly synoptic time (00/06/12/18).
metDate = minTime - timedelta(
hours = minTime.hour % 6,
minutes=minTime.minute,
seconds=minTime.second,
microseconds=minTime.microsecond)
#%% GFS Met Import
# Download GFS 0.5-degree analyses from the NCEI THREDDS server and extract
# point values at (GFS_Lon, GFS_Lat) for the synoptic times around the survey.
var = ['Temperature_surface', 'Wind_speed_gust_surface',
'u-component_of_wind_height_above_ground', 'v-component_of_wind_height_above_ground']
var_precp = ['Total_precipitation_surface_6_Hour_Accumulation']
temp_1d = []
gust_1d = []
wndu_1d = []
wndv_1d = []
prep_1d = []
time_1d = []
# Set times to download
# Five 6-hourly times: metDate-18h .. metDate+6h (list indices 0..4).
startdate = metDate - timedelta(hours=18)
enddate = metDate + timedelta(hours=6)
# startdate = metDate - timedelta(hours=36)
# enddate = metDate + timedelta(hours=-12)
date_list = pd.date_range(startdate, enddate, freq='6H')
# Loop through dates
for date in date_list:
# Base URL for 0.5 degree GFS data
best_gfs = TDSCatalog('https://www.ncei.noaa.gov/thredds/catalog/model-gfs-g4-anl-files/' +
date.strftime('%Y%m') + '/' + date.strftime('%Y%m%d') + '/' + 'catalog.xml')
# Generate URLs for specific grib file
# _000 = analysis; _006 = 6-h forecast, which carries the 6-h accumulated precip.
best_ds = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_000.grb2']
best_ds_precp = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_006.grb2']
# Format the query parameters
ncss = best_ds.subset()
ncss_precp = best_ds_precp.subset()
query_precp = ncss_precp.query()
# Loop through variables and request
data = dict()
for v in var:
# Setup query
query = ncss.query()
# Extract data from specific point
query.lonlat_point(GFS_Lon, GFS_Lat).time(date)
query.accept('netcdf')
query.variables(v)
# IF there are multiple vertical levels, select the desired one
# (10 m above ground for the wind components)
if v == 'u-component_of_wind_height_above_ground' or v == 'v-component_of_wind_height_above_ground':
query.vertical_level(10)
dataIN = ncss.get_data(query)
data[v] = xr.open_dataset(NetCDF4DataStore(dataIN), drop_variables='height_above_ground4')
query_precp.lonlat_point(GFS_Lon, GFS_Lat).time(date + timedelta(hours=6))
query_precp.accept('netcdf')
query_precp.variables(var_precp[0])
dataIN = ncss_precp.get_data(query_precp)
data_precp = dict()
data_precp[var_precp[0]] = xr.open_dataset(NetCDF4DataStore(dataIN))
temp_3d = data[var[0]]
gust_3d = data[var[1]]
wndu_3d = data[var[2]]
wndv_3d = data[var[3]]
prep_3d = data_precp[var_precp[0]]
# Read the individual point (with units) and append to the list
temp_1d.append(temp_3d.metpy.quantify().Temperature_surface.data.squeeze())
gust_1d.append(gust_3d.metpy.quantify().Wind_speed_gust_surface.data.squeeze())
wndu_1d.append(wndu_3d.metpy.quantify()['u-component_of_wind_height_above_ground'].data.squeeze())
wndv_1d.append(wndv_3d.metpy.quantify()['v-component_of_wind_height_above_ground'].data.squeeze())
prep_1d.append(prep_3d.metpy.quantify()['Total_precipitation_surface_6_Hour_Accumulation'].data.squeeze())
time_1d.append(find_time_var(temp_3d))
#%% Process Met Data
# 24h Precipitation Total
# Time weighted average of last two points for everything else
# Four 6-h accumulations (indices 0-3) = 24-h total ending at metDate+6h.
met_prep = prep_1d[0] + prep_1d[1] + prep_1d[2] + prep_1d[3]
timeWeight1 = minTime-metDate
timeWeight2 = (metDate+timedelta(hours=6))-minTime
timeWeight1 = timeWeight1.seconds/21600
timeWeight2 = timeWeight2.seconds/21600
# NOTE(review): the interpolation weights look swapped — when minTime ==
# metDate, timeWeight1 == 0 and these expressions return the +6h value
# (index 4) rather than the metDate value (index 3).  Linear interpolation
# would be value[3]*timeWeight2 + value[4]*timeWeight1.  Confirm intent.
met_gust = gust_1d[3] * timeWeight1 + gust_1d[4] * timeWeight2
met_temp = temp_1d[3] * timeWeight1 + temp_1d[4] * timeWeight2
met_wind = math.sqrt((wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0)* timeWeight2) ** 2 +
(wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0)* timeWeight2) **2 )
# NOTE(review): atan2(v, u) % 360 is the mathematical "going to" angle
# measured from east; confirm whether a meteorological "coming from"
# direction (from north) was intended.
met_wdir = math.degrees(math.atan2(
wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0)* timeWeight2,
wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0)* timeWeight2)) % 360
#%% Read in wave conditions from BTS file
# Offshore wave climate nearest to the sampling start time.  Only the
# Great House / spawnee combination is parsed; every other case gets the
# -999 sentinel (replaced with NaN in the summary-sheet step further down).
if wave_bts_file[s] is not None:
if siteName == 'Great House':
wave_bts = pd.read_csv(wave_bts_file[s],
names=['date', 'time', 'HM0', 'TP', 'TM', 'MWD', 'DPK', 'HSWL', 'TSWL', 'DPSWL', 'HSEA', 'TSEA', 'DPSEA'],
header=0, skiprows=22, delim_whitespace=True)
wave_bts['datetime'] = pd.to_datetime(wave_bts['date'] + ' ' + wave_bts['time'])
wave_bts.set_index('datetime', inplace=True)
# NOTE(review): Index.get_loc(..., method='nearest') was removed in
# pandas 2.0 (use get_indexer) — confirm the pinned pandas version.
met_hmo = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].HM0
met_tp = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].TP
met_mwd = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].MWD
else:
met_hmo = -999
met_tp = -999
met_mwd = -999
#%% Add PSU and depth units if not present
# Derive practical salinity and depth with TEOS-10 (gsw) when the logger did
# not record them directly; otherwise just normalize the column names.
if ('Salinity ' not in Obs_geo.columns) and (JFE_File is None):
Obs_geo['PSU'] = gsw.conversions.SP_from_C(Obs_geo['CH3:Conductivity(mS/cm)'].values,
Obs_geo['CH4:Temperature(degC)'].values,
Obs_geo['CH0:Pressure(dbar)'].values)
# Depth [m] = pressure / (rho * g); the *10000 converts dbar to Pa.
Obs_geo['Depth'] = (Obs_geo['CH0:Pressure(dbar)'].astype(float)*10000)/(gsw.rho(Obs_geo['PSU'].values,
Obs_geo['CH1:Temperature(degC)'].values,
Obs_geo['CH0:Pressure(dbar)'].values) * 9.81)
elif (JFE_File is not None):
# Rename Salinity to PSU
Obs_geo.rename(columns={'Salinity ': 'PSU'}, inplace=True)
# Fix depth name
Obs_geo.rename(columns={'Depth ': 'Depth'}, inplace=True)
else: # Fix turbidity name
Obs_geo.rename(columns={'Turbidity ': 'CH6:Turbidity(NTU)'}, inplace=True)
#%% Save data as geojson
# Persist the merged, time-trimmed record for later steps (e.g. the Crane
# track export at the bottom of the file).
Obs_geo.loc[minTime:maxTime, :].to_file(
importPaths[s] + '/MergedGeoDataFrame.geojson', driver='GeoJSON')
#%% Plot time series for Geo data
# Build the per-parameter time-series figure and the summary statistics table.
# dataTable layout: rows 0-6 = met/wave conditions, rows 7+ = one row per
# observed parameter, last row reserved for the observation date.
if figureSave:
fontprops = fm.FontProperties(size=25)
# OBS-only deployments have just turbidity; RBR(+JFE) deployments get the
# full six-panel figure.
if (JFE_File is None) and (RBR_File is not None):
fig, axesTMP = plt.subplots(nrows=1, ncols=1, figsize=(19, 5), constrained_layout=True)
axes = []
axes.append(axesTMP)
dataTable = np.zeros([9, 4])
roundIDX = [1, 1, 0, 1, 1, 1, 1, 1]
params = ['CH6:Turbidity(NTU)']
paramName = ['Turbidity [FTU]']
parmCmap = [cmo.turbid]
RBRparamMin = [0.0]
RBRparamMax = [60.0]
paramMin = [0]
paramMax = [60]
order = 12
smoothIDX = [60]
smoothPeriod = '1 Minute Average'
else:
fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(19, 25), constrained_layout=True)
dataTable = np.zeros([14, 4])
params = ['Depth', 'PSU', 'CH8:Oxygen_Saturation(%)', 'CH1:Temperature(degC)',
'CH6:Turbidity(NTU)', 'CH2:Chlorophyll_a(ppb)']
paramName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ sat [%]', 'Temperature [degC]',
'Turbidity [FTU]', 'Chl-a [ppb]']
parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae]
roundIDX = [1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1]
# Axis limits / smoothing differ between the original (s<12) and the
# newer sensor package.
if s<12: # Original sensor
# paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0]
# paramMax = [6.0, 36.0, 34.0, 31.0, 20.0, 1.0]
paramMin = [0.0, 32.0, 32.5, 24.0, 0, 0]
paramMax = [2.0, 36.0, 34.0, 34.0, 20.0, 1.0]
order = 12
smoothIDX = [60, 60, 60, 20, 20, 20]
smoothPeriod = '1 Minute Average'
else: # New sensor
paramMin = [0.0, 32.0, 100, 24.0, 0, 0]
paramMax = [6, 36.5, 130, 34.0, 150.0, 12000]
order = 60
smoothIDX = [12, 12, 12, 12, 12, 12]
smoothPeriod = '10 Second Average'
fig.patch.set_facecolor('white')
# fig.tight_layout(pad=1.05)
fontprops = fm.FontProperties(size=25)
# NOTE(review): Kelvin-to-Celsius conversion should subtract 273.15,
# not 272.15 — this overstates air temperature by 1 degC.
dataTable[0, 0] = met_temp.m.item(0)-272.15
dataTable[1, 0] = met_wind
dataTable[2, 0] = met_wdir
dataTable[3, 0] = met_prep.m.item(0)
dataTable[4, 0] = met_hmo
dataTable[5, 0] = met_tp
dataTable[6, 0] = met_mwd
ilocs_max = []
ilocs_max_pts = []
OBS_mask = []
# For each parameter: plot the raw record (grey), overlay a smoothed trace
# (black), accumulate summary stats, and mark/number the turbidity maxima.
for paramIDX, param in enumerate(params):
Obs_geo.loc[minTime:maxTime, param].plot(
ax=axes[paramIDX], label='1 Second Observations', color='lightgrey') # Note the space in the col name
# Create mask for RBR data based on time and parameter minimum
OBS_mask.append(((Obs_geo.index>minTime) &
(Obs_geo.index<maxTime) &
(Obs_geo[param]>paramMin[paramIDX])))
# NOTE(review): the rolling window is hard-coded to 12 samples; the
# smoothIDX values defined above are never used, so the smoothPeriod
# legend label may overstate the actual averaging window.
OBS_smoothed = Obs_geo.loc[OBS_mask[paramIDX], param].rolling(
12, win_type='nuttall',center=True).mean()
OBS_smoothed.plot(
ax=axes[paramIDX], label=smoothPeriod, color='black',
linewidth=3)
# Find the local maximums for Turbidity
if param == 'CH6:Turbidity(NTU)':
##### Adjust "order" to control the number of turbidity points that are selected
# Find an order that creates between 5-10 points
# NOTE(review): the -5/+6 step sizes can oscillate around a target
# count without converging, and the `order == 0` term keeps the loop
# alive for one extra pass after order is reset to 1 — confirm this
# terminates for all records.
while (len(argrelextrema(OBS_smoothed.values, np.greater_equal, order=order, mode='wrap')[0]) < 5) or\
(len(argrelextrema(OBS_smoothed.values, np.greater_equal, order=order, mode='wrap')[0]) > 10) or\
(order == 0):
if len(argrelextrema(OBS_smoothed.values, np.greater_equal, order=order, mode='wrap')[0]) < 5:
order = order - 5
elif len(argrelextrema(OBS_smoothed.values, np.greater_equal, order=order, mode='wrap')[0]) > 10:
order = order + 6
if order == 0:
order = 1
ilocs_max.append(argrelextrema(OBS_smoothed.values,
np.greater_equal, order=order, mode='wrap')[0])
# Add start and end points?
# ilocs_max = np.insert(ilocs_max, 0, 10)
# ilocs_max[-1] = len(OBS_smoothed.values)
ilocs_max_pts.append(OBS_smoothed.iloc[ilocs_max[paramIDX]].index.values)
# Add labels if GPS data is available
if GPS_File is not None:
# axes[paramIDX].scatter(OBS_smoothed.iloc[
# ilocs_max[paramIDX]].index, np.ones(len(ilocs_max[paramIDX])) * 30, 75,
# color='blue')
for a in range(0, len(ilocs_max[paramIDX])):
axes[paramIDX].annotate(str(a+1), (ilocs_max_pts[paramIDX][a], 12), fontsize=30)
else:
# Keep list positions aligned with paramIDX for non-turbidity params.
ilocs_max.append(None)
ilocs_max_pts.append(None)
# Summary stats for this parameter (mean/std/min/max); min clamped at 0.
dataTable[paramIDX+7, 0] = OBS_smoothed.mean(skipna=True)
dataTable[paramIDX+7, 1] = OBS_smoothed.std(skipna=True)
dataTable[paramIDX+7, 2] = max(OBS_smoothed.min(skipna=True), 0)
dataTable[paramIDX+7, 3] = OBS_smoothed.max(skipna=True)
axes[paramIDX].set_ylabel(paramName[paramIDX])
axes[paramIDX].set_title(paramName[paramIDX])
axes[paramIDX].set_xlabel('')
axes[paramIDX].set_ylim(paramMin[paramIDX], paramMax[paramIDX])
axes[paramIDX].legend(loc='upper right')
# axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))
# Formate Data Table
# Build two output tables: raw stats (dfOut) and a formatted mean / "min-max"
# version (dfOutFormat).  roundIDX is one shorter than dataTable, so the
# final (date) row is skipped here and filled in as a string below.
dataTableFormat_mean = []
dataTableFormat_maxmin = []
for d in range(0, len(roundIDX)):
dataTableFormat_mean.append(round(dataTable[d, 0], roundIDX[d]))
# A zero max is treated as "no data" and rendered as '--'.
if dataTable[d, 3] == 0:
dataTableFormat_maxmin.append('--')
else:
dataTableFormat_maxmin.append(str(round(dataTable[d, 2], roundIDX[d])) + ' / ' + str(round(dataTable[d, 3], roundIDX[d])))
dfOut = pd.DataFrame(dataTable[:, :])
dfOutFormat = pd.DataFrame([dataTableFormat_mean, dataTableFormat_maxmin]).transpose()
# Change the column names
# NOTE(review): '%#d' (no-leading-zero day) is a Windows-only strftime
# flag; POSIX platforms use '%-d'.
dfOut.columns =['Mean', 'Standard Deviation', 'Min', 'Max']
dfOutFormat.columns = [np.array([minTime.strftime('%B %#d, %Y'), minTime.strftime('%B %#d, %Y')]),
np.array(['Mean', 'Min / Max'])]
# Change the row indexes
if (JFE_File is None) and (RBR_File is not None):
dfOut.iloc[8, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height Offshore [m]', 'Peak Wave Period Offshore [s]',
'Mean Wave Direction Offshore [deg]',
'Turbidity [NTU]', 'Observation Date']
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
else:
dfOut.iloc[13, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
'Mean Wave Direction Offshore [deg]', 'Depth [m]', 'Salinity [PSU]',
'Dissolved O₂ saturation [%]', 'Temperature [degC]',
'Turbidity [FTU]', 'Chl-a [ppb]', 'Observation Date']
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
fig.suptitle(siteName + ', ' + minTime.strftime('%B %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
plt.show()
# Save tables (xlsx + csv) and the figure (pdf + png) into the sample folder.
# outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]
outPath = importPaths[s]
if not os.path.exists(outPath):
os.mkdir(outPath)
dfOut.to_excel(outPath + '/Summary_Stats_' + siteName + '_RevB.xlsx')
dfOutFormat.to_excel(outPath + '/Summary_StatsFormat_' + siteName + '_RevB.xlsx')
dfOut.to_csv(outPath + '/Summary_Stats_' + siteName + '_RevB.csv')
if not os.path.exists(outPath + '/Figures'):
os.mkdir(outPath + '/Figures')
fig.savefig(outPath + '/Figures/SummaryTimeSeries_' + siteName + '_RevB.pdf',
bbox_inches='tight')
fig.savefig(outPath + '/Figures/SummaryTimeSeries_' + siteName + '_RevB.png',
bbox_inches='tight', dpi=500)
#%% Plot Maps
# One map panel per parameter: the GPS track coloured by the parameter value
# over a Mapbox basemap, with numbered turbidity-maxima labels, a scale bar,
# and a north arrow.
if figureSave:
if (JFE_File is None) and (RBR_File is not None):
# Only Turbidity Data
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(9, 9), constrained_layout=True)
ax = []
ax.append(axes)
else:
fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(19, 25), constrained_layout=True)
ax = axes.flat
fig.patch.set_facecolor('white')
# fig.tight_layout(pad=1.05)
fontprops = fm.FontProperties(size=25)
# North-arrow anchor position (axes fraction) and arrow length.
x, y, arrow_length = 0.95, 0.93, 0.20
plt.rcParams.update({'font.size': 22})
# Data-driven extents (computed but currently unused; fixed per-site extents
# from axXlim/axYlim are applied instead).
axXlimTT = (Obs_geo.loc[minTime:maxTime].geometry.x.min()-100,
Obs_geo.loc[minTime:maxTime].geometry.x.max()+100)
axYlimTT = (Obs_geo.loc[minTime:maxTime].geometry.y.min()-100,
Obs_geo.loc[minTime:maxTime].geometry.y.max()+100)
plt.setp(axes, xlim=axXlim, ylim=axYlim)
# plt.setp(axes, xlim=axXlimTT, ylim=axYlimTT)
# Plot the RBR observations
# Salinity
for paramIDX, param in enumerate(params):
Obs_geo.loc[minTime:maxTime].plot(
column=param, ax=ax[paramIDX], vmin=paramMin[paramIDX], vmax=paramMax[paramIDX],
legend=True, legend_kwds={'label': paramName[paramIDX]},
cmap=parmCmap[paramIDX], markersize=20)
ctx.add_basemap(ax[paramIDX], source=mapbox, crs='EPSG:32621')
# Add time labels
# plt.scatter(RBR_Obs_geo.loc[RBR_mask].iloc[
# ilocs_max, :].geometry.x,
# RBR_Obs_geo.loc[RBR_mask].iloc[
# ilocs_max, :].geometry.y, 75, marker='o', color='black')
# Number the turbidity maxima found in the time-series step at their
# map locations (requires real GPS data and valid geometry).
if (not Obs_geo.geometry.isnull().all()) &\
(GPS_File is not None) & (param == 'CH6:Turbidity(NTU)'):
for a in range(0, len(ilocs_max[paramIDX])):
ax[paramIDX].annotate(str(a + 1), (Obs_geo.loc[OBS_mask[paramIDX]].iloc[
ilocs_max[paramIDX][a], :].geometry.x,
Obs_geo.loc[OBS_mask[paramIDX]].iloc[
ilocs_max[paramIDX][a], :].geometry.y), fontsize=30)
ax[paramIDX].set_title(paramName[paramIDX])
# ax[paramIDX].set_ylabel('UTM 21N [m]')
# ax[paramIDX].set_xlabel('UTM 21N [m]')
ax[paramIDX].locator_params(axis='y', nbins=3)
ax[paramIDX].ticklabel_format(useOffset=False, style='plain', axis='both')
ax[paramIDX].get_xaxis().set_ticks([])
ax[paramIDX].get_yaxis().set_ticks([])
#Add scale-bar
scalebar = AnchoredSizeBar(ax[paramIDX].transData,
100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
ax[paramIDX].add_artist(scalebar)
ax[paramIDX].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
arrowprops=dict(facecolor='black', width=6, headwidth=30),
ha='center', va='center', fontsize=35,
xycoords=ax[paramIDX].transAxes)
fig.suptitle(siteName + ', ' + minTime.strftime('%b %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
plt.show()
#outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]
outPath = importPaths[s]
if not os.path.exists(outPath):
os.mkdir(outPath)
if not os.path.exists(outPath + '/Figures'):
os.mkdir(outPath + '/Figures')
fig.savefig(outPath + '/Figures/SummaryMap_' + timeLabels[s] + '_' + siteName + '_RevB.pdf',
bbox_inches='tight')
fig.savefig(outPath + '/Figures/SummaryMap_' + timeLabels[s] + '_' + siteName + '_RevB.png',
bbox_inches='tight', dpi=500)
#%% Summary Sheet
# Join the per-sample formatted stats sheets for one site (hard-coded index
# list, currently Greensleeves) into a single multi-campaign table.
# NOTE(review): this section reuses `s` as its loop variable, clobbering the
# outer per-sample loop index used above.
plotIDXsLoop = []
# plotIDXsLoop.append([0, 3, 6, 9])
# plotIDXsLoop.append([1, 4, 7, 10])
# plotIDXsLoop.append([2, 5, 8, 11])
# plotIDXsLoop.append([np.arange(0, 21*3, 3)])
# plotIDXsLoop.append([np.arange(1, 21*3, 3)])
# plotIDXsLoop.append([np.arange(2, 21*3, 3)])
for i in range(0, 1):
summTable = None
# plotIDXs = plotIDXsLoop[i]
# plotIDXs = np.arange(i, 25, 3)
#plotIDXs = [0, 3, 6, 12, 15, 18, 21, 24, 27, 30, 34] # Great House
# plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32, 36] # Old Queen's Fort
plotIDXs = [1, 4, 7, 10, 13, 16, 19] # Greensleevs
for s, plotIDX in enumerate(plotIDXs):
## Define master import path
importPath = importPaths[plotIDX]
siteName = siteNames[plotIDX]
# NOTE(review): reads 'Summary_StatsFormat_<site>.xlsx' without the
# '_RevB' suffix that the writer above appends — confirm which revision
# of the sheets is intended here.
obsStatsIN = pd.read_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx', header=[0, 1], index_col=0)
# Harmonize row labels across campaigns: samples 9-11 reported turbidity
# in NTU; samples after 11 report Chl-a in ug/l.
if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
obsStatsIN.rename({'Turbidity [NTU]': 'Turbidity [FTU]'}, inplace=True)
if plotIDX > 11:
obsStatsIN.rename({'Chl-a [ppb]': 'Chl-a [ug/l]'}, inplace=True)
if s == 0:
summTable = obsStatsIN
else:
summTable = summTable.join(obsStatsIN)
# Remove -999 with nan
# (-999 is the "no wave data" sentinel set in the wave-import step)
summTable.replace(-999, np.nan, inplace=True)
summTable.to_excel('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/Summary_StatsMerge_July_' + siteName + '.xlsx')
#%% Summary Plot
# Scatter each summary variable against sampling date for one site, pulling
# values out of the raw (unformatted) per-sample stats sheets.
plotvars = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height [m]', 'Salinity [PSU]',
'Dissolved O₂ [%]', 'Temperature [degC]',
'Turbidity [FTU]', 'Chl-a. [ug/L]']
for i in range(2, 3):
summTable = None
# plotIDXs = np.arange(i, 27, 3)
#plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32]
# plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32, 36] # Old Queen's Fort
plotIDXs = [1, 4, 7, 10, 13, 16, 19] # Greensleevs
plotDates = []
plotTable = np.empty([10, len(plotIDXs)])
for s, plotIDX in enumerate(plotIDXs):
## Define master import path
importPath = importPaths[plotIDX]
siteName = siteNames[plotIDX]
# NOTE(review): reads the non-'_RevB' stats file, like the summary-sheet
# step above — confirm the intended revision.
obsStatsIN = pd.read_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
# Row selection differs by sensor era: samples 9-11 are turbidity-only;
# s<12 sheets have an extra Oxygen row (hence the index-13/14 date row).
if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
plotTable[0:3, s] = obsStatsIN.iloc[0:3, 1]
plotTable[8, s] = obsStatsIN.iloc[7, 1]
plotDates.append(datetime.strptime(obsStatsIN.iloc[8, 1], '%B %d, %Y'))
elif plotIDX<12:
plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 13], 1]
plotDates.append(datetime.strptime(obsStatsIN.iloc[14, 1], '%B %d, %Y'))
else:
plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 12], 1]
plotDates.append(datetime.strptime(obsStatsIN.iloc[13, 1], '%B %d, %Y'))
fig, axes = plt.subplots(nrows=5, ncols=2, figsize=(19, 25), sharex=True)
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# Repalce zero with nan
# (near-zero entries are rows never filled for this sample)
plotTable[plotTable < 0.00001] = np.nan
for v in range(0, 10):
ax[v].scatter(plotDates, plotTable[v, :], 250)
ax[v].set_ylabel(plotvars[v])
fig.suptitle(siteName, fontsize=35)
plt.gcf().autofmt_xdate()
plt.gcf().align_ylabels()
plt.show()
fig.savefig('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + siteName + '.pdf',
bbox_inches='tight')
fig.savefig('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + siteName + '.png',
bbox_inches='tight', dpi=500)
#%% Save tracks for one location to shapefile
# Collect every survey track recorded at the 'Crane' site and export the
# set of tracks as a single polyline shapefile.
mergedLines = []
for s in range(0, len(siteNames)):
    siteName = siteNames[s]
    if siteName == 'Crane':
        importPath = importPaths[s]
        # Load this survey's merged GPS/water-quality track.
        locationGDF = gp.read_file(importPath + '/MergedGeoDataFrame.geojson')
        # Collapse the ordered point fixes into a single LineString track.
        mergedLines.append(LineString([(pt.x, pt.y) for pt in locationGDF.geometry.values]))
    # Progress indicator (prints for every survey, matched or not).
    print(s)
# One row per survey track, in UTM zone 21N (EPSG:32621).
mergedLines_gdf = gp.GeoDataFrame(geometry=mergedLines, crs="EPSG:32621")
mergedLines_gdf.to_file('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/Crane_Tracks.shp')
#%% Read GeoJSON file for each site and extract data at two points
# Left panel: map of survey tracks with two fixed sampling discs.
# Right panel: mean turbidity at each disc through time.
fig, axes = plt.subplots(1, 2, figsize=(15, 6))
# Per-survey mean turbidity at points A/B plus each survey's start time.
meanTurbA = []
meanTurbB = []
meanTime = []
# Choose which site to analyse (uncomment exactly one):
# plotSite = 'Great House'
# plotSite = 'Caribee'
# plotSite = 'Greensleeves'
# plotSite = 'Old Queens Fort'
plotSite = 'Crane'
# Map extent and the two points of interest, in UTM zone 21N (EPSG:32621).
if plotSite == 'Great House':
    axXlim, axYlim = (213210.7529575412, 213562.64172686986), (1464769.2243017585, 1465135.2219089477)
    A_pt, B_pt = [213316.46, 1464972.00], [213329.09, 1464898.30]
elif plotSite == 'Greensleeves':
    axXlim, axYlim = (213269.99233348924, 213648.1643157148), (1463378.1020314451, 1463950.5442048472)
    A_pt, B_pt = [213391.92, 1463659.98], [213518.95, 1463517.72]
elif plotSite == 'Old Queens Fort':
    axXlim, axYlim = (213368.59866770002, 213745.6997016811), (1460192.707288096, 1460672.371780407)
    A_pt, B_pt = [213527.75, 1460354.13], [213510.13, 1460565.55]
elif plotSite == 'Crane':
    axXlim, axYlim = (234835, 235507), (1449966, 1450580)
    A_pt, B_pt = [235287.01, 1450171.33], [235300.81, 1450233.21]
elif plotSite == 'Caribee':
    axXlim, axYlim = (217929, 218345), (1446528, 1446818)
    A_pt, B_pt = [218120.54, 1446683.057], [218250.99, 1446676.20]
# Build a 22 m-radius search disc around each point of interest.
A_GDF = gp.GeoDataFrame(geometry=gp.points_from_xy([A_pt[0]], [A_pt[1]]), crs="EPSG:32621").buffer(22)
B_GDF = gp.GeoDataFrame(geometry=gp.points_from_xy([B_pt[0]], [B_pt[1]]), crs="EPSG:32621").buffer(22)
# Fix the map extent, then add the basemap tiles underneath.
axes[0].set_xlim(axXlim)
axes[0].set_ylim(axYlim)
ctx.add_basemap(axes[0], source=mapbox, crs='EPSG:32621')
# For every survey at this site: load the track, average the turbidity
# samples falling inside each sampling disc, and draw the track on the map.
for s in range(0, len(siteNames)):
    siteName = siteNames[s]
    if siteName == plotSite:
        importPath = importPaths[s]
        locationGDF = gp.read_file(importPath + '/MergedGeoDataFrame.geojson')
        # Mean turbidity of all fixes inside each disc (NaN if none fall inside).
        insideA = locationGDF.within(A_GDF.iloc[0])
        insideB = locationGDF.within(B_GDF.iloc[0])
        meanTurbA.append(locationGDF.loc[insideA, 'CH6:Turbidity(NTU)'].mean())
        meanTurbB.append(locationGDF.loc[insideB, 'CH6:Turbidity(NTU)'].mean())
        # First fix's timestamp represents the survey date.
        meanTime.append(locationGDF.loc[0, 'DateTime'])
        locationGDF.plot(ax=axes[0])
# Draw the sampling discs as open circles with offset letter labels.
for discGDF, discPt, discLabel in ((A_GDF, A_pt, 'A'), (B_GDF, B_pt, 'B')):
    discGDF.plot(ax=axes[0], facecolor="none", edgecolor='k', linewidth=5)
    axes[0].text(discPt[0] + 30, discPt[1] - 10, discLabel, fontsize=25)
# Hide the map axis ticks.
axes[0].get_xaxis().set_ticks([])
axes[0].get_yaxis().set_ticks([])
# Right panel: mean turbidity at the two sampling discs through time.
surveyTimes = pd.to_datetime(meanTime)
axes[1].plot(surveyTimes, meanTurbA, label='Turbidity Point A')
axes[1].plot(surveyTimes, meanTurbB, label='Turbidity Point B')
axes[1].legend()
axes[1].set_xlabel('Date')
axes[1].set_ylabel('Turbidity (NTU)')
# Keep the date axis readable: at most 4 ticks, formatted as "Mon YY".
axes[1].xaxis.set_major_locator(plt.MaxNLocator(4))
axes[1].xaxis.set_major_formatter(mdates.DateFormatter('%b %y'))
plt.suptitle(plotSite)
plt.show()
# Persist the combined figure to the project share.
fig.savefig('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + plotSite + '_Turbidity_Time.png', dpi=300, bbox_inches='tight')