619 lines
24 KiB
Python
619 lines
24 KiB
Python
#%% Project Setup
|
|
|
|
import os
|
|
|
|
import pandas as pd
|
|
import geopandas as gp
|
|
from scipy.signal import argrelextrema
|
|
import numpy as np
|
|
import math
|
|
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
|
|
import matplotlib.pyplot as plt
|
|
import matplotlib.font_manager as fm
|
|
import matplotlib as mpl
|
|
|
|
import cartopy.crs as ccrs
|
|
import contextily as ctx
|
|
import cmocean.cm as cmo
|
|
from shapely.geometry import Point, LineString
|
|
|
|
from xarray.backends import NetCDF4DataStore
|
|
import xarray as xr
|
|
|
|
from datetime import datetime, timedelta
|
|
from netCDF4 import num2date
|
|
from metpy.units import units
|
|
|
|
import matplotlib.pyplot as plt
|
|
import cartopy.crs as ccrs
|
|
import cartopy.feature as cfeature
|
|
from metpy.plots import ctables
|
|
|
|
from siphon.catalog import TDSCatalog
|
|
|
|
import gsw as gsw
|
|
|
|
#%% Helper function for finding proper time variable
|
|
|
|
def find_time_var(var, time_basename='time'):
    """Return the first coordinate of ``var`` whose name starts with *time_basename*.

    Parameters
    ----------
    var : object
        Anything exposing a ``.coords`` mapping and a ``.name`` attribute
        (in this script, an xarray ``DataArray``).
    time_basename : str, optional
        Prefix identifying the time coordinate (default ``'time'``) —
        GFS/NCSS output names it ``time``, ``time1``, ``time2``, etc.

    Returns
    -------
    The matching coordinate object.

    Raises
    ------
    ValueError
        If no coordinate name starts with *time_basename*.
    """
    for candidate in var.coords:
        if not candidate.startswith(time_basename):
            continue
        return var.coords[candidate]
    raise ValueError('No time variable found for ' + var.name)
|
|
|
|
|
|
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Survey configuration: six deployments = three sites, each surveyed on two
# occasions (February monitoring, then the 18 March 2022 survey).  The main
# processing loop below indexes these four lists in parallel, so they must
# stay the same length and in the same order.

# Local folder holding the raw OBS/GPS exports for each deployment.
importPaths = ['C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Great House',
               'C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Greensleeves',
               'C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Old Queens Fort',
               'C:/Users/arey/files/Projects/West Coast/18_03_2022/Great House',
               'C:/Users/arey/files/Projects/West Coast/18_03_2022/Greensleeves',
               'C:/Users/arey/files/Projects/West Coast/18_03_2022/Old Queens Fort']

# Site name for each deployment: the same three sites visited twice,
# in the same order as importPaths.
siteNames = ['Great House', 'Greensleeves', 'Old Queens Fort'] * 2

# Month label per deployment; also used as the output sub-folder name.
timeLabels = ['February'] * 3 + ['March'] * 3

# Optional offshore wave (BTS) file per deployment.  None means no wave
# data is available and -999 placeholders are written to the summary table.
wave_bts_file = [None] * 6
|
# Main per-deployment loop: for each of the six (site, month) deployments,
# import the OBS logger CSV and the GPS track, merge them into a
# GeoDataFrame, pull matching GFS met data from the NOAA THREDDS server,
# compute summary statistics, and export a time-series figure, a map
# figure, and Excel/CSV stat tables.
for s in range(0, 6):
    ## Define master import path for this deployment
    importPath = importPaths[s]
    siteName = siteNames[s]
    timeLabel = timeLabels[s]
    importFiles = os.listdir(importPath)

    # Initialize import variables so we can detect "file not found" below.
    # NOTE(review): GPS_Type is NOT reset here; if a folder has no GPS file,
    # the value (and GPS_gdf below) from the previous iteration leaks into
    # this one — confirm every deployment folder actually contains both files.
    OBS_File = None
    GPS_File = None

    # Classify the directory contents by filename convention:
    #   *.csv without 'Summary'/'export' -> OBS logger data
    #   *.csv containing 'export'        -> GPS track (CSV export)
    #   *.xls without 'Summary'          -> GPS track (Excel export)
    for i in range(0, len(importFiles)):
        if '.csv' in importFiles[i] and 'Summary' not in importFiles[i] and 'export' not in importFiles[i]:
            OBS_File = importFiles[i]
        elif '.csv' in importFiles[i] and 'export' in importFiles[i]:
            GPS_File = importFiles[i]
            GPS_Type = 'CSV'
        elif '.xls' in importFiles[i] and 'Summary' not in importFiles[i]:
            GPS_File = importFiles[i]
            GPS_Type = 'xls'

    #%% Obs Import Data
    if OBS_File is not None:
        Obs_dat = pd.read_csv(importPath + '/' + OBS_File, skiprows=0, header=0)

        # Drop rows with metadata (trailing instrument-info rows have NaN
        # in the temperature channel).
        Obs_dat = Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()]

        # Set Time Zone for Sensor.  The February deployments (s < 3) logged
        # in local (Barbados) time; the March deployments logged in UTC.
        if s < 3:
            Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados').dt.tz_convert('UTC')
        else:
            Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('UTC')

        # Set the (tz-aware, UTC) timestamp as the index for merge_asof below.
        Obs_dat.set_index('DateTime', inplace=True)

    #%% GPS Import Data
    if GPS_File is not None:
        if GPS_Type == 'CSV':
            # Headerless CSV export; column meanings assigned by position.
            GPS = pd.read_csv(importPath + '/' + GPS_File,
                              header=None, names=['Index', 'Date1', 'Time1', 'Date2', 'Time2', 'Northing', 'North', 'Easting', 'East', 'Var1', 'Var2'])
            # Convert GPS data to geodataframe.  Longitude is negated: the
            # export stores unsigned degrees West.
            GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))

            # NOTE(review): this branch leaves DateTime tz-naive, while the
            # OBS index above is tz-aware UTC — merge_asof would raise on
            # mixed tz-awareness.  Presumably Date2/Time2 are already UTC;
            # confirm and consider .dt.tz_localize('UTC') here too.
            GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))

        elif GPS_Type == 'xls':
            GPS = pd.read_excel(importPath + '/' + GPS_File, header=0)

            # Convert GPS data to geodataframe (x/y columns are lon/lat).
            GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(GPS.x, GPS.y, crs="EPSG:4326"))

            GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['time']).dt.tz_localize('UTC')

        # Set Datetime as index
        GPS_gdf.set_index('DateTime', inplace=True)

        # Sort by time (merge_asof requires a sorted index).
        GPS_gdf.sort_index(inplace=True)

        # Convert to UTM zone 21N so map distances/extents are in metres.
        GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32621")

    #%% Read in site shapefile (survey-area polygons), reprojected to UTM 21N
    siteShp = gp.read_file('C:/Users/arey/files/Projects/West Coast/SitePolygons.shp')
    siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")

    #%% Merge GPS to RBR
    # Attach the nearest GPS fix (within 5 minutes) to every OBS sample.
    if OBS_File is not None:
        # Merge with GPS as dataframe; samples with no fix within the
        # tolerance get NaN geometry.
        Obs_geo = pd.merge_asof(Obs_dat, GPS_gdf,
                                left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('300s'))
        Obs_geo = gp.GeoDataFrame(Obs_geo, geometry=Obs_geo.geometry, crs="EPSG:32621")

    #%% Find and setup key plotting details
    # Shaded Water basemap style.
    # NOTE(review): the Mapbox access token is hard-coded in this URL —
    # consider moving it to an environment variable before sharing the script.
    mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
    # Sat water (alternative basemap; swap in by uncommenting)
    # mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckekcw3pn08am19qmqbhtq8sb/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'

    # Per-site map extents (UTM 21N metres), nearest GFS grid point, and a
    # flag for whether each sample lies inside that site's survey polygon.
    # NOTE(review): siteShp.iloc[row, 1] assumes column 1 is the geometry
    # column and a fixed polygon order (2=Great House, 1=Greensleeves,
    # 0=Old Queens Fort) — verify against the shapefile attribute table.
    if siteName == 'Great House':
        axXlim = (213210.7529575412, 213562.64172686986)
        axYlim = (1464769.2243017585, 1465135.2219089477)
        GFS_Lon = -59.6441
        GFS_Lat = 13.2372
        Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[2, 1])
    elif siteName == 'Greensleeves':
        axXlim = (213269.99233348924, 213648.1643157148)
        # axYlim = (1463378.1020314451, 1463843.5442048472)
        axYlim = (1463378.1020314451, 1463950.5442048472)
        GFS_Lon = -59.6428
        GFS_Lat = 13.2289
        Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[1, 1])
    elif siteName == 'Old Queens Fort':
        axXlim = (213368.59866770002, 213745.6997016811)
        axYlim = (1460192.707288096, 1460672.371780407)
        GFS_Lon = -59.6419
        GFS_Lat = 13.1960
        Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[0, 1])

    # Set min and max times of the in-water survey window.
    if Obs_geo['inArea'].any():
        # First and last times the track is inside the site polygon.
        minTime = Obs_geo[Obs_geo['inArea']==True].index[0]
        maxTime = Obs_geo[Obs_geo['inArea']==True].index[-1]
    else:
        # First and last times if no GPS data: trim 20 samples off each end
        # to skip deployment/recovery transients.
        minTime = Obs_geo.index[20]
        maxTime = Obs_geo.index[-20]

    # Round minTime DOWN to the previous 6-hourly synoptic hour
    # (00/06/12/18 UTC) — the GFS analysis cadence.
    metDate = minTime - timedelta(
        hours=minTime.hour % 6,
        minutes=minTime.minute,
        seconds=minTime.second,
        microseconds=minTime.microsecond)

    #%% GFS Met Import
    # Variables requested from the GFS 0.5-degree analysis files.
    var = ['Temperature_surface', 'Wind_speed_gust_surface',
           'u-component_of_wind_height_above_ground', 'v-component_of_wind_height_above_ground']
    var_precp = ['Total_precipitation_surface_6_Hour_Accumulation']

    # Per-analysis-time point series, filled in the download loop below.
    temp_1d = []
    gust_1d = []
    wndu_1d = []
    wndv_1d = []
    prep_1d = []
    time_1d = []

    # Set times to download: five 6-hourly analyses from 18 h before the
    # survey synoptic hour through 6 h after (indices 0..4; index 3 ==
    # metDate, index 4 == metDate+6h — the processing below relies on this).
    startdate = metDate - timedelta(hours=18)
    enddate = metDate + timedelta(hours=6)
    # NOTE(review): freq='6H' is deprecated in pandas >= 2.2 (use '6h').
    date_list = pd.date_range(startdate, enddate, freq='6H')

    # Loop through dates, downloading one point value per analysis time.
    for date in date_list:
        # Base URL for 0.5 degree GFS data (NCEI THREDDS archive,
        # organised by year-month / day).
        best_gfs = TDSCatalog('https://www.ncei.noaa.gov/thredds/catalog/model-gfs-g4-anl-files/' +
                              date.strftime('%Y%m') + '/' + date.strftime('%Y%m%d') + '/' + 'catalog.xml')

        # Generate URLs for specific grib files: the analysis (_000) and
        # the 6-hour forecast (_006, which carries the precip accumulation).
        best_ds = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_000.grb2']
        best_ds_precp = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_006.grb2']

        # Format the query parameters (NetCDF Subset Service).
        ncss = best_ds.subset()
        query = ncss.query()

        ncss_precp = best_ds_precp.subset()
        query_precp = ncss_precp.query()

        # Extract data from the grid point nearest the site.
        query.lonlat_point(GFS_Lon, GFS_Lat).time(date)
        query.accept('netcdf')
        query.variables(var[0], var[1], var[2], var[3])
        # 10 m above ground for the wind components.
        query.vertical_level(10)

        data = ncss.get_data(query)
        # drop_variables works around a duplicate height coordinate in the
        # returned NetCDF.
        data = xr.open_dataset(NetCDF4DataStore(data), drop_variables='height_above_ground4')

        # Precip valid time is the END of the 6-h accumulation window.
        query_precp.lonlat_point(GFS_Lon, GFS_Lat).time(date + timedelta(hours=6))
        query_precp.accept('netcdf')
        query_precp.variables(var_precp[0])

        data_precp = ncss_precp.get_data(query_precp)
        data_precp = xr.open_dataset(NetCDF4DataStore(data_precp))

        temp_3d = data[var[0]]
        gust_3d = data[var[1]]
        wndu_3d = data[var[2]]
        wndv_3d = data[var[3]]
        prep_3d = data_precp[var_precp[0]]

        # Read the individual point (with units) and append to the list
        temp_1d.append(temp_3d.metpy.unit_array.squeeze())
        gust_1d.append(gust_3d.metpy.unit_array.squeeze())
        wndu_1d.append(wndu_3d.metpy.unit_array.squeeze())
        wndv_1d.append(wndv_3d.metpy.unit_array.squeeze())
        prep_1d.append(prep_3d.metpy.unit_array.squeeze())
        time_1d.append(find_time_var(temp_3d))

    #%% Process Met Data
    # 24h Precipitation Total: sum of the four 6-h accumulations ending
    # before/at the survey synoptic hour (indices 0-3).
    # Time-weighted average of the bracketing analyses (indices 3 and 4)
    # for everything else.
    met_prep = prep_1d[0] + prep_1d[1] + prep_1d[2] + prep_1d[3]

    # Fractions of the 6-h window (21600 s) before/after minTime.
    # NOTE(review): for linear interpolation at minTime the weights look
    # swapped — value[3] (at metDate) should be weighted by timeWeight2
    # (1 - elapsed fraction) and value[4] by timeWeight1.  As written the
    # later analysis gets the smaller weight the closer minTime is to it.
    # Also, .seconds (not .total_seconds()) wraps for negative timedeltas.
    timeWeight1 = minTime-metDate
    timeWeight2 = (metDate+timedelta(hours=6))-minTime

    timeWeight1 = timeWeight1.seconds/21600
    timeWeight2 = timeWeight2.seconds/21600

    met_gust = gust_1d[3] * timeWeight1 + gust_1d[4] * timeWeight2
    met_temp = temp_1d[3] * timeWeight1 + temp_1d[4] * timeWeight2
    # Wind speed from interpolated u/v components (.m strips pint units).
    met_wind = math.sqrt((wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0)* timeWeight2) ** 2 +
                         (wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0)* timeWeight2) **2 )
    # NOTE(review): atan2(v, u) gives the mathematical (counter-clockwise
    # from East, "blowing toward") angle, not the meteorological wind
    # direction (clockwise from North, "blowing from") — confirm which
    # convention the summary table is meant to report.
    met_wdir = math.degrees(math.atan2(
        wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0)* timeWeight2,
        wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0)* timeWeight2)) % 360

    #%% Read in wave conditions from BTS file (offshore buoy); -999
    # placeholders when no file is configured for this deployment.
    if wave_bts_file[s] is not None:
        if siteName == 'Great House':
            # Whitespace-delimited BTS text file with a 22-line header.
            wave_bts = pd.read_csv(wave_bts_file[s],
                                   names=['date', 'time', 'HM0', 'TP', 'TM', 'MWD', 'DPK', 'HSWL', 'TSWL', 'DPSWL', 'HSEA', 'TSEA', 'DPSEA'],
                                   header=0, skiprows=22, delim_whitespace=True)
            wave_bts['datetime'] = pd.to_datetime(wave_bts['date'] + ' ' + wave_bts['time'])
            wave_bts.set_index('datetime', inplace=True)

        # Pick the record nearest the survey start.
        # NOTE(review): wave_bts is only defined in the Great House branch —
        # a configured file at another site would raise NameError here.
        # Index.get_loc(..., method='nearest') is removed in pandas >= 2.0
        # (use get_indexer with method='nearest').
        met_hmo = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].HM0
        met_tp = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].TP
        met_mwd = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].MWD
    else:
        # Sentinel values; replaced with NaN in the combined summary stage.
        met_hmo = -999
        met_tp = -999
        met_mwd = -999

    #%% Add PSU and depth units
    # Practical salinity from conductivity/temperature/pressure (TEOS-10).
    Obs_geo['PSU'] = gsw.conversions.SP_from_C(Obs_geo['CH3:Conductivity(mS/cm)'].values,
                                               Obs_geo['CH1:Temperature(degC)'].values,
                                               Obs_geo['CH0:Pressure(dbar)'].values)

    # Depth [m] from hydrostatic pressure: p[dbar] * 1e4 [Pa/dbar] / (rho * g).
    Obs_geo['Depth'] = (Obs_geo['CH0:Pressure(dbar)'].astype(float)*10000)/(gsw.rho(Obs_geo['PSU'].values,
                                                                                    Obs_geo['CH1:Temperature(degC)'].values,
                                                                                    Obs_geo['CH0:Pressure(dbar)'].values) * 9.81)

    #%% Plot time series for Geo data
    fontprops = fm.FontProperties(size=25)

    # One stacked panel per water-quality parameter.
    fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(19, 25), constrained_layout=True)
    # Summary table: rows 0-6 met/wave values, rows 7-12 parameter stats,
    # row 13 the observation date; columns = mean/std/min/max.
    dataTable = np.zeros([14, 4])
    # Decimal places per table row (13 numeric rows).
    roundIDX = [1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1]

    # Logger channel names and their display labels / colormaps / axis limits.
    params = ['Depth', 'PSU', 'CH8:Oxygen_Saturation(%)', 'CH1:Temperature(degC)',
              'CH6:Turbidity(NTU)', 'CH2:Chlorophyll_a(ppb)']
    paramName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ sat [%]', 'Temperature [degC]',
                 'Turbidity [FTU]', 'Chl-a [ppb]']

    parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae]
    # paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0]
    # paramMax = [1.0, 36.0, 34.0, 31.0, 20.0, 1.0]
    # Minimum also acts as a validity threshold when masking raw data below.
    paramMin = [0.0, 33.0, 100, 26.0, 0, 0]
    paramMax = [1.0, 36.0, 130, 30.0, 75.0, 12000]

    fig.patch.set_facecolor('white')
    # fig.tight_layout(pad=1.05)

    fontprops = fm.FontProperties(size=25)

    # Met/wave summary rows (column 0 = value; std/min/max stay 0 -> shown
    # as '--' by the formatter below).
    # NOTE(review): 272.15 looks like a typo for 273.15 (K -> degC offset)
    # — would bias air temperature by +1 degC; confirm and fix.
    dataTable[0, 0] = met_temp.m.item(0)-272.15
    dataTable[1, 0] = met_wind
    dataTable[2, 0] = met_wdir
    dataTable[3, 0] = met_prep.m.item(0)
    dataTable[4, 0] = met_hmo
    dataTable[5, 0] = met_tp
    dataTable[6, 0] = met_mwd

    # Per-parameter extrema locations (only populated for turbidity; None
    # placeholders keep list indices aligned with paramIDX).
    ilocs_max = []
    ilocs_max_pts = []
    OBS_mask = []

    for paramIDX, param in enumerate(params):
        # Raw 1 Hz trace in light grey.
        Obs_geo.loc[minTime:maxTime, param].plot(
            ax=axes[paramIDX], label='1 Second Observations', color='lightgrey') # Note the space in the col name

        # Create mask for RBR data based on time window and parameter
        # minimum (drops out-of-water / invalid readings).
        OBS_mask.append(((Obs_geo.index>minTime) &
                         (Obs_geo.index<maxTime) &
                         (Obs_geo[param]>paramMin[paramIDX])))

        # 12-sample Nuttall-windowed rolling mean (~1 minute at 5 s cadence).
        OBS_smoothed = Obs_geo.loc[OBS_mask[paramIDX], param].rolling(
            12, win_type='nuttall',center=True).mean()

        OBS_smoothed.plot(
            ax=axes[paramIDX], label='1 Minute Average', color='black',
            linewidth=3)

        # Find the local maximums for Turbidity (numbered event markers,
        # reused on the map figure below).
        if param == 'CH6:Turbidity(NTU)':
            ilocs_max.append(argrelextrema(OBS_smoothed.values,
                                           np.greater_equal, order=6, mode='wrap')[0])

            # Add start and end points?
            # ilocs_max = np.insert(ilocs_max, 0, 10)
            # ilocs_max[-1] = len(OBS_smoothed.values)

            ilocs_max_pts.append(OBS_smoothed.iloc[ilocs_max[paramIDX]].index.values)

            # Add labels if GPS data is available
            if GPS_File is not None:
                # axes[paramIDX].scatter(OBS_smoothed.iloc[
                #     ilocs_max[paramIDX]].index, np.ones(len(ilocs_max[paramIDX])) * 30, 75,
                #     color='blue')
                for a in range(0, len(ilocs_max[paramIDX])):
                    axes[paramIDX].annotate(str(a+1), (ilocs_max_pts[paramIDX][a], 12), fontsize=30)
        else:
            ilocs_max.append(None)
            ilocs_max_pts.append(None)

        # Parameter statistics over the smoothed series (rows 7-12);
        # min is clamped at 0 since negative readings are sensor noise.
        dataTable[paramIDX+7, 0] = OBS_smoothed.mean(skipna=True)
        dataTable[paramIDX+7, 1] = OBS_smoothed.std(skipna=True)
        dataTable[paramIDX+7, 2] = max(OBS_smoothed.min(skipna=True), 0)
        dataTable[paramIDX+7, 3] = OBS_smoothed.max(skipna=True)

        axes[paramIDX].set_ylabel(paramName[paramIDX])
        axes[paramIDX].set_title(paramName[paramIDX])
        axes[paramIDX].set_xlabel('')
        axes[paramIDX].set_ylim(paramMin[paramIDX], paramMax[paramIDX])
        axes[paramIDX].legend(loc='upper left')

        # axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))

    # Format Data Table: rounded mean plus a "min / max" string per row;
    # rows whose max is exactly 0 (unset met/wave slots) render as '--'.
    dataTableFormat_mean = []
    dataTableFormat_maxmin = []
    for d in range(0, len(roundIDX)):
        dataTableFormat_mean.append(round(dataTable[d, 0], roundIDX[d]))
        if dataTable[d, 3] == 0:
            dataTableFormat_maxmin.append('--')
        else:
            dataTableFormat_maxmin.append(str(round(dataTable[d, 2], roundIDX[d])) + ' / ' + str(round(dataTable[d, 3], roundIDX[d])))

    dfOut = pd.DataFrame(dataTable[:, :])
    dfOutFormat = pd.DataFrame([dataTableFormat_mean, dataTableFormat_maxmin]).transpose()

    # Change the column names.
    # NOTE(review): '%#d' (no-pad day) is Windows-only strftime syntax;
    # on POSIX use '%-d'.
    dfOut.columns =['Mean', 'Standard Deviation', 'Min', 'Max']
    dfOutFormat.columns = [np.array([minTime.strftime('%B %#d, %Y'), minTime.strftime('%B %#d, %Y')]),
                           np.array(['Mean', 'Min / Max'])]
    # Store the observation date in the last row (stringifies a float cell).
    dfOut.iloc[13, 0] = minTime.strftime('%B %#d, %Y')
    rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
                'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
                'Mean Wave Direction Offshore [deg]', 'Depth [m]', 'Salinity [PSU]',
                'Dissolved O₂ saturation [%]', 'Temperature [degC]',
                'Turbidity [FTU]', 'Chl-a [ppb]', 'Observation Date']

    dfOut.index = rowNames
    # Formatted table has no date row, hence one fewer label.
    dfOutFormat.index = rowNames[0:-1]

    fig.suptitle(siteName + ', ' + minTime.strftime('%B %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)

    plt.show()

    # Export tables and figures to the network share, one folder per month.
    outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]

    if not os.path.exists(outPath):
        os.mkdir(outPath)

    dfOut.to_excel(outPath + '/Summary_Stats_' + siteName + '.xlsx')
    dfOutFormat.to_excel(outPath + '/Summary_StatsFormat_' + siteName + '.xlsx')

    dfOut.to_csv(outPath + '/Summary_Stats_' + siteName + '.csv')

    if not os.path.exists(outPath + '/Figures'):
        os.mkdir(outPath + '/Figures')

    fig.savefig(outPath + '/Figures/SummaryTimeSeries_' + siteName + '.pdf',
                bbox_inches='tight')
    fig.savefig(outPath + '/Figures/SummaryTimeSeries_' + siteName + '.png',
                bbox_inches='tight', dpi=500)

    #%% Plot Maps: one map panel per parameter, colored by value, over a
    # Mapbox basemap, with the numbered turbidity peaks located spatially.
    fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(19, 25), constrained_layout=True)
    ax = axes.flat

    fig.patch.set_facecolor('white')
    # fig.tight_layout(pad=1.05)

    fontprops = fm.FontProperties(size=25)
    # North-arrow anchor (axes fraction) and length.
    x, y, arrow_length = 0.95, 0.93, 0.20
    plt.rcParams.update({'font.size': 22})

    # Alternative data-driven extents (track bounding box + 100 m margin);
    # currently unused — fixed per-site extents are applied instead.
    axXlimTT = (Obs_geo.loc[minTime:maxTime].geometry.x.min()-100,
                Obs_geo.loc[minTime:maxTime].geometry.x.max()+100)
    axYlimTT = (Obs_geo.loc[minTime:maxTime].geometry.y.min()-100,
                Obs_geo.loc[minTime:maxTime].geometry.y.max()+100)

    plt.setp(axes, xlim=axXlim, ylim=axYlim)
    # plt.setp(axes, xlim=axXlimTT, ylim=axYlimTT)

    # Plot the RBR observations
    # Salinity
    for paramIDX, param in enumerate(params):
        Obs_geo.loc[minTime:maxTime].plot(
            column=param, ax=ax[paramIDX], vmin=paramMin[paramIDX], vmax=paramMax[paramIDX],
            legend=True, legend_kwds={'label': paramName[paramIDX]},
            cmap=parmCmap[paramIDX], markersize=20)

        ctx.add_basemap(ax[paramIDX], source=mapbox, crs='EPSG:32621')

        # Add time labels
        # plt.scatter(RBR_Obs_geo.loc[RBR_mask].iloc[
        #     ilocs_max, :].geometry.x,
        #     RBR_Obs_geo.loc[RBR_mask].iloc[
        #     ilocs_max, :].geometry.y, 75, marker='o', color='black')

        # Annotate the turbidity peak numbers at their map positions
        # (only when geometry exists and a GPS file was found).
        if (not Obs_geo.geometry.isnull().all()) &\
                (GPS_File is not None) & (param == 'CH6:Turbidity(NTU)'):
            for a in range(0, len(ilocs_max[paramIDX])):
                ax[paramIDX].annotate(str(a + 1), (Obs_geo.loc[OBS_mask[paramIDX]].iloc[
                    ilocs_max[paramIDX][a], :].geometry.x,
                    Obs_geo.loc[OBS_mask[paramIDX]].iloc[
                    ilocs_max[paramIDX][a], :].geometry.y), fontsize=30)

        ax[paramIDX].set_title(paramName[paramIDX])
        # ax[paramIDX].set_ylabel('UTM 21N [m]')
        # ax[paramIDX].set_xlabel('UTM 21N [m]')
        ax[paramIDX].locator_params(axis='y', nbins=3)
        ax[paramIDX].ticklabel_format(useOffset=False, style='plain', axis='both')

        # Hide coordinate ticks; the scale bar below conveys distance.
        ax[paramIDX].get_xaxis().set_ticks([])
        ax[paramIDX].get_yaxis().set_ticks([])

        # Add scale-bar (100 m in data units = metres, since CRS is UTM).
        scalebar = AnchoredSizeBar(ax[paramIDX].transData,
                                   100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
        ax[paramIDX].add_artist(scalebar)
        # North arrow in axes-fraction coordinates.
        ax[paramIDX].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
                              arrowprops=dict(facecolor='black', width=6, headwidth=30),
                              ha='center', va='center', fontsize=35,
                              xycoords=ax[paramIDX].transAxes)

    fig.suptitle(siteName + ', ' + minTime.strftime('%b %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)

    plt.show()

    outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]

    if not os.path.exists(outPath):
        os.mkdir(outPath)

    if not os.path.exists(outPath + '/Figures'):
        os.mkdir(outPath + '/Figures')

    fig.savefig(outPath + '/Figures/SummaryMap_' + siteName + '.pdf',
                bbox_inches='tight')

    fig.savefig(outPath + '/Figures/SummaryMap_' + siteName + '.png',
                bbox_inches='tight', dpi=500)
|
|
|
|
#%% Summary Sheet
# Join the per-survey formatted stat tables into one sheet per site
# (columns grouped by survey date) and write it to the network share.
# Each inner list holds the deployment indices for ONE site across surveys.
# NOTE(review): indices run to 11 but importPaths/siteNames above have only
# 6 entries — this section appears to expect a longer configuration (the
# source header reports 619 lines, so the lists may be truncated in this
# copy).  As written, plotIDX >= 6 raises IndexError; confirm the full
# config before running.
plotIDXsLoop = []
plotIDXsLoop.append([0, 3, 6, 9])
plotIDXsLoop.append([1, 4, 7, 10])
plotIDXsLoop.append([2, 5, 8, 11])

for i in range(0, 3):
    summTable = None
    plotIDXs = plotIDXsLoop[i]

    # NOTE(review): loop variable 's' shadows the main-loop counter above.
    for s, plotIDX in enumerate(plotIDXs):
        ## Define master import path for this site/survey
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]

        # Two-level header: (survey date, Mean|Min / Max).
        obsStatsIN = pd.read_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx', header=[0,1], index_col=0)
        # Older (pre-FTU) files use the NTU label; harmonise the row index
        # so the join below aligns.  rename() without axis= targets the index.
        if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
            obsStatsIN.rename({'Turbidity [NTU]': 'Turbidity [FTU]'}, inplace=True)

        # First survey seeds the table; later surveys join on the row index.
        if s == 0:
            summTable = obsStatsIN
        else:
            summTable = summTable.join(obsStatsIN)

    # Replace the -999 "no wave data" sentinels with NaN.
    summTable.replace(-999, np.nan, inplace=True)

    # siteName here is whichever site the LAST plotIDX referenced.
    summTable.to_excel('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/Summary_StatsMerge_' + siteName + '.xlsx')
|
|
|
|
|
|
#%% Summary Plot
# For each site, scatter the per-survey summary values (met + water
# quality) against survey date and export the figure.

# Display labels for the 10 plotted rows of plotTable (same order).
plotvars = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
            'Significant Wave Height [m]', 'Salinity [PSU]',
            'Dissolved O₂ [%]', 'Temperature [degC]',
            'Turbidity [FTU]', 'Chl-Flu. [ppb]']

# Deployment indices per site (one inner list per site, across surveys).
# NOTE(review): as in the Summary Sheet section, indices up to 11 exceed
# the 6-entry configuration visible in this copy of the script.
plotIDXsLoop = []
plotIDXsLoop.append([0, 3, 6, 9])
plotIDXsLoop.append([1, 4, 7, 10])
plotIDXsLoop.append([2, 5, 8, 11])

for i in range(0, 3):
    summTable = None
    plotIDXs = plotIDXsLoop[i]

    plotDates = []
    # Rows = the 10 plotvars, columns = surveys.  np.empty: every cell must
    # be assigned (or masked) before plotting.
    plotTable = np.empty([10, len(plotIDXs)])

    for s, plotIDX in enumerate(plotIDXs):
        ## Define master import path for this site/survey
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]

        # Unformatted stats table (no index_col: column 0 is row labels,
        # column 1 the 'Mean' values).
        obsStatsIN = pd.read_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
        # Older-format files (indices 9-11) only carry air temp, wind and
        # turbidity, with the date in a different row.
        # NOTE(review): these row positions (iloc[7,1], iloc[8,1], and
        # iloc[14,1] below) do not match the 14-row table written by THIS
        # script (date at row 13) — presumably they match an earlier file
        # format; verify against the actual spreadsheets on the share.
        if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
            plotTable[0:3, s] = obsStatsIN.iloc[0:3, 1]
            plotTable[8, s] = obsStatsIN.iloc[7, 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[8, 1], '%B %d, %Y'))
        else:
            # Skip the offshore Tp/MWD rows; keep the 10 plotted quantities.
            plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 12], 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[14, 1], '%B %d, %Y'))

    # 5x2 grid, one panel per quantity, shared date axis.
    fig, axes = plt.subplots(nrows=5, ncols=2, figsize=(19, 25), sharex=True)
    fig.patch.set_facecolor('white')
    fig.tight_layout(pad=3)
    ax = axes.flat

    # Replace zero / sentinel (and the unassigned near-zero slots of the
    # old-format columns) with NaN so they are not plotted.
    plotTable[plotTable < 0.00001] = np.nan

    for v in range(0, 10):
        ax[v].scatter(plotDates, plotTable[v, :], 250)
        ax[v].set_ylabel(plotvars[v])

    # siteName is whichever site the LAST plotIDX referenced.
    fig.suptitle(siteName, fontsize=35)
    plt.gcf().autofmt_xdate()
    plt.gcf().align_ylabels()
    plt.show()

    fig.savefig('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/' + siteName + '.pdf',
                bbox_inches='tight')
    fig.savefig('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/' + siteName + '.png',
                bbox_inches='tight', dpi=500)
|