# (scraped page metadata removed: "566 lines / 20 KiB / Python")
#%% Project Setup
import math
import os

from datetime import datetime, timedelta
import datetime as datetime  # NOTE: rebinds `datetime` to the module, shadowing the class imported above; `timedelta` stays bound to the class

import pytz

import cartopy.crs as ccrs
import cartopy.feature as cfeature
import cmocean.cm as cmo
import contextily as ctx
import geopandas as gp
import gsw as gsw
import matplotlib as mpl
import matplotlib.dates as mdates
import matplotlib.font_manager as fm
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import xarray as xr
from matplotlib.dates import DateFormatter
from metpy.plots import ctables
from metpy.units import units
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
from netCDF4 import num2date
from scipy.signal import argrelextrema
from shapely.geometry import Point, LineString
from siphon.catalog import TDSCatalog
from xarray.backends import NetCDF4DataStore
#%% Helper function for finding proper time variable

def find_time_var(var, time_basename='time'):
    """Return the coordinate of *var* whose name starts with *time_basename*.

    The first matching coordinate (in coords iteration order) is returned.
    Raises ValueError when no coordinate name matches.
    """
    match = next((c for c in var.coords if c.startswith(time_basename)), None)
    if match is None:
        raise ValueError('No time variable found for ' + var.name)
    return var.coords[match]
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Survey-specific configuration: network data locations and the trip day
# currently being processed.
importPath = '//srv-ott3/Projects/13539.101 L\'Ansecoy Bay, Mustique/03_Data/03_Field/03_June 2022 trip MJA'

siteName = 'Ansecoy Bay'

# WiMo water-quality sonde export for this trip
OBS_File = '_03 Water Quality Measurements/WiMo/524d_data_220624_151645.csv'

# To process June 24 instead, swap the commented pairs below with the active ones.
# timeLabel= 'June 24-25, 2022'
# timeLabel= 'June 24, 2022'
# GPS_File = '_06 GPS/GPS Tracker 1/export_2022-06-24 15-17.csv'

timeLabel= 'June 25, 2022'
GPS_File = '_06 GPS/GPS Tracker 1/export_2022-06-25 12-03.csv'
# GPS_File = '_06 GPS/GPS Tracker 2/export_2022-06-25 12-03.csv'
#%% Obs Import Data
# Read the sonde export; rows missing a temperature reading are metadata rows.
Obs_dat = pd.read_csv(importPath + '/' + OBS_File, skiprows=0, header=0)
Obs_dat = Obs_dat.loc[Obs_dat['CH1:Temperature(degC)'].notna()]

# The instrument clock is sometimes UTC, sometimes EST; localize the
# standard-time stamps to Barbados time and index the frame by timestamp.
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados')
Obs_dat = Obs_dat.set_index('DateTime')
#%% GPS Import Data
# Raw tracker CSV has no header row; name its columns explicitly.
GPS = pd.read_csv(importPath + '/' + GPS_File, header=None,
                  names=['Index', 'Date1', 'Time1', 'Date2', 'Time2', 'Northing', 'North', 'Easting', 'East', 'Var1', 'Var2'])

# Longitudes are logged as positive degrees west, hence the sign flip on Easting.
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))

# Build a Barbados-local timestamp from the second date/time pair, then index
# and time-sort the track.
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str)).dt.tz_localize('America/Barbados')
GPS_gdf = GPS_gdf.set_index('DateTime').sort_index()

# Reproject fixes to UTM zone 20N for metric plotting and merging.
GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32620")
#%% Merge GPS to RBR
# Attach the nearest-in-time GPS fix (within 5 minutes) to each sonde sample,
# then promote the merged frame back to a GeoDataFrame in UTM 20N.
if OBS_File is not None:
    Obs_geo = pd.merge_asof(Obs_dat, GPS_gdf, left_index=True, right_index=True,
                            direction='nearest', tolerance=pd.Timedelta('300s'))
    Obs_geo = gp.GeoDataFrame(Obs_geo, geometry=Obs_geo.geometry, crs="EPSG:32620")
#%% Find and setup key plotting details
# Mapbox basemap tile URL: shaded-water style active, satellite style kept below.
# NOTE(review): access token is hardcoded in the URL — consider moving it to an
# environment variable before sharing this script.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Sat water
# mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckekcw3pn08am19qmqbhtq8sb/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'

# Fixed UTM 20N plot window around the bay, and the GFS grid point to sample.
axXlim = (697063, 698145)
axYlim = (1425770, 1426312)
GFS_Lon = -61.1789072
GFS_Lat = 12.8940943

# No shapefile clipping for this trip — flag every sample as inside the study area.
Obs_geo['inArea'] = True
# Set min and max times using conductivity
# (automatic window selection from the in-area flag, kept for reference)
# if Obs_geo['inArea'].any():
#     minTime = Obs_geo[Obs_geo['inArea']==True].index[0]
#     maxTime = Obs_geo[Obs_geo['inArea']==True].index[-1]
# else:
#     minTime = Obs_geo.index[20]
#     maxTime = Obs_geo.index[-20]

# Manual survey window (UTC). Alternate windows for other days kept commented.
# minTime = pd.to_datetime('2022-6-24 19:17:00+00:00')
# maxTime = pd.to_datetime('2022-6-24 20:36:00+00:00')

minTime = pd.to_datetime('2022-6-25 16:03:00+00:00')
maxTime = pd.to_datetime('2022-6-25 16:47:00+00:00')

# minTime = pd.to_datetime('2022-6-24 20:40:00+00:00')
# maxTime = pd.to_datetime('2022-6-25 15:58:00+00:00')

# Floor the survey start to the preceding 00/06/12/18Z GFS analysis time.
_start_utc = minTime.tz_convert('UTC')
metDate = _start_utc - timedelta(hours=_start_utc.hour % 6,
                                 minutes=_start_utc.minute,
                                 seconds=_start_utc.second,
                                 microseconds=_start_utc.microsecond)
#%% Save Shapefiles
# BUG FIX: 'outPath' was referenced here before being assigned — it is only set
# (to importPath) much further down the script, so running the cells
# top-to-bottom raised a NameError. Define it here, and make sure the Figures
# folder exists before geopandas tries to write the shapefile into it.
outPath = importPath
if not os.path.exists(outPath + '/Figures'):
    os.mkdir(outPath + '/Figures')

# Export the geolocated observations within the survey window.
Obs_geo.loc[minTime:maxTime, :].reset_index(drop=True).to_file(outPath +
                                '/Figures/MustiqueData_June25.shp')
#%% GFS Met Import
# GFS surface fields to pull at the offshore grid point, plus the 6-hour
# precipitation accumulation (which lives in the +006 forecast file).
var = ['Temperature_surface', 'Wind_speed_gust_surface',
       'u-component_of_wind_height_above_ground', 'v-component_of_wind_height_above_ground']
var_precp = ['Total_precipitation_surface_6_Hour_Accumulation']

# Per-analysis-time accumulators filled by the download loop below.
temp_1d = []
gust_1d = []
wndu_1d = []
wndv_1d = []
prep_1d = []
time_1d = []

# Set times to download: five 6-hourly analyses, from 18 h before the survey's
# analysis time through 6 h after it.
### USED TO BE -18/ +6 CHECK ###
startdate = metDate - timedelta(hours=18)
enddate = metDate + timedelta(hours=6)
# FIX: lowercase 'h' — the uppercase 'H' offset alias is deprecated in
# pandas 2.2+ and scheduled for removal; 'h' is accepted by older pandas too.
date_list = pd.date_range(startdate, enddate, freq='6h')
# Loop through dates, downloading one point sample per 6-hourly analysis
for anl_time in date_list:
    # THREDDS catalog of 0.5-degree GFS analysis files for this day
    day_catalog = TDSCatalog('https://www.ncei.noaa.gov/thredds/catalog/model-gfs-g4-anl-files/' +
                             anl_time.strftime('%Y%m') + '/' + anl_time.strftime('%Y%m%d') + '/' + 'catalog.xml')

    # Analysis (_000) file for the instantaneous fields; the +006 forecast
    # carries the 6-hour precipitation accumulation.
    stamp = anl_time.strftime('%Y%m%d_%H%M')
    anl_file = day_catalog.datasets['gfs_4_' + stamp + '_000.grb2']
    fcst_file = day_catalog.datasets['gfs_4_' + stamp + '_006.grb2']

    # Point-subset services and queries for both files
    anl_ncss = anl_file.subset()
    anl_query = anl_ncss.query()
    precp_ncss = fcst_file.subset()
    precp_query = precp_ncss.query()

    # Instantaneous fields at the offshore grid point
    anl_query.lonlat_point(GFS_Lon, GFS_Lat).time(anl_time)
    anl_query.accept('netcdf')
    anl_query.variables(var[0], var[1], var[2], var[3])
    anl_query.vertical_level(10)

    data = anl_ncss.get_data(anl_query)
    data = xr.open_dataset(NetCDF4DataStore(data), drop_variables='height_above_ground4')

    # Accumulated precipitation valid 6 h after the analysis time
    precp_query.lonlat_point(GFS_Lon, GFS_Lat).time(anl_time + timedelta(hours=6))
    precp_query.accept('netcdf')
    precp_query.variables(var_precp[0])

    data_precp = precp_ncss.get_data(precp_query)
    data_precp = xr.open_dataset(NetCDF4DataStore(data_precp))

    # Extract each field, then append the single point value (with units)
    temp_3d = data[var[0]]
    gust_3d = data[var[1]]
    wndu_3d = data[var[2]]
    wndv_3d = data[var[3]]
    prep_3d = data_precp[var_precp[0]]

    temp_1d.append(temp_3d.metpy.unit_array.squeeze())
    gust_1d.append(gust_3d.metpy.unit_array.squeeze())
    wndu_1d.append(wndu_3d.metpy.unit_array.squeeze())
    wndv_1d.append(wndv_3d.metpy.unit_array.squeeze())
    prep_1d.append(prep_3d.metpy.unit_array.squeeze())
    time_1d.append(find_time_var(temp_3d))
#%% Process Met Data
# 24 h precipitation: sum of the four 6-hour accumulations ending at metDate+6h.
met_prep = prep_1d[0] + prep_1d[1] + prep_1d[2] + prep_1d[3]

# Linear interpolation between the analyses bracketing the survey start:
# index 3 is metDate, index 4 is metDate+6h. timeWeight1 is the fraction of
# the 6-hour window elapsed at the survey start (0 at metDate, 1 at metDate+6h);
# timeWeight2 is the remainder (they sum to 1).
timeWeight1 = minTime.tz_convert('UTC') - metDate
timeWeight2 = (metDate + timedelta(hours=6)) - minTime.tz_convert('UTC')

timeWeight1 = timeWeight1.seconds / 21600
timeWeight2 = timeWeight2.seconds / 21600

# BUG FIX: the weights were attached to the wrong endpoints. For linear
# interpolation at time t between t3 and t4,
#   v = v3 * (t4 - t)/(t4 - t3) + v4 * (t - t3)/(t4 - t3)
#     = v3 * timeWeight2 + v4 * timeWeight1
# i.e. the analysis CLOSER in time must get the LARGER weight; the original
# code had v3 * timeWeight1 + v4 * timeWeight2 (swapped).
met_gust = gust_1d[3] * timeWeight2 + gust_1d[4] * timeWeight1
met_temp = temp_1d[3] * timeWeight2 + temp_1d[4] * timeWeight1
met_wind = math.sqrt((wndv_1d[3].m.item(0) * timeWeight2 + wndv_1d[4].m.item(0) * timeWeight1) ** 2 +
                     (wndu_1d[3].m.item(0) * timeWeight2 + wndu_1d[4].m.item(0) * timeWeight1) ** 2)
# NOTE(review): atan2(v, u) gives the mathematical (CCW-from-east, blowing-to)
# angle, not the meteorological (CW-from-north, blowing-from) convention —
# confirm which convention the summary table is meant to report.
met_wdir = math.degrees(math.atan2(
    wndv_1d[3].m.item(0) * timeWeight2 + wndv_1d[4].m.item(0) * timeWeight1,
    wndu_1d[3].m.item(0) * timeWeight2 + wndu_1d[4].m.item(0) * timeWeight1)) % 360
#%% Add PSU and depth units
# Practical salinity from conductivity / temperature / pressure (TEOS-10 gsw).
_cond = Obs_geo['CH3:Conductivity(mS/cm)'].values
_temp = Obs_geo['CH1:Temperature(degC)'].values
_pres = Obs_geo['CH0:Pressure(dbar)'].values
Obs_geo['PSU'] = gsw.conversions.SP_from_C(_cond, _temp, _pres)

# Depth [m] from hydrostatics: pressure (dbar -> Pa via *1e4) / (rho * g).
Obs_geo['Depth'] = (Obs_geo['CH0:Pressure(dbar)'].astype(float) * 10000) / (
    gsw.rho(Obs_geo['PSU'].values, _temp, _pres) * 9.81)
#%% Plot time series for Geo data
# One panel per water-quality parameter: raw 1 Hz trace in grey, 2-minute
# smoothed trace in black, with numbered labels at turbidity maxima that
# cross-reference the map figure. Also fills the summary-statistics table.
fontprops = fm.FontProperties(size=25)

fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(19, 25), constrained_layout=True)

# Summary table layout: rows 0-3 met, rows 4-6 offshore waves (filled by a
# separate workflow), rows 7-12 water-quality parameters, row 13 observation
# date. Columns: mean / std / min / max.
dataTable = np.zeros([14, 4])
roundIDX = [1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1]  # display decimals per row

params = ['Depth', 'PSU', 'CH8:Oxygen_Saturation(%)', 'CH1:Temperature(degC)',
          'CH6:Turbidity(NTU)', 'CH2:Chlorophyll_a(ppb)']
paramName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ sat [%]', 'Temperature [degC]',
             'Turbidity [FTU]', 'Chl-a [ppb]']

parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae]
paramMin = [0.0, 32, 80, 26.0, 0, 0]
paramMax = [5.0, 33.5, 150, 35.0, 100.0, 15000]
# paramMin = [0.0, 32.0, 80, 26.0, 0, 0]
# paramMax = [5.0, 36.0, 150, 35.0, 100.0, 15000]

fig.patch.set_facecolor('white')
# fig.tight_layout(pad=1.05)

# Axis dates rendered in local (Barbados) time
date_form = DateFormatter("%d-%m %H:%M", tz=pytz.timezone('America/Barbados'))

# Met rows of the summary table.
# BUG FIX: the Kelvin-to-Celsius offset is 273.15; the original subtracted
# 272.15, biasing reported air temperature warm by 1 degC.
dataTable[0, 0] = met_temp.m.item(0) - 273.15
dataTable[1, 0] = met_wind
dataTable[2, 0] = met_wdir
dataTable[3, 0] = met_prep.m.item(0)

ilocs_max = []      # per-parameter local-maxima indices (filled for turbidity only)
ilocs_max_pts = []  # matching timestamps
OBS_mask = []       # per-parameter mask: inside time window and above plot floor

for paramIDX, param in enumerate(params):
    # Raw 1 Hz observations
    Obs_geo.loc[minTime:maxTime, param].plot(
        ax=axes[paramIDX], label='1 Second Observations', color='lightgrey')

    # Mask: within the survey window and above the parameter's display floor
    OBS_mask.append(((Obs_geo.index > minTime) &
                     (Obs_geo.index < maxTime) &
                     (Obs_geo[param] > paramMin[paramIDX])))

    # 12-sample Nuttall-windowed rolling mean (~2 minutes of data)
    OBS_smoothed = Obs_geo.loc[OBS_mask[paramIDX], param].rolling(
        12, win_type='nuttall', center=True).mean()

    OBS_smoothed.plot(
        ax=axes[paramIDX], label='2 Minute Average', color='black',
        linewidth=3)

    # Find and label the local maxima for turbidity only
    if param == 'CH6:Turbidity(NTU)':
        ilocs_max.append(argrelextrema(OBS_smoothed.values,
                                       np.greater_equal, order=200, mode='wrap')[0])

        ilocs_max_pts.append(OBS_smoothed.iloc[ilocs_max[paramIDX]].index.values)

        # Numbered labels only make sense when a GPS track exists — they are
        # cross-referenced on the map figure
        if GPS_File is not None:
            for a in range(0, len(ilocs_max[paramIDX])):
                axes[paramIDX].annotate(str(a + 1), (ilocs_max_pts[paramIDX][a], 25), fontsize=30)
    else:
        # Keep the lists index-aligned with params
        ilocs_max.append(None)
        ilocs_max_pts.append(None)

    # Water-quality rows of the summary table (row index = paramIDX + 7)
    dataTable[paramIDX + 7, 0] = OBS_smoothed.mean(skipna=True)
    dataTable[paramIDX + 7, 1] = OBS_smoothed.std(skipna=True)
    dataTable[paramIDX + 7, 2] = max(OBS_smoothed.min(skipna=True), 0)  # clamp smoothing undershoot at zero
    dataTable[paramIDX + 7, 3] = OBS_smoothed.max(skipna=True)

    axes[paramIDX].set_ylabel(paramName[paramIDX])
    axes[paramIDX].set_title(paramName[paramIDX])
    axes[paramIDX].set_xlabel('')
    axes[paramIDX].set_ylim(paramMin[paramIDX], paramMax[paramIDX])
    axes[paramIDX].legend(loc='upper right')
    axes[paramIDX].xaxis.set_major_formatter(date_form)
# Format Data Table: build display-ready mean and "min / max" strings per row
dataTableFormat_mean = []
dataTableFormat_maxmin = []
for d, ndec in enumerate(roundIDX):
    dataTableFormat_mean.append(round(dataTable[d, 0], ndec))
    if dataTable[d, 3] == 0:
        # a zero max means the row was never populated
        dataTableFormat_maxmin.append('--')
    else:
        dataTableFormat_maxmin.append(
            str(round(dataTable[d, 2], ndec)) + ' / ' + str(round(dataTable[d, 3], ndec)))

dfOut = pd.DataFrame(dataTable[:, :])
dfOutFormat = pd.DataFrame([dataTableFormat_mean, dataTableFormat_maxmin]).transpose()

# Column labels: raw stats table, and a two-level header (date over stat)
# for the formatted table.
# NOTE(review): '%#d' (day without leading zero) is Windows-only strftime
# syntax — this will raise on Linux/macOS; '%-d' is the POSIX equivalent.
dfOut.columns = ['Mean', 'Standard Deviation', 'Min', 'Max']
dfOutFormat.columns = [np.array([minTime.strftime('%B %#d, %Y'), minTime.strftime('%B %#d, %Y')]),
                       np.array(['Mean', 'Min / Max'])]

# Row labels; the raw table also stores the observation date in its last row
dfOut.iloc[13, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
            'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
            'Mean Wave Direction Offshore [deg]', 'Depth [m]', 'Salinity [PSU]',
            'Dissolved O₂ saturation [%]', 'Temperature [degC]',
            'Turbidity [FTU]', 'Chl-a [ppb]', 'Observation Date']

dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
fig.suptitle(f'{siteName} ({timeLabel})', fontsize=30)

plt.show()

# Write outputs next to the imported field data
# outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]
outPath = importPath

# Make sure both the output root and its Figures subfolder exist
for _dir in (outPath, outPath + '/Figures'):
    if not os.path.exists(_dir):
        os.mkdir(_dir)

# dfOut.to_excel(outPath + '/Summary_Stats3_' + siteName + '.xlsx')
# dfOutFormat.to_excel(outPath + '/Summary_StatsFormat_3' + siteName + '.xlsx')
# dfOut.to_csv(outPath + '/Summary_Stats3_' + siteName + '.csv')

# fig.savefig(outPath + '/Figures/SummaryTimeSeries3_' + siteName + '.pdf',
#             bbox_inches='tight')
# fig.savefig(outPath + '/Figures/SummaryTimeSeries3_' + siteName + '.png',
#             bbox_inches='tight', dpi=500)
#%% Plot Maps
# Map panels: one per parameter, samples coloured by value over a Mapbox basemap.
fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(25, 19), constrained_layout=True)
ax = axes.flat

fig.patch.set_facecolor('white')
fontprops = fm.FontProperties(size=25)
x, y, arrow_length = 0.95, 0.93, 0.20  # north-arrow anchor in axes fraction
plt.rcParams.update({'font.size': 22})

# Auto-extent around the GPS track (kept for reference; fixed extent used below)
axXlimTT = (Obs_geo.loc[minTime:maxTime].geometry.x.min() - 500,
            Obs_geo.loc[minTime:maxTime].geometry.x.max() + 500)
axYlimTT = (Obs_geo.loc[minTime:maxTime].geometry.y.min() - 500,
            Obs_geo.loc[minTime:maxTime].geometry.y.max() + 500)

plt.setp(axes, xlim=axXlim, ylim=axYlim)
# plt.setp(axes, xlim=axXlimTT, ylim=axYlimTT)

for k, param in enumerate(params):
    # Observations coloured by parameter value
    Obs_geo.loc[minTime:maxTime].plot(
        column=param, ax=ax[k], vmin=paramMin[k], vmax=paramMax[k],
        legend=True, legend_kwds={'label': paramName[k]},
        cmap=parmCmap[k], markersize=20)

    ctx.add_basemap(ax[k], source=mapbox, crs='EPSG:32620')

    # Number the turbidity maxima on the map (matches the time-series labels)
    if (not Obs_geo.geometry.isnull().all()) and (GPS_File is not None) and (param == 'CH6:Turbidity(NTU)'):
        for a in range(0, len(ilocs_max[k])):
            pt = Obs_geo.loc[OBS_mask[k]].iloc[ilocs_max[k][a], :].geometry
            ax[k].annotate(str(a + 1), (pt.x, pt.y), fontsize=30)

    ax[k].set_title(paramName[k])
    ax[k].locator_params(axis='y', nbins=3)
    ax[k].ticklabel_format(useOffset=False, style='plain', axis='both')
    ax[k].get_xaxis().set_ticks([])
    ax[k].get_yaxis().set_ticks([])

    # Scale bar and north arrow
    scalebar = AnchoredSizeBar(ax[k].transData, 100, '100 m', 'lower right',
                               pad=0.5, size_vertical=10, fontproperties=fontprops)
    ax[k].add_artist(scalebar)
    ax[k].annotate('N', xy=(x, y), xytext=(x, y - arrow_length),
                   arrowprops=dict(facecolor='black', width=6, headwidth=30),
                   ha='center', va='center', fontsize=35,
                   xycoords=ax[k].transAxes)

fig.suptitle(siteName + ' (' + timeLabel + ')', fontsize=30)

plt.show()

# outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]
outPath = importPath

for _dir in (outPath, outPath + '/Figures'):
    if not os.path.exists(_dir):
        os.mkdir(_dir)

# fig.savefig(outPath + '/Figures/SummaryMap2_' + timeLabel + '_' + siteName + '.pdf',
#             bbox_inches='tight')
# fig.savefig(outPath + '/Figures/SummaryMap2_' + timeLabel + '_' + siteName + '.png',
#             bbox_inches='tight', dpi=500)
#%% Summary Sheet
# Merge the per-site formatted stats workbooks into one sheet per survey group.
# NOTE(review): 'importPaths' and 'siteNames' are never defined in this script —
# this cell only runs in a session where another script has already created
# them (21 site/trip entries, grouped in threes). Confirm before relying on it.
plotIDXsLoop = []
# (older explicit index groupings, kept for reference)
# plotIDXsLoop.append([0, 3, 6, 9])
# plotIDXsLoop.append([1, 4, 7, 10])
# plotIDXsLoop.append([2, 5, 8, 11])
# plotIDXsLoop.append([np.arange(0, 21*3, 3)])
# plotIDXsLoop.append([np.arange(1, 21*3, 3)])
# plotIDXsLoop.append([np.arange(2, 21*3, 3)])

for i in range(0, 3):
    summTable = None
    # plotIDXs = plotIDXsLoop[i]
    plotIDXs = np.arange(i, 21, 3)  # every third entry forms one group

    for s, plotIDX in enumerate(plotIDXs):
        ## Define master import path
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]

        obsStatsIN = pd.read_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx', header=[0, 1], index_col=0)
        # Harmonize row labels that changed between instrument deployments
        if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
            obsStatsIN.rename({'Turbidity [NTU]': 'Turbidity [FTU]'}, inplace=True)
        if plotIDX > 11:
            obsStatsIN.rename({'Chl-a [ppb]': 'Chl-a [ug/l]'}, inplace=True)

        summTable = obsStatsIN if s == 0 else summTable.join(obsStatsIN)

    # -999 is the workbooks' missing-data sentinel
    summTable.replace(-999, np.nan, inplace=True)

    summTable.to_excel('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/Summary_StatsMerge_' + siteName + '.xlsx')
#%% Summary Plot
# Scatter of each summary metric over survey dates, one figure per group.
# NOTE(review): 'importPaths' and 'siteNames' are defined outside this script
# (see the Summary Sheet cell above it in the original workflow).
plotvars = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
            'Significant Wave Height [m]', 'Salinity [PSU]',
            'Dissolved O₂ [%]', 'Temperature [degC]',
            'Turbidity [FTU]', 'Chl-a. [ug/L]']

for i in range(0, 3):
    summTable = None
    plotIDXs = np.arange(i, 21, 3)

    plotDates = []
    plotTable = np.empty([10, len(plotIDXs)])

    for s, plotIDX in enumerate(plotIDXs):
        ## Define master import path
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]

        obsStatsIN = pd.read_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
        # BUG FIX: `import datetime as datetime` at the top of the file rebinds
        # the name `datetime` to the MODULE (shadowing the earlier
        # `from datetime import datetime`), so the bare datetime.strptime
        # raised AttributeError — the class must be reached via
        # datetime.datetime.strptime.
        # Sheets from different deployments have different row layouts:
        if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
            plotTable[0:3, s] = obsStatsIN.iloc[0:3, 1]
            plotTable[8, s] = obsStatsIN.iloc[7, 1]
            plotDates.append(datetime.datetime.strptime(obsStatsIN.iloc[8, 1], '%B %d, %Y'))
        elif plotIDX < 12:
            plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 13], 1]
            plotDates.append(datetime.datetime.strptime(obsStatsIN.iloc[14, 1], '%B %d, %Y'))
        else:
            plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 12], 1]
            plotDates.append(datetime.datetime.strptime(obsStatsIN.iloc[13, 1], '%B %d, %Y'))

    fig, axes = plt.subplots(nrows=5, ncols=2, figsize=(19, 25), sharex=True)
    fig.patch.set_facecolor('white')
    fig.tight_layout(pad=3)
    ax = axes.flat

    # Replace zero (unpopulated) entries with NaN so they drop from the plots
    plotTable[plotTable < 0.00001] = np.nan

    for v in range(0, 10):
        ax[v].scatter(plotDates, plotTable[v, :], 250)
        ax[v].set_ylabel(plotvars[v])

    fig.suptitle(siteName, fontsize=35)
    plt.gcf().autofmt_xdate()
    plt.gcf().align_ylabels()
    plt.show()

    fig.savefig('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + siteName + '.pdf',
                bbox_inches='tight')
    fig.savefig('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + siteName + '.png',
                bbox_inches='tight', dpi=500)