NTC Mustique EWR Code Updates

This commit is contained in:
Alexander Rey 2022-09-28 09:18:04 -04:00
parent 507c97548f
commit 016e983cea
11 changed files with 3459 additions and 0 deletions

0
EWR_D3D/EWR_DFM_Sens2.py Normal file
View File

602
EWR_D3D/EWR_DFM_WAQplot.py Normal file
View File

@ -0,0 +1,602 @@
#%% Plotting EWR Flume Tests
# Alexander Rey, 2022
#%% Import
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from scipy.interpolate import LinearNDInterpolator, interp1d
from dfm_tools.get_nc import get_netdata, get_ncmodeldata, plot_netmapdata
from dfm_tools.get_nc_helpers import get_timesfromnc, get_ncfilelist, get_hisstationlist, get_ncvardimlist, get_timesfromnc
import pathlib as pl
#%% Read Model Log
# Load the "Sensitivity" sheet of the Delft3D model-run log from the project
# network share; each row describes one run, including its output location.
# runLog = pd.read_excel("Y:/12828.101 English Wabigoon River/06_Models/00_Delft3D/ModelRuns.xlsx", "Sensitivity")
pth = pl.Path("//srv-ott3.baird.com/", "Projects", "12828.101 English Wabigoon River", "06_Models", "00_Delft3D", "ModelRuns.xlsx")
runLog = pd.read_excel(pth.as_posix(), "Sensitivity")
# NOTE(review): dataPath appears unused in this script — confirm before removing
dataPath = "FlowFM_map.nc"
#%% Import using DFM Functions
# Extract final-timestep, surface-layer water-quality fields from the DELWAQ
# map output of the selected run into a DataFrame (dataOUT) indexed by cell
# x-coordinate (distance along the flume).
modelPlot = 27  # row index into the run log: which sensitivity run to plot
file_nc_map = pl.Path(runLog['Run Location'][modelPlot], 'Water_Quality', 'output', 'deltashell_map.nc')
tSteps = get_timesfromnc(file_nc=file_nc_map, varname='time')
# Print file variables
A = get_ncvardimlist(file_nc_map.as_posix())
# print(A)
# Define extraction variables: partition fractions (FrHg*/FrMm*), bulk
# concentrations, sediment-layer (S1/S2) stores, and process fluxes.
dataVars = ['FrHgIM1', 'FrHgIM2', 'FrHgIM3', 'FrHgDOC', 'FrHgPOC', 'FrHgPHYT', 'FrHgDis',
            'FrHgIM1S1', 'FrHgIM2S1', 'FrHgIM3S1', 'FrHgDOCS1', 'FrHgPOCS1', 'FrHgPHYTS1', 'FrHgDisS1',
            'FrHgIM1S2', 'FrHgIM2S2', 'FrHgIM3S2', 'FrHgDOCS2', 'FrHgPOCS2', 'FrHgPHYTS2', 'FrHgDisS2',
            'FrMmIM1', 'FrMmIM2', 'FrMmIM3', 'FrMmDOC', 'FrMmPOC', 'FrMmPHYT', 'FrMmDis',
            'FrMmIM1S1', 'FrMmIM2S1', 'FrMmIM3S1', 'FrMmDOCS1', 'FrMmPOCS1', 'FrMmPHYTS1', 'FrMmDisS1',
            'FrMmIM1S2', 'FrMmIM2S2', 'FrMmIM3S2', 'FrMmDOCS2', 'FrMmPOCS2', 'FrMmPHYTS2', 'FrMmDisS2',
            'IM1', 'IM2', 'IM3', 'Hg', 'Mm', 'H0', 'POC1', 'POC2',
            'IM1S1', 'IM2S1', 'IM3S1', 'HgS1', 'MmS1', 'DetCS1', 'OOCS1',
            'IM1S2', 'IM2S2', 'IM3S2', 'HgS2', 'MmS2', 'DetCS2', 'OOCS2',
            'fResS1IM1', 'fSedIM1', 'fResS1IM2', 'fSedIM2',
            'fResS1DetC', 'fSedPOC1', 'fResS1OOC', 'fSedPOC2',
            'fSedHg', 'fResS1Hg', 'fSedMm', 'fResS1Mm',
            'HgHgS1O', 'MmMmS1O',
            'HgMmO', 'HgS1MmS1O', 'MmHgO', 'MmS1HgS1O',
            'oPhoDeg', 'oPhoOxy', 'oPhoRed',
            'f_minPOC1', 'MnODCS1']
# Create Pandas Output Array
dataIN = get_ncmodeldata(file_nc=file_nc_map.as_posix(),
                         varname='mesh2d_face_x',
                         silent=True)
# [2:-2] drops two cells at each end — presumably boundary cells; TODO confirm
dataOUT = pd.DataFrame(index=np.array(dataIN[2:-2]))
# Extract variables
for vIDX, v in enumerate(dataVars):
    # Extract data from NetCDF file: final timestep, surface layer (layer=0)
    dataIN = get_ncmodeldata(file_nc=file_nc_map.as_posix(),
                             varname='mesh2d_' + v, timestep=len(tSteps)-1,
                             silent=True, layer=0)
    dataOUT[v] = np.array(dataIN[0][0][2:-2])
    print(v)  # progress indicator — extraction is slow
#%% Plot Partition Data
# Four-panel figure of Hg/MeHg partitioning fractions across carrier phases,
# in the water column (top row) and the bed sediment layer S1 (bottom row).
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# (panel index, column prefix, column suffix, title, y-axis label)
panelSpecs = [(0, 'FrHg', '', 'Hg Fraction in Water', 'Percentage Hg'),
              (1, 'FrMm', '', 'MeHg Fraction in Water', 'Percentage MeHg'),
              (2, 'FrHg', 'S1', 'Hg Fraction in Sediment', 'Percentage Hg'),
              (3, 'FrMm', 'S1', 'MeHg Fraction in Sediment', 'Percentage MeHg')]
# Carrier phases drawn in every panel, paired with their legend labels
phaseLabels = [('IM1', 'IM1'), ('IM2', 'IM2'), ('IM3', 'IM3'),
               ('POC', 'POC'), ('DOC', 'DOC'), ('PHYT', 'PHYT'),
               ('Dis', 'Dissolved')]
for pIDX, prefix, suffix, title, yLabel in panelSpecs:
    ax[pIDX].set_title(title)
    for phase, label in phaseLabels:
        ax[pIDX].plot(dataOUT.index, dataOUT[prefix + phase + suffix],
                      linewidth=3, label=label)
    ax[pIDX].legend()
    ax[pIDX].set_ylabel(yLabel)
    ax[pIDX].set_xlabel('Distance Along Flume [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/Partitioning.png',
            bbox_inches='tight', dpi=200)
#%% Plot Total Data
# Four panels: total Hg and MeHg in the water column (converted to ng/L) and
# in the bed sediment (normalised to ng per g of total solids).
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# Total bed-layer solids mass used to express sediment Hg/MeHg per gram
totalSolidsS1 = (dataOUT['IM1S1'] + dataOUT['IM2S1'] + dataOUT['IM3S1']
                 + dataOUT['DetCS1'] + dataOUT['OOCS1'])
waterPanels = [(0, 'Hg', 'Hg in Water', 'Hg', 'Hg [ng/L]'),
               (1, 'Mm', 'MeHg in Water', 'MeHg', 'MeHg [ng/L]')]
for pIDX, col, title, label, yLabel in waterPanels:
    ax[pIDX].set_title(title)
    # factor 1e6 converts model g/m3 to ng/L
    ax[pIDX].plot(dataOUT.index, dataOUT[col] * 1000000, linewidth=3, label=label)
    ax[pIDX].legend()
    ax[pIDX].set_ylabel(yLabel)
    ax[pIDX].set_xlabel('Distance Along Flume [m]')
sedPanels = [(2, 'HgS1', 'Hg in Sediment', 'HgS1', 'HgS1 [ng/g]'),
             (3, 'MmS1', 'MeHg in Sediment', 'MeHgS1', 'MeHg [ng/g]')]
for pIDX, col, title, label, yLabel in sedPanels:
    ax[pIDX].set_title(title)
    # g -> ng (1e9) divided by total solids mass gives ng/g dry sediment
    ax[pIDX].plot(dataOUT.index, dataOUT[col] * 1 * 10 ** 9 / totalSolidsS1,
                  linewidth=3, label=label)
    ax[pIDX].legend()
    ax[pIDX].set_ylabel(yLabel)
    ax[pIDX].set_xlabel('Distance Along Flume [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/HgTotals.png',
            bbox_inches='tight', dpi=200)
#%% Plot POC total Data
# Four panels: labile (POC1/DetC) and refractory (POC2/OOC) organic carbon in
# the water column (g/m3) and the bed sediment (g per g of total solids).
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# Total bed-layer solids mass, for the per-gram sediment panels
totalSolidsS1 = (dataOUT['IM1S1'] + dataOUT['IM2S1'] + dataOUT['IM3S1']
                 + dataOUT['DetCS1'] + dataOUT['OOCS1'])
# (panel, column, title, legend label, y label, normalise by bed solids?)
pocPanels = [(0, 'POC1', 'POC1 (Fast) in Water', 'POC1', 'POC1 [g/m3]', False),
             (1, 'POC2', 'POC2 (Slow) in Water', 'POC2', 'POC2 [g/m3]', False),
             (2, 'DetCS1', 'DetC (Fast) in Sediment', 'DetCS1', 'DetCS1 [g/g]', True),
             (3, 'OOCS1', 'OOC (Slow) in Sediment', 'OOCS1', 'OOCS1 [g/g]', True)]
for pIDX, col, title, label, yLabel, normalise in pocPanels:
    ax[pIDX].set_title(title)
    series = dataOUT[col] / totalSolidsS1 if normalise else dataOUT[col]
    ax[pIDX].plot(dataOUT.index, series, linewidth=3, label=label)
    ax[pIDX].legend()
    ax[pIDX].set_ylabel(yLabel)
    ax[pIDX].set_xlabel('Distance Along Flume [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/POCTotals.png',
            bbox_inches='tight', dpi=200)
#%% More Total Data
# Four panels: elemental Hg(0) in water (ng/L) and the three inorganic
# sediment fractions (sand / silt / woodchips) in the water column (g/m3).
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# Hg(0) converted from g/m3 to ng/L via the 1e6 factor
totalPanels = [(0, dataOUT['H0'] * 1000000, 'Hg(0) in Water', 'Hg(0)', 'Hg(0) [ng/L]'),
               (1, dataOUT['IM1'], 'IM1 (Sand) in Water', 'IM1', 'IM1 [g/m3]'),
               (2, dataOUT['IM2'], 'IM2 (Silt) in Water', 'IM2', 'IM2 [g/m3]'),
               (3, dataOUT['IM3'], 'IM3 (Woodchips) in Water', 'IM3 (Woodchips)', 'IM3 [g/m3]')]
for pIDX, series, title, label, yLabel in totalPanels:
    ax[pIDX].set_title(title)
    ax[pIDX].plot(dataOUT.index, series, linewidth=3, label=label)
    ax[pIDX].legend()
    ax[pIDX].set_ylabel(yLabel)
    ax[pIDX].set_xlabel('Distance Along Flume [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/InorganicTotals.png',
            bbox_inches='tight', dpi=200)
#%% Plot Fluxes
# Four panels of sedimentation vs. resuspension fluxes [g/m2/d] for sand,
# organic carbon, Hg, and MeHg.
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# (panel, title, [(column, legend label), ...], y label)
fluxPanels = [
    (0, 'IM1 (Sand) Resuspension Flux',
     [('fResS1IM1', 'IM1 Resuspension'), ('fSedIM1', 'IM1 Sedimentation')],
     'IM1 (Sand) Sedimentation and Resuspension [g/m2/d]'),
    (1, 'POC Resuspension Flux',
     [('fResS1DetC', 'DetC Resuspension'), ('fResS1OOC', 'OOC Resuspension'),
      ('fSedPOC1', 'POC1 (Fast) Sedimentation'), ('fSedPOC2', 'POC2 (Slow) Sedimentation')],
     'POC Sedimentation and Resuspension [g/m2/d]'),
    (2, 'Hg Resuspension Flux',
     [('fResS1Hg', 'Hg Resuspension'), ('fSedHg', 'Hg Sedimentation')],
     'Hg Sedimentation and Resuspension [g/m2/d]'),
    (3, 'MeHg Resuspension Flux',
     [('fResS1Mm', 'MeHg Resuspension'), ('fSedMm', 'MeHg Sedimentation')],
     'MeHg Sedimentation and Resuspension [g/m2/d]'),
]
for pIDX, title, seriesList, yLabel in fluxPanels:
    ax[pIDX].set_title(title)
    for col, label in seriesList:
        ax[pIDX].plot(dataOUT.index, dataOUT[col], linewidth=3, label=label)
    ax[pIDX].legend()
    ax[pIDX].set_ylabel(yLabel)
    ax[pIDX].set_xlabel('Distance Along Flume [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/HgFluxes.png',
            bbox_inches='tight', dpi=200)
#%% Plot More Fluxes
# Four panels: methylation/demethylation fluxes in water and sediment, and
# POC degradation fluxes in water and sediment.
# BUGFIX: legend labels previously misspelled 'Mehtylation'/'Demehtylation';
# ax[3] y-label was missing its closing bracket.
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=4)
ax = axes.flat
# Methylation Flux in Water [g/m3/d]
ax[0].set_title('Methylation Flux in Water')
ax[0].plot(dataOUT.index, dataOUT['HgMmO'], linewidth=3, label='Methylation')
ax[0].plot(dataOUT.index, dataOUT['MmHgO'], linewidth=3, label='Demethylation')
ax[0].legend()
ax[0].set_ylabel('Methylation Flux in Water [g/m3/d]')
ax[0].set_xlabel('Distance Along Flume [m]')
# Methylation flux in sediment, normalised by the sediment Hg (or MeHg) pool
# so the panel shows an effective first-order rate [g/gHg/d]
ax[1].set_title('Methylation Flux in Sediment')
ax[1].plot(dataOUT.index, dataOUT['HgS1MmS1O'] / dataOUT['HgS1'],
           linewidth=3, label='Methylation')
ax[1].plot(dataOUT.index, dataOUT['MmS1HgS1O'] / dataOUT['MmS1'],
           linewidth=3, label='Demethylation')
ax[1].legend()
ax[1].set_ylabel('Methylation Flux in Sediment [g/gHg/d]')
ax[1].set_xlabel('Distance Along Flume [m]')
# POC Degradation Flux in water [g/m3/d]
ax[2].set_title('POC Degradation Flux in Water')
ax[2].plot(dataOUT.index, dataOUT['f_minPOC1'], linewidth=3, label='POC Degradation Water')
ax[2].legend()
ax[2].set_ylabel('POC Degradation [g/m3/d]')
ax[2].set_xlabel('Distance Along Flume [m]')
# POC degradation in sediment, normalised by the total sediment POC pool
ax[3].set_title('POC Degradation Flux in Sediment')
ax[3].plot(dataOUT.index, dataOUT['MnODCS1']
           / (dataOUT['DetCS1'] + dataOUT['OOCS1']),
           linewidth=3, label='POC Degradation Sediment')
ax[3].legend()
ax[3].set_ylabel('POC Degradation [g/gPOC/d]')
ax[3].set_xlabel('Distance Along Flume [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/POCFluxes.png',
            bbox_inches='tight', dpi=200)
#%% Photo Fluxes
# Four panels: sediment-water diffusion of Hg/MeHg, plus the three
# photochemical Hg(0)/MeHg process fluxes.
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=4)
ax = axes.flat
# Diffusion panel carries two series; the photo panels one each
ax[0].set_title('Diffusion Flux')
for col, label in [('HgHgS1O', 'Hg Diffusion'), ('MmMmS1O', 'MeHg Diffusion')]:
    ax[0].plot(dataOUT.index, dataOUT[col], linewidth=3, label=label)
ax[0].legend()
ax[0].set_ylabel('Diffusion [g/m2/d]')
ax[0].set_xlabel('Distance Along Flume [m]')
photoPanels = [(1, 'oPhoOxy', 'Photo Oxidation Flux'),
               (2, 'oPhoDeg', 'Photo Degradation Flux'),
               (3, 'oPhoRed', 'Photo Reduction Flux')]
for pIDX, col, title in photoPanels:
    ax[pIDX].set_title(title)
    ax[pIDX].plot(dataOUT.index, dataOUT[col], linewidth=3, label=title)
    ax[pIDX].legend()
    ax[pIDX].set_ylabel(title + ' [g/m3/d]')
    ax[pIDX].set_xlabel('Distance Along Flume [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/H0Fluxes.png',
            bbox_inches='tight', dpi=200)
#%% Make summary table
# Build a flat dict of spatially averaged model quantities, then export one
# column per model run to Excel. Insertion order defines the row order in the
# spreadsheet.
dataTableOut = dict()
# Partition fractions: loop preserves the original row order
# (Hg water, MeHg water, Hg S1, MeHg S1; phases IM1..Dissolved).
# BUGFIX: 'MeHg:Phyt Fraction S1' previously read the water-column column
# 'FrMmPHYT' instead of the sediment column 'FrMmPHYTS1'.
fractionPhases = [('IM1', 'IM1'), ('IM2', 'IM2'), ('IM3', 'IM3'),
                  ('POC', 'POC'), ('DOC', 'DOC'), ('PHYT', 'Phyt'),
                  ('Dis', 'Dissolved')]
for colSuffix, keySuffix in [('', ''), ('S1', ' S1')]:
    for species, colPrefix in [('Hg', 'FrHg'), ('MeHg', 'FrMm')]:
        for phase, phaseName in fractionPhases:
            dataTableOut[species + ':' + phaseName + ' Fraction' + keySuffix] = \
                dataOUT[colPrefix + phase + colSuffix].mean()
# Mean total bed-layer solids, used to normalise sediment stores to per-gram
solidsS1 = (dataOUT['IM1S1'].mean() + dataOUT['IM2S1'].mean() + dataOUT['IM3S1'].mean() +
            dataOUT['DetCS1'].mean() + dataOUT['OOCS1'].mean())
# Bulk concentrations (1e6 converts g/m3 to ng/L; 1e9 converts g to ng)
dataTableOut['Hg in Water [ng/L]'] = dataOUT['Hg'].mean() * 1000000
dataTableOut['MeHg in Water [ng/L]'] = dataOUT['Mm'].mean() * 1000000
dataTableOut['Hg in Sediment [ng/g]'] = dataOUT['HgS1'].mean() * 1 * 10 ** 9 / solidsS1
dataTableOut['MeHg in Sediment [ng/g]'] = dataOUT['MmS1'].mean() * 1 * 10 ** 9 / solidsS1
dataTableOut['Hg(0) in Water [ng/L]'] = dataOUT['H0'].mean() * 1000000
# BUGFIX: removed a stray ax[1].set_title(...) left over from the plotting
# sections — it mutated the previous figure from inside the table build.
dataTableOut['IM1 (Sand) in Water [g/m3]'] = dataOUT['IM1'].mean()
dataTableOut['IM2 (Silt) in Water [g/m3]'] = dataOUT['IM2'].mean()
dataTableOut['IM3 (Woodchips) in Water [g/m3]'] = dataOUT['IM3'].mean()
dataTableOut['POC1 (Fast) in Water [g/m3]'] = dataOUT['POC1'].mean()
# BUGFIX: key previously said 'POC1 (Slow)' although it holds the POC2 column
dataTableOut['POC2 (Slow) in Water [g/m3]'] = dataOUT['POC2'].mean()
dataTableOut['DetC (Fast) in Sediment [g/g]'] = dataOUT['DetCS1'].mean() / solidsS1
dataTableOut['OOC (Slow) in Sediment [g/g]'] = dataOUT['OOCS1'].mean() / solidsS1
# Sedimentation / resuspension fluxes [g/m2/d]
fluxRows = [('IM1 (Sand) Resuspension Flux [g/m2/d]', 'fResS1IM1'),
            ('IM1 (Sand) Sedimentation Flux [g/m2/d]', 'fSedIM1'),
            ('DetC Resuspension Flux [g/m2/d]', 'fResS1DetC'),
            ('OOC Resuspension Flux [g/m2/d]', 'fResS1OOC'),
            ('POC1 (Fast) Sedimentation [g/m2/d]', 'fSedPOC1'),
            ('POC2 (Slow) Sedimentation [g/m2/d]', 'fSedPOC2'),
            ('Hg Resuspension Flux [g/m2/d]', 'fResS1Hg'),
            ('Hg Sedimentation Flux [g/m2/d]', 'fSedHg'),
            ('MeHg Resuspension Flux [g/m2/d]', 'fResS1Mm'),
            ('MeHg Sedimentation Flux [g/m2/d]', 'fSedMm')]
for key, col in fluxRows:
    dataTableOut[key] = dataOUT[col].mean()
# Methylation / demethylation (key typo 'Demehtylation' fixed)
dataTableOut['Methylation Flux in Water [g/m3/d]'] = dataOUT['HgMmO'].mean()
dataTableOut['Demethylation Flux in Water [g/m3/d]'] = dataOUT['MmHgO'].mean()
# Sediment rates normalised by the respective Hg / MeHg pool
dataTableOut['Methylation Flux in Sediment [g/gHg/d]'] = dataOUT['HgS1MmS1O'].mean() / dataOUT['HgS1'].mean()
dataTableOut['Demethylation Flux in Sediment [g/gMm/d]'] = dataOUT['MmS1HgS1O'].mean() / dataOUT['MmS1'].mean()
# POC Degradation Fluxes
dataTableOut['POC Degradation Flux in Water [g/m3/d]'] = dataOUT['f_minPOC1'].mean()
# BUGFIX: this row was previously mislabelled 'Demehtylation Flux in Water'
dataTableOut['POC Degradation Flux in Sediment [g/gPOC/d]'] = dataOUT['MnODCS1'].mean() /\
    (dataOUT['DetCS1'].mean() + dataOUT['OOCS1'].mean())
# Diffusion fluxes across the sediment-water interface
dataTableOut['Hg Diffusion [g/m2/d]'] = dataOUT['HgHgS1O'].mean()
dataTableOut['MeHg Diffusion [g/m2/d]'] = dataOUT['MmMmS1O'].mean()
# BUGFIX: the photo fluxes previously reused the key 'Hg Diffusion [g/m3/d]'
# (and 'MeHg Diffusion [g/m3/d]'), so the photo-oxidation value was silently
# overwritten by photo-reduction and never reached the spreadsheet.
dataTableOut['Photo Oxidation Flux [g/m3/d]'] = dataOUT['oPhoOxy'].mean()
dataTableOut['Photo Degradation Flux [g/m3/d]'] = dataOUT['oPhoDeg'].mean()
dataTableOut['Photo Reduction Flux [g/m3/d]'] = dataOUT['oPhoRed'].mean()
#%% Save Data Table as Excel
# One row per key, transposed so quantities run down the first column
dataTableOut_df = pd.DataFrame(data=dataTableOut, index=[0])
dataTableOut_df = (dataTableOut_df.T)
dataTableOut_df.to_excel('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ModelOutputs_Sept26.xlsx')

View File

@ -0,0 +1,60 @@
#%% Process EWR Data for use in OceanMesh2D
# Alexander Rey, July 12 2022
# %% Import
import pandas as pd
import geopandas as gp
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import matplotlib.cm as cm
from shapely.geometry import Point, MultiPoint, Polygon
import alphashape
import cartopy.crs as ccrs
import contextily as ctx
# %% Import Bathymetry Points to find Concave Hull
# Bathymetry soundings (x, y in UTM 15N metres; z elevation/depth) read from
# the project share as whitespace-delimited xyz.
bathyPoints = pd.read_csv("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/00_Grid/OceanMesh/TR_Bathy.xyz",
                          header=None, names=["x", "y", "z"], delim_whitespace=True)
# Convert to Geopandas (coordinates are UTM zone 15N, EPSG:32615)
bathyPoints_gdf = gp.GeoDataFrame(bathyPoints, geometry=gp.points_from_xy(bathyPoints.x, bathyPoints.y),
                                  crs="epsg:32615")
# %% Find Concave Hull
# alphashape expects plain (x, y) tuples rather than a GeoDataFrame
alphaPoints = list(map(tuple, np.column_stack((bathyPoints_gdf['x'], bathyPoints_gdf['y']))))
# Find the concave hull using the alphashape package
# (alpha=0.02 controls hull tightness — presumably tuned by eye; TODO confirm)
alphaPoly = alphashape.alphashape(alphaPoints, 0.02)
# %% Test Plot of Concave Hull
# Visual check: soundings (yellow dots) plus hull outline over a basemap
fig, axes = plt.subplots(nrows=1, ncols=1, subplot_kw={'projection': ccrs.epsg(32615)}, figsize=(10, 10))
fig.patch.set_facecolor('white')
axes.set_xlim(468545, 511043)
axes.set_ylim(5514782, 5546325)
# Add basemap
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32615')
bathyPoints_gdf.plot('.', ax=axes, color='y', markersize=0.1)
axes.plot(*alphaPoly.exterior.xy, color='k', linewidth=1)
plt.show()
# %% Save Concave Hull to Shapefile
# Reproject the hull to WGS84 before writing — presumably because the
# downstream OceanMesh2D workflow expects geographic coordinates; TODO confirm
dataBounds = gp.GeoDataFrame(index=[0], geometry=[alphaPoly], crs='EPSG:32615')
dataBounds_wgs = dataBounds.to_crs('EPSG:4326')
dataBounds_wgs.to_file(
    '//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/00_Grid/OceanMesh/ModelBoundsWGS.shp')

View File

@ -0,0 +1,526 @@
#%% Plotting script for LSU D3D data
# Alexander Rey, 2022
import os
import pandas as pd
import geopandas as gp
import netCDF4 as nc
import math
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import matplotlib.cm as cm
import datetime as datetime
from scipy.interpolate import LinearNDInterpolator, interp1d
from dfm_tools.get_nc import get_netdata, get_ncmodeldata, plot_netmapdata
from dfm_tools.get_nc_helpers import get_timesfromnc, get_ncfilelist, get_hisstationlist, get_ncvardimlist, get_timesfromnc
import cartopy.crs as ccrs
import contextily as ctx
from dfm_tools.regulargrid import scatter_to_regulargrid
import pathlib as pl
from shapely.geometry import Point, MultiPoint
import alphashape
import rasterio
from rasterio.plot import show as rioshow
from osgeo import osr #Spatial reference module
#%% Read Model Log
# Load the "ModelLog" sheet of the LSU Lakes D3D run log from the project
# share; each row describes one run (location, dates, etc.).
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13522.101 LSU Lakes", "06_Models", "Delft3DFM", "13522.678.W.SBV.Rev0_LSU Lakes D3D Model Log.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# NOTE(review): dataPath appears unused in this script — confirm before removing
dataPath = "FlowFM_map.nc"
#%% Import using DFM functions
# For each selected run: load the mesh, velocities, sediment and water depth
# at the timestep nearest the run's end date, interpolate mean velocities onto
# a regular grid, and build concave-hull masks for both the regular grid and
# the mesh faces (used later to clip plots to the wetted model area).
modelPlot = range(45, 46) #30
# (earlier hand-picked storm times kept for reference; the run's 'End Date'
# from the log is used instead below)
# stormTime = [datetime.datetime(2005, 8, 29, 14, 0, 0),
#              datetime.datetime(2005, 9, 24, 14, 0, 0),
#              datetime.datetime(2005, 12, 28, 14, 0, 0)]
# stormTime = [datetime.datetime(2005, 1, 14, 18, 0, 0),
#              datetime.datetime(2005, 2, 3, 18, 0, 0),
#              datetime.datetime(2005, 2, 10, 18, 0, 0)]
#stormTime = [datetime.datetime(2005, 12, 31, 18, 0, 0)]
#stormTime = [datetime.datetime(2005, 8, 7, 0, 0, 0)]
# Pre-allocate one slot per run index so results can be stored by run number
ugrid_all = [None] * (max(modelPlot)+1)
X = [None] * (max(modelPlot)+1)
Y = [None] * (max(modelPlot)+1)
U = [None] * (max(modelPlot)+1)
V = [None] * (max(modelPlot)+1)
facex = [None] * (max(modelPlot)+1)
facey = [None] * (max(modelPlot)+1)
ux_mean = [None] * (max(modelPlot)+1)
uy_mean = [None] * (max(modelPlot)+1)
ux = [None] * (max(modelPlot)+1)
uy = [None] * (max(modelPlot)+1)
sed = [None] * (max(modelPlot)+1)
area = [None] * (max(modelPlot)+1)
wd = [None] * (max(modelPlot)+1)
regularMask = [None] * (max(modelPlot)+1)
meshMask = [None] * (max(modelPlot)+1)
alphaPolyList = [None] * (max(modelPlot)+1)
for i in modelPlot:
    #file_nc_map = os.path.join(runLog['Run Location'][i], 'FlowFM', 'output', 'FlowFM_map.nc')
    file_nc_map = os.path.join(runLog['Run Location'][i], 'FlowFM', 'dflowfm', 'output', 'FlowFM_map.nc')
    tSteps = get_timesfromnc(file_nc=file_nc_map, varname='time')
    # Find nearest time step to desired time (the run's logged end date)
    stormTime = [runLog['End Date'][i]]
    tStep = []
    for s in stormTime:
        abs_deltas_from_target_date = np.absolute(tSteps - s)
        tStep.append(np.argmin(abs_deltas_from_target_date))
    # tStep = 50
    # Get Var info
    vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc_map)
    ugrid_all[i] = get_netdata(file_nc=file_nc_map)#,multipart=False)
    facex[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_face_x')
    facey[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_face_y')
    # ux_mean[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_ucxa', timestep=tStep)
    # uy_mean[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_ucya', timestep=tStep)
    ux[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_ucx',
                            timestep=tStep, layer='all')
    uy[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_ucy',
                            timestep=tStep, layer='all')
    sed[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_bodsed',
                             timestep=tStep, station='all')
    area[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_flowelem_ba')
    wd[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_waterdepth',
                            timestep=tStep)
    # Depth-average the per-layer velocities (mean over the layer axis)
    ux_mean[i] = ux[i].mean(axis=2)
    uy_mean[i] = uy[i].mean(axis=2)
    # Interpolate scattered face velocities onto a 50x70 regular grid for quivers
    X[i],Y[i],U[i] = scatter_to_regulargrid(xcoords=facex[i], ycoords=facey[i],
                                            ncellx=50, ncelly=70, values=ux_mean[i][0, :],
                                            maskland_dist=25, method='linear')
    X[i],Y[i],V[i] = scatter_to_regulargrid(xcoords=facex[i], ycoords=facey[i],
                                            ncellx=50, ncelly=70, values=uy_mean[i][0, :],
                                            maskland_dist=25, method='linear')
    # Find convex hull of pcolor data
    # Convert grid to multipoint
    # Create a list of grid points (only wet cells: water depth != 0)
    # alphaPoints = list(map(tuple, np.column_stack((facex[i][:], facey[i][:])).data))
    alphaPoints = list(map(tuple, np.column_stack((facex[i][wd[i][0, :] != 0],
                                                   facey[i][wd[i][0, :] != 0])).data))
    # Find the concave hull
    alphaPoly = alphashape.alphashape(alphaPoints, 0.05)
    alphaPolyList[i] = alphaPoly
    # Create mask of regular grid inside concave hull
    # NOTE(review): the comprehension variable shadows the loop variable i;
    # X[i]/Y[i] are evaluated before the rebinding so this works, but it is
    # fragile — consider renaming the comprehension variable.
    regularMask[i] = [alphaPoly.contains(Point(i[0], i[1]))
                      for i in np.array([X[i].flatten(), Y[i].flatten()]).T]
    # Create mask of mesh grid points inside concave hull
    meshMask[i] = [alphaPoly.contains(Point(i[0], i[1]))
                   for i in np.array([facex[i], facey[i]]).T]
    # Reshape back to regular grid for mask
    regularMask[i] = np.array(regularMask[i]).reshape(X[i].shape)
#%% Plot using DFM functions
# Velocity maps: colour = depth-averaged speed on the model mesh, arrows =
# direction on the regular grid (length capped at qmax), over a Mapbox
# basemap, with a wind-direction arrow inset per panel.
# NOTE(review): the load loop above ran for range(45, 46); modelPlot = [24]
# indexes slots that would still be None — confirm the intended run index.
modelPlot = [24]
# stormDirs = [306, 135, 250]
stormDirs = [3, 346, 342]  # wind directions [deg] for panel titles and arrows
# fig, axes = plt.subplots(nrows=1, ncols=3,subplot_kw={'projection': ccrs.epsg(26915)}, figsize=(13, 6))
fig, axes = plt.subplots(nrows=1, ncols=2, subplot_kw={'projection': ccrs.epsg(26915)}, figsize=(10, 6))
fig.patch.set_facecolor('white')
plt.tight_layout()
plotCount = 1
vmin = 0 # Legend min
vmax = 0.05 # Legend max
scale = 60 # Size of arrows, smaller is bigger #40 # 15
qmax = 0.025 # Scaling threshold. Above this value, arrows are scaled to 1 #0.1
amin = 1 # Minimum arrow length. A dot is used below this value
for tPlot in range(1, 2):
    for i in modelPlot:
        vel_mask = meshMask[i]
        # Colour mesh faces by depth-averaged speed magnitude
        pc = plot_netmapdata(ugrid_all[i].verts[vel_mask],
                             values=np.sqrt(ux_mean[i][tPlot, :][vel_mask]**2 +\
                                            uy_mean[i][tPlot, :][vel_mask]**2),
                             ax=axes[plotCount], linewidth=0.5, cmap="viridis")
        axes[plotCount].set_xlim(675524, 677027)
        axes[plotCount].set_ylim(3365191, 3367182)
        # axes[plotCount].set_xlim(676470, 677021)
        # axes[plotCount].set_ylim(3365939, 3366617)
        # axes.quiver(X[i], Y[i], U[i], V[i], color='w', scale=8)
        # Normalize velocity above limit
        # Using fixed grid
        overGrid_U_Scale = U[i]
        overGrid_V_Scale = V[i]
        # Using model grid
        # overGrid_U_Scale = ux_mean[i][tPlot, :]
        # overGrid_V_Scale = uy_mean[i][tPlot, :]
        # r = speed magnitude on the regular grid
        r = np.power(np.add(np.power(overGrid_U_Scale, 2), np.power(overGrid_V_Scale, 2)), 0.5)
        # Cells faster than qmax get unit-length arrows; slower cells scale linearly
        curPlotMask = np.sqrt((overGrid_U_Scale) ** 2 + (overGrid_V_Scale) ** 2) > qmax
        overGrid_U_Plot = np.zeros(overGrid_U_Scale.shape)
        overGrid_V_Plot = np.zeros(overGrid_V_Scale.shape)
        # Normalize arrows to 1 where not zero
        overGrid_U_Plot[r != 0] = overGrid_U_Scale[r != 0] / r[r != 0]
        overGrid_V_Plot[r != 0] = overGrid_V_Scale[r != 0] / r[r != 0]
        # Scale arrows below qmax
        overGrid_U_Plot[~curPlotMask] = overGrid_U_Plot[~curPlotMask] * (r[~curPlotMask] / qmax)
        overGrid_V_Plot[~curPlotMask] = overGrid_V_Plot[~curPlotMask] * (r[~curPlotMask] / qmax)
        # Using fixed grid
        # Remove arrows outside of convex hull
        overGrid_U_Plot[~regularMask[i]] = np.nan
        overGrid_V_Plot[~regularMask[i]] = np.nan
        qv = axes[plotCount].quiver(X[i], Y[i], overGrid_U_Plot, overGrid_V_Plot, scale=scale,
                                    color='k', headlength=3.5, headwidth=3.5, headaxislength=3.5, width=0.0015, minlength=amin) #200
        # # Using model grid
        # # Remove arrows outside convex hull
        # overGrid_U_Plot[~np.array(meshMask[i])] = np.nan
        # overGrid_V_Plot[~np.array(meshMask[i])] = np.nan
        # qv = axes[plotCount].quiver(facex[i], facey[i], overGrid_U_Plot, overGrid_V_Plot, scale=scale,
        #                             color='k', headlength=3.5, headwidth=3.5, headaxislength=3.5, width=0.0015, minlength=amin) #200
        mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
        ctx.add_basemap(axes[plotCount], source=mapbox, crs='EPSG:26915')
        # extent = ax[plotCount].get_extent()
        # ax[plotCount].set_xticks(np.linspace(extent[0], extent[1], 4))
        # ax[plotCount].set_yticks(np.linspace(extent[2], extent[3], 4))
        # Add model bounds line
        # NOTE(review): uses alphaPoly left over from the load loop rather than
        # alphaPolyList[i] — confirm these refer to the same run.
        axes[plotCount].plot(*alphaPoly.exterior.xy, color='k', linewidth=1)
        pc.set_clim([vmin, vmax])
        # Shared colourbar drawn once, for the first plotted panel only
        if plotCount == 1:
            cbar = fig.colorbar(pc, ax=axes, shrink=0.95)
            cbar.set_label('Depth Averaged Velocity [m/s]')
        axes[plotCount].set_title('Wind Direction: ' + str(stormDirs[tPlot]) + ' Degrees')
        # Add rectangle to show storm arrow (white backing patch)
        xLimDist = axes[plotCount].get_xlim()[1] - axes[plotCount].get_xlim()[0]
        yLimDist = axes[plotCount].get_ylim()[1] - axes[plotCount].get_ylim()[0]
        axes[plotCount].add_patch(patches.Rectangle((axes[plotCount].get_xlim()[0] + 0.7 * xLimDist,
                                                     axes[plotCount].get_ylim()[0] + 0.8 * yLimDist - xLimDist * 0.1),
                                                    xLimDist * 0.2, xLimDist * 0.2,
                                                    facecolor='white', edgecolor='w', linewidth=5))
        # Add wind arrow
        # Set at 80% of axis limits; sin/cos of the compass bearing give the
        # arrow's x/y components
        axes[plotCount].arrow(axes[plotCount].get_xlim()[0] + 0.8 * xLimDist,
                              axes[plotCount].get_ylim()[0] + 0.8 * yLimDist,
                              xLimDist * 0.03 * math.sin(math.radians(stormDirs[tPlot])),
                              xLimDist * 0.03 * math.cos(math.radians(stormDirs[tPlot])),
                              width=xLimDist * 0.015,
                              color='k')
        plotCount = plotCount + 1
plt.show()
fig.savefig(
    'C:/Users/arey/files/Projects/LSU/Modelling/Figures/' +
    'StormVelDisWide.png', bbox_inches='tight', dpi=400)
# %% Setup forebay polygon
# Load the forebay outline shapefile and build a boolean mask of which mesh
# faces fall inside it (used to sum sediment within the forebay).
# forebayPoly = gp.read_file('C:/Users/arey/files/Projects/LSU/ForebayPoly.shp')
forebayPoly = gp.read_file('C:/Users/arey/files/Projects/LSU/ForebayPolyExpand.shp')
# Setup mask. Can only do this once if grids are the same
i = 45  # run index whose mesh faces are tested against the polygon
# NOTE(review): the comprehension rebinds i; facex[i]/facey[i] are evaluated
# first (with i = 45) so this works, but it is fragile — consider renaming.
forebayPolyMask = [forebayPoly.iloc[0, 1].contains(Point(i[0], i[1]))
                   for i in np.array([facex[i], facey[i]]).T]
# %% Read in hist file
# Read the history (station / cross-section) output for each selected run and
# compute the cumulative prescribed source/sink discharge volume.
modelPlot = range(45, 46)
# Pre-allocate one slot per run index
cumSedCS = [None] * (max(modelPlot)+1)
cumSedCS_B = [None] * (max(modelPlot)+1)
SedCS_B = [None] * (max(modelPlot)+1)
cs_Names = [None] * (max(modelPlot)+1)
cumDis = [None] * (max(modelPlot)+1)
cumDisVol = [None] * (max(modelPlot)+1)
for i in modelPlot:
    file_nc_hist = os.path.join(runLog['Run Location'][i], 'FlowFM', 'dflowfm', 'output', 'FlowFM_his.nc')
    # Get Var info
    vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc_hist)
    # (cross-section sediment extraction kept for reference)
    # cumSedCS[i] = get_ncmodeldata(file_nc=file_nc_hist, varname='cross_section_cumulative_Sediment',
    #                               timestep='all', station='all')
    # SedCS_B[i] = get_ncmodeldata(file_nc=file_nc_hist, varname='cross_section_suspended_sediment_transport',
    #                              timestep='all', station='all')
    # cumSedCS_B[i] = SedCS_B[i].cumsum(axis=1)
    #
    # cs_Names[i] = get_ncmodeldata(file_nc=file_nc_hist, varname='cross_section_name',
    #                               station='all')
    cumDis[i] = get_ncmodeldata(file_nc=file_nc_hist, varname='source_sink_prescribed_discharge',
                                timestep='all', station='all')
    # Discharge -> volume: 5 min output interval x 60 s; the 129.6 factor's
    # origin is not evident here — presumably an additional unit/duration
    # scaling; TODO confirm against the model setup
    cumDisVol[i] = cumDis[i] * 5 * 60 * 129.6
    cumDisVol[i] = cumDisVol[i].sum()
# Get Time info
# tStepsHist = get_timesfromnc(file_nc=file_nc_hist, varname='time')
#%% Plot Sediment DFM functions
# Setup for the per-run sediment-mass maps that follow: run selection,
# counters, and a slot list for forebay trapping results.
# (alternate run selections kept for reference)
# modelPlot = [30, 31, 32, 33, 34, 35]
#modelPlot = [30, 35]
# modelPlot = [35]
# stormDirs = [306, 135, 250]
modelPlot = range(45, 46)
# (multi-panel figure setup superseded by per-run figures created in the loop)
# fig, axes = plt.subplots(nrows=1, ncols=3,subplot_kw={'projection': ccrs.epsg(26915)}, figsize=(13, 6))
# fig, axes = plt.subplots(nrows=1, ncols=3,
#                          subplot_kw={'projection': ccrs.epsg(26915)},
#                          figsize=(6, 6))
# fig.patch.set_facecolor('white')
# plt.tight_layout()
plotCount = 1
sedIDX = 0  # index of the sediment fraction to map
Trapping_List = [None] * (max(modelPlot)+1)
for tPlot in range(0, 1):
for i in modelPlot:
# Plot as seperate figures
fig, axes = plt.subplots(nrows=1, ncols=1,
subplot_kw={'projection': ccrs.epsg(26915)},
figsize=(6, 6))
fig.patch.set_facecolor('white')
vel_mask = meshMask[i]
pc = plot_netmapdata(ugrid_all[i].verts[vel_mask],
values=sed[i][tPlot, :, sedIDX][vel_mask],
ax=axes, linewidth=0.5,
cmap="viridis")
# pc = plot_netmapdata(ugrid_all[i].verts[vel_mask],
# values=area[i][:][vel_mask],
# ax=axes, linewidth=0.5,
# cmap="viridis")
# axes[plotCount].set_xlim(675524, 677027)
# axes[plotCount].set_ylim(3365191, 3367182)
axes.set_xlim(676470, 677021)
axes.set_ylim(3365939, 3366617)
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
ctx.add_basemap(axes, source=mapbox, crs='EPSG:26915')
# extent = ax[plotCount].get_extent()
# ax[plotCount].set_xticks(np.linspace(extent[0], extent[1], 4))
# ax[plotCount].set_yticks(np.linspace(extent[2], extent[3], 4))
# Add model bounds line
alphaPoly = alphaPolyList[i]
axes.plot(*alphaPoly.exterior.xy, color='k', linewidth=1)
# Add forebay bounds line
forebayPoly.plot(ax=axes, linewidth=2,
facecolor='none', edgecolor='r')
if i in [44, 45, 36, 37]:
pc.set_clim([0, 2000])
else:
pc.set_clim([0, 200])
cbar = fig.colorbar(pc, ax=axes, shrink=0.95)
cbar.set_label('Available mass of sediment [$kg/m^2$]')
# Calculate sediment in polygon
massSed = sed[i][0, :, 0] * area[i]
massSedSum = np.sum(massSed)
massSed_FB = sed[i][0, forebayPolyMask, 0]\
* area[i][forebayPolyMask]
massSedSum_FB = np.sum(massSed_FB)
# Add title
# axes.set_title('Existing Condition: ' + "{:0.1f}".format(
# (massSedSum_FB / massSedSum) * 100) + '% Trapping')
# axes.set_title(runLog['Short Description'][i] + ': ' + "{:0.1f}".format(
# (massSedSum_FB / massSedSum) * 100) + '% Trapping')
plt.title(runLog['Short Description'][i] + ": {:0.1f}".format(
(massSedSum_FB / cumDisVol[i]) * 100) + '% Trapping')
plt.tight_layout()
Trapping_List[i] = (massSedSum_FB / cumDisVol[i])
# # Add rectangle to show storm arrow
# xLimDist = axes.get_xlim()[1] - axes.get_xlim()[0]
# yLimDist = axes.get_ylim()[1] - axes.get_ylim()[0]
#
# axes.add_patch(patches.Rectangle((axes.get_xlim()[0] + 0.7 * xLimDist,
# axes.get_ylim()[0] + 0.8 * yLimDist - xLimDist * 0.1),
# xLimDist * 0.2, xLimDist * 0.2,
# facecolor='white', edgecolor='w', linewidth=5))
#
#
# # Add wind arrow
# # Set at 80% of axis limits
# axes.arrow(axes.get_xlim()[0] + 0.8 * xLimDist,
# axes.get_ylim()[0] + 0.8 * yLimDist,
# xLimDist * 0.03 * math.sin(math.radians(stormDirs[tPlot])),
# xLimDist * 0.03 * math.cos(math.radians(stormDirs[tPlot])),
# width=xLimDist * 0.015,
# color='k')
plotCount = plotCount + 1
plt.show()
# fig.savefig(
# 'C:/Users/arey/files/Projects/LSU/Modelling/FiguresExpand/' +
# runLog['Run Name'][i] + '.png', bbox_inches='tight', dpi=400)
# plt.show()
# fig.savefig(
# 'C:/Users/arey/files/Projects/LSU/Modelling/Figures/' +
# 'SedZoomPercent.png', bbox_inches='tight', dpi=400)
# %% Create summary table
# One row per model run (30-45): short description plus the forebay
# trapping ratio computed in the plotting cell above.
modelPlot = range(30, 46)
runRows = list(modelPlot)
trappingSumTable = pd.DataFrame()
trappingSumTable['Description'] = runLog['Short Description'][runRows]
trappingSumTable['Trapping'] = np.array(Trapping_List)[runRows]
# %% Calculate the total sediment discharged
# Read the four source/sink .tim files for the selected run, timestamp them,
# and accumulate the discharged sediment quantity per time step.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6,6))
modelPlot = [27]
sourceSinkDat = []
for i in modelPlot:
    for sourceSink in range(1, 5):
        # Read in the sediment discharge file (.tim)
        sourceSinkFile = os.path.join(runLog['Run Location'][i], 'FlowFM', 'input', 'SourceSink0' + str(sourceSink) + '.tim')
        # sep is a regex: use a raw string so '\s' is not an invalid string
        # escape (SyntaxWarning on Python >= 3.12)
        sourceSinkDat.append(pd.read_csv(sourceSinkFile, sep=r'\s+', header=None, names=['time', 'Q', 'conc_sed', 'conc_fluff']))
        # Add datetimes (minutes since the 2005-01-01 reference)
        sourceSinkDat[-1].index = pd.to_datetime(sourceSinkDat[-1].time, unit='m', origin=pd.Timestamp('2005-01-01'))
        # Vol sediment at each time step, multiplied by the time step
        sourceSinkDat[-1]['SedVol'] = sourceSinkDat[-1]['Q'] *\
            sourceSinkDat[-1].loc[:, 'time'].diff() * 60 *\
            sourceSinkDat[-1]['conc_sed']
        # Fix first time step (diff() is NaN there)
        sourceSinkDat[-1].iloc[0, -1] = 0
        # Sediment cumlative sum
        sourceSinkDat[-1].loc[:, 'CumSed'] = sourceSinkDat[-1].loc[:, 'SedVol'].cumsum()
        # Quick look: 24 h rolling sum of discharge (15-minute data)
        sourceSinkDat[-1].loc[:, 'Q'].rolling(24*4).sum().plot()
plt.show()
# %% Read in 15 minute precipitation data
prep_data = pd.read_excel('//srv-mad3/Projects/13522.101 LSU Lakes/06_Models/PCSWMM Model/DataforHistoricRuns/15min_precip_formatted.xlsx',
                          sheet_name='Sheet2')
# Parse the 'DT' column and use it as a DatetimeIndex
prep_data['DateTime'] = pd.to_datetime(prep_data['DT'])
prep_data.set_index('DateTime', inplace=True)
# %% Plot 24h precipitation to find events
# Rolling 24 h sum over 15-minute data (24 * 4 samples per day)
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(12, 6))
prep_data.loc[:, 'per 15min'].rolling(24 * 4).sum().plot()
axes.set_ylabel('24h Precipiation (in)')
axes.set_xlim((pd.to_datetime('20050701'), pd.to_datetime('20060101')))
plt.show()
# fig.savefig(('//srv-mad3/Projects/13522.101 LSU Lakes/06_Models/PCSWMM Model/DataforHistoricRuns/24h_PrecpSum.png')
# %% Plot the sediment discharge and cumlative transport
# NOTE(review): this cell depends on cumSedCS / cumSedCS_B and tStepsHist,
# which are only populated by code that is commented out in the
# "Read in hist file" cell above — re-enable those reads before running.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6,6))
csIDX = 3  # cross-section index to compare against
# Calculate the total sediment discharged
# SS 1/2 is SouthWest, 3/4 is SouthEast
sourceSinkDat[0].loc[:, 'CumSed'].multiply(2).plot(
    label='Discharged Sediment SW')  # in kg
sourceSinkDat[2].loc[:, 'CumSed'].multiply(2).plot(
    label='Discharged Sediment NE')  # in kg
totalSedDis = sourceSinkDat[0].loc[:, 'CumSed'].multiply(2) +\
    sourceSinkDat[2].loc[:, 'CumSed'].multiply(2)
# Plot the total sediment discharge
totalSedDis.plot(label='Total Discharged Sediment')
# `i` here is left over from the earlier read loop (last model index)
axes.plot(tStepsHist, cumSedCS[i][:, csIDX]*2650,
          label='Cross Section Sediment Transport', color='k')  # In M3. Multiple by 2650 to get kg
# axes.plot(tStepsHist, cumSedCS_B[i][:, csIDX],
#           label='Cross Section Sediment Transport B', color='r')  # In M3. Multiple by 2650 to get kg
axes.set_xlabel('Date')
axes.set_ylabel('Sediment [kg]')
# Trapping = 1 - (sediment passing the cross-section / total discharged)
trappedSed = (1 - (cumSedCS[i][-1, csIDX]*2650) /
              totalSedDis[-1])*100
trappedSedB = (1 - (cumSedCS_B[i][-1, csIDX]) /
               totalSedDis[-1])*100
axes.set_title('Sediment Transport: ' + "{:0.1f}".format(trappedSed) + '% Trapping')
plt.legend()
plt.show()
# fig.savefig(
#     'C:/Users/arey/files/Projects/LSU/Modelling/Figures/' +
#     'SedTrap_Concept_NE.png', bbox_inches='tight', dpi=400)
# fig.savefig(
#     'C:/Users/arey/files/Projects/LSU/Modelling/Figures/' +
#     'SedTrap_Concept.png', bbox_inches='tight', dpi=400)

184
LSU_D3D/PENOB_EnvDat.py Normal file
View File

@ -0,0 +1,184 @@
# -*- coding: utf-8 -*-
"""
Script to compare EFDC boundary conditions against observations
"""
import noaa_coops as noaa
from solarpy import beam_irradiance, irradiance_on_plane
from datetime import datetime, timedelta
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
import geopandas as gp
gp.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'
import scipy as sp
import scipy.ndimage
#from astropy.convolution import Gaussian2DKernel
import contextily as ctx
import os
from shapely.geometry import Point
import pickle
# %% Import NOAA CO-OPS data at the battery
# Station 8518750 is The Battery, NY: one year (2012) of air and water
# temperature, requested in GMT and then tagged as timezone-aware UTC.
theBattery = noaa.Station(8518750)
df_air_temperature = theBattery.get_data(
    begin_date="20120101",
    end_date="20121231",
    product="air_temperature",
    units="metric",
    time_zone="gmt")
# Copy the index into a column and make it timezone-aware for later merging
df_air_temperature['DateTime'] = df_air_temperature.index
df_air_temperature['DateTime'] = df_air_temperature['DateTime'].dt.tz_localize('UTC')
df_water_temperature = theBattery.get_data(
    begin_date="20120101",
    end_date="20121231",
    product="water_temperature",
    units="metric",
    time_zone="gmt")
df_water_temperature['DateTime'] = df_water_temperature.index
df_water_temperature['DateTime'] = df_water_temperature['DateTime'].dt.tz_localize('UTC')
# %% Read in EFDC met bounds
# Column definitions below are transcribed from the EFDC input format:
# WEATHER DATA FOR TIME SERIES N
# TASER(M,N) = TIME WHEN THE DATA WAS COLLECTED, IT IS IN DAY IF TCASER(N) IS 86400.
# PATM(M,N) = ATMOSPHERIC [PRESSURE MILLIBAR]
# TDRY(M,N) = DRY BULB ATMOSPHERIC TEMPERATURE [DEGREE C] ISTOPT(2)=1 OR EQUIL TEMP
#             ISTOPT(2)=2; AQ VERSION SETS ISTOPT(2)=1.
# TWET(M,N) = WET BULB ATM TEMP IF IRELH=0, RELATIVE HUMIDITY (BETWEEN 0 AND 1) IF
#             IRELH=1
# RAIN(M,N) = RAIN FALL RATE LENGTH/TIME, IT IS INCH/DAY IF RAINCVT IS SET TO 7.055560e-006
#             (=0.0254/3600.)
# EVAP(M,N) = EVAPORATION RATE IF EVAPCVT>0.
# SOLSWR(M,N) = SOLAR SHORT WAVE RADIATION AT WATER SURFACE ENERGY FLUX/UNIT AREA.
#               IT IS IN W/M2 IF SOLRCVT = 1.
# CLOUD(M,N) = FRACTIONAL CLOUD COVER
# NOTE: delim_whitespace is deprecated in newer pandas; sep=r'\s+' is the
# modern equivalent.
efdc_air_in = pd.read_csv('//ott-athena/E/INPUT_FILES/BC_WEATHER/20191206/files/NC_bc_weather_2012_191206.inp',
                          delim_whitespace=True, skiprows=10,
                          names=['TASER', 'PATM', 'TDRY', 'TWET', 'RAIN', 'EVAP', 'SOLSWR', 'CLOUD'])
# Remove final row — presumably a trailing footer/marker line; confirm
efdc_air_in.drop(efdc_air_in.tail(1).index, inplace=True)
# Correct type
efdc_air_in['TASER'] = efdc_air_in['TASER'].astype(float)
# Create datetime index (TASER is days since 2012-01-01; EST -> UTC)
efdc_air_in['DateTime'] = pd.to_datetime(efdc_air_in['TASER'], unit='D', origin=pd.Timestamp('2012-01-01')).\
    dt.tz_localize('EST').dt.tz_convert('UTC')
efdc_air_in.set_index('DateTime', inplace=True)
# %% Read in EFDC Water Temperature Bounds
# One whitespace-delimited file holding two boundary series (South then
# North), separated by 'AQEA_QA_QC' marker rows; columns L1-L10 appear to be
# the ten vertical layers (TODO confirm layer ordering, surface vs bed).
efdc_water_in = pd.read_csv('//ott-athena/E/INPUT_FILES/TEMPERATURE_FILES/RM_North_RM_South/190423/NC_bc_temp_Yr2012_20190423.inp',
                            delim_whitespace=True, skiprows=16,
                            names=['TASER', 'L1', 'L2', 'L3', 'L4', 'L5', 'L6', 'L7', 'L8', 'L9', 'L10'])
# Find row when the next boundry is reached and create new df
efdc_water_South = efdc_water_in.iloc[0:np.where(efdc_water_in['TASER'] == 'AQEA_QA_QC')[0][0], :].copy()
efdc_water_North = efdc_water_in.iloc[np.where(efdc_water_in['TASER'] == 'AQEA_QA_QC')[0][0]+2:
                                      np.where(efdc_water_in['TASER'] == 'AQEA_QA_QC')[0][1], :].copy()
# Correct type
efdc_water_South.iloc[:, 0] = efdc_water_South.iloc[:, 0].astype(float)
efdc_water_North.iloc[:, 0] = efdc_water_North.iloc[:, 0].astype(float)
# Create datetime index (TASER is days since 2012-01-01; EST -> UTC)
efdc_water_South['DateTime'] = pd.to_datetime(efdc_water_South['TASER'], unit='D', origin=pd.Timestamp('2012-01-01')).\
    dt.tz_localize('EST').dt.tz_convert('UTC')
efdc_water_South.set_index('DateTime', inplace=True)
efdc_water_North['DateTime'] = pd.to_datetime(efdc_water_North['TASER'], unit='D', origin=pd.Timestamp('2012-01-01')).\
    dt.tz_localize('EST').dt.tz_convert('UTC')
efdc_water_North.set_index('DateTime', inplace=True)
# Free the combined frame once split
del efdc_water_in
# %% Import clear-sky solar radiation (solarpy)
vnorm = np.array([0, 0, -1])  # plane normal; solarpy convention for an upward-facing horizontal plane — confirm
h = 0  # sea-level
# date = np.arange(datetime(2012, 1, 1), datetime(2012, 12, 31), timedelta(hours=1)).astype(datetime)
# Hourly instants covering 2012: 24 * 365 = 8760 values
solar_date = [datetime(2012, 1, 1) + timedelta(minutes=i) for i in range(0, 60 * 24 * 365, 60)]
# NOTE(review): this inclusive date_range yields 8737 stamps, not 8760 — it
# does NOT line up one-to-one with solar_date / solar_rad (see plot cell below)
solar_date_utc = pd.date_range(start=datetime(2012, 1, 1), end=datetime(2012, 12, 31), freq='1H').tz_localize('EST')
lat = 40.7128  # The Battery, NYC — northern hemisphere (original comment said "southern")
# solar_rad = [beam_irradiance(h, i, lat) for i in solar_date]
solar_rad = [irradiance_on_plane(vnorm, h, i, lat) for i in solar_date]
# %% Merge the dataframes
# Ordered outer merges on DateTime with forward-fill, so observation and
# EFDC boundary series share one common time axis for scatter comparison.
# met_merged = pd.merge(df_air_temperature, efdc_air_in, how='outer', left_index=True, right_index=True)
met_merged = pd.merge_ordered(df_air_temperature, efdc_air_in, on='DateTime',
                              how='outer', fill_method='ffill')
waterTempSouth_merged = pd.merge_ordered(df_water_temperature, efdc_water_South, on='DateTime',
                                         how='outer', fill_method='ffill')
waterTempNorth_merged = pd.merge_ordered(df_water_temperature, efdc_water_North, on='DateTime',
                                         how='outer', fill_method='ffill')
#%% Plot water temperature at the South boundary vs observations
# (header said "air temperature", but the active lines plot EFDC South
# boundary water temperatures; the air-temp comparison is commented out)
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 6))
# efdc_air_in['TDRY'].plot(ax=axes, color='black', label='EFDC Air Temperature')
# df_air_temperature['air_temp'].plot(ax=axes, color='blue', label='Obs at The Battery')
# axes.set_xlim(pd.to_datetime('2012-08-01'), pd.to_datetime('2012-08-31'))
# efdc_water_North['L10'].astype(float).plot(ax=axes, color='blue', label='EFDC North Boundry L10')
# efdc_water_North['L5'].astype(float).plot(ax=axes, color='red', label='EFDC North Boundry L5')
# efdc_water_North['L1'].astype(float).plot(ax=axes, color='green', label='EFDC North Boundry L1')
efdc_water_South['L10'].astype(float).plot(ax=axes, color='blue', label='EFDC South Boundry L10')
efdc_water_South['L5'].astype(float).plot(ax=axes, color='red', label='EFDC South Boundry L5')
efdc_water_South['L1'].astype(float).plot(ax=axes, color='green', label='EFDC South Boundry L1')
df_water_temperature['water_temp'].plot(ax=axes, color='black', label='Obs at The Battery')
axes.set_xlim(pd.to_datetime('2012-08-01'), pd.to_datetime('2012-08-31'))
axes.set_ylabel('Temperature (deg C)')
axes.set_xlabel('Date')
axes.legend()
# axes.scatter(met_merged['TDRY'], met_merged['air_temp'])
# axes.scatter(waterTempSouth_merged['water_temp'], waterTempSouth_merged['L5'])
# axes.scatter(waterTempNorth_merged['water_temp'], waterTempSouth_merged['L5'])
plt.show()
#%% Plot Solar Radiation
# Compare the EFDC short-wave radiation boundary input (SOLSWR) against the
# clear-sky irradiance computed with solarpy above.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 6))
# Label fixed: this series is short-wave solar radiation, not air temperature
efdc_air_in['SOLSWR'].plot(ax=axes, color='black', label='EFDC Solar Radiation')
# solar_date_utc (inclusive hourly date_range -> 8737 stamps) and solar_rad
# (8760 hourly values) have different lengths; truncate both to the common
# length so matplotlib does not raise a shape-mismatch ValueError.
n = min(len(solar_date_utc), len(solar_rad))
axes.plot(solar_date_utc[:n], solar_rad[:n], color='blue', label='Solar Radiation')
axes.set_xlim(pd.to_datetime('2012-08-01'), pd.to_datetime('2012-08-07'))
axes.set_ylabel('Solar Radiation (w/m^2)')
axes.set_xlabel('Date')
axes.legend()
plt.show()

View File

@ -0,0 +1,160 @@
#%% Plotting script for LSU D3D data
# Alexander Rey, 2022
import os
import pandas as pd
import geopandas as gp
import netCDF4 as nc
import math
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import matplotlib.cm as cm
import datetime as datetime
from scipy.interpolate import LinearNDInterpolator, interp1d
from dfm_tools.get_nc import get_netdata, get_ncmodeldata, plot_netmapdata
from dfm_tools.get_nc_helpers import get_timesfromnc, get_ncfilelist, get_hisstationlist, get_ncvardimlist, get_timesfromnc
import cartopy.crs as ccrs
import contextily as ctx
from dfm_tools.regulargrid import scatter_to_regulargrid
import pathlib as pl
from shapely.geometry import Point, MultiPoint
import alphashape
import rioxarray
import xarray as xr
#%% Read Model Log
# Master run-log spreadsheet: the 'ModelLog' sheet maps run index -> run
# location, short description and dates used throughout this script.
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13522.101 LSU Lakes", "06_Models", "Delft3DFM", "13522.678.W.SBV.Rev0_LSU Lakes D3D Model Log.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
dataPath = "FlowFM_map.nc"
#%% Import using DFM functions
# For each run: open the map output, pick the time step nearest the run's
# end date, read sediment/bed/water-depth fields, build a concave hull of
# the wetted area, and interpolate sediment and bed level onto regular grids.
modelPlot = range(30, 46)
# stormTime = [datetime.datetime(2005, 8, 29, 14, 0, 0),
#              datetime.datetime(2005, 9, 24, 14, 0, 0),
#              datetime.datetime(2005, 12, 28, 14, 0, 0)]
# stormTime = [datetime.datetime(2005, 1, 14, 18, 0, 0),
#              datetime.datetime(2005, 2, 3, 18, 0, 0),
#              datetime.datetime(2005, 2, 10, 18, 0, 0)]
#stormTime = [datetime.datetime(2005, 12, 31, 18, 0, 0)]
#stormTime = [datetime.datetime(2005, 8, 7, 0, 0, 0)]
ugrid_all = [None] * (max(modelPlot)+1)
X2 = [None] * (max(modelPlot)+1)
Y2 = [None] * (max(modelPlot)+1)
sed2 = [None] * (max(modelPlot)+1)
facex = [None] * (max(modelPlot)+1)
facey = [None] * (max(modelPlot)+1)
ux_mean = [None] * (max(modelPlot)+1)
uy_mean = [None] * (max(modelPlot)+1)
ux = [None] * (max(modelPlot)+1)
uy = [None] * (max(modelPlot)+1)
sed = [None] * (max(modelPlot)+1)
bed = [None] * (max(modelPlot)+1)
bed2 = [None] * (max(modelPlot)+1)
wd = [None] * (max(modelPlot)+1)
regularMask2 = [None] * (max(modelPlot)+1)
meshMask = [None] * (max(modelPlot)+1)
alphaPolyList = [None] * (max(modelPlot)+1)
sedIDX = 0  # sediment fraction index
tPlot = 0   # single extracted time step
for i in modelPlot:
    #file_nc_map = os.path.join(runLog['Run Location'][i], 'FlowFM', 'output', 'FlowFM_map.nc')
    file_nc_map = os.path.join(runLog['Run Location'][i], 'FlowFM', 'dflowfm', 'output', 'FlowFM_map.nc')
    tSteps = get_timesfromnc(file_nc=file_nc_map, varname='time')
    # Find nearest time step to desired time (the run's end date)
    stormTime = [runLog['End Date'][i]]
    tStep = []
    for s in stormTime:
        abs_deltas_from_target_date = np.absolute(tSteps - s)
        tStep.append(np.argmin(abs_deltas_from_target_date))
    # Get Var info
    vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc_map)
    facex[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_face_x')
    facey[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_face_y')
    sed[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_bodsed',
                             timestep=tStep, station='all')
    bed[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_flowelem_bl')
    wd[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_waterdepth',
                            timestep=tStep)
    # Create a list of grid points (wet faces only: depth != 0)
    alphaPoints = list(map(tuple, np.column_stack((facex[i][wd[i][0, :] != 0],
                                                   facey[i][wd[i][0, :] != 0])).data))
    # Find the concave hull of the wetted area
    alphaPoly = alphashape.alphashape(alphaPoints, 0.05)
    alphaPolyList[i] = alphaPoly
    # Restrict interpolation to the plotting window (UTM 15N coordinates)
    plottingMask = (facex[i] > 676470) & (facex[i] < 677021) & \
                   (facey[i] > 3365939) & (facey[i] < 3366617)
    # Convert to regular grid for interpolation
    X2[i], Y2[i], sed2[i] = scatter_to_regulargrid(xcoords=facex[i][plottingMask], ycoords=facey[i][plottingMask],
                                                   ncellx=500, ncelly=500, values=sed[i][tPlot, plottingMask, sedIDX],
                                                   maskland_dist=25, method='linear')
    # NOTE(review): this second call overwrites X2/Y2 with the 750x750 bed
    # grid, so the mask built below matches bed2 but not the 500x500 sed2.
    X2[i], Y2[i], bed2[i] = scatter_to_regulargrid(xcoords=facex[i][plottingMask], ycoords=facey[i][plottingMask],
                                                   ncellx=750, ncelly=750, values=bed[i][plottingMask],
                                                   maskland_dist=25, method='cubic')
    # Create mask of regular grid inside concave hull.
    # NOTE(review): the comprehension reuses `i` as its loop variable; the
    # outer `i` is unchanged (comprehension scope) but the shadowing is fragile.
    regularMask2[i] = [alphaPoly.contains(Point(i[0], i[1]))
                       for i in np.array([X2[i].flatten(), Y2[i].flatten()]).T]
    # Reshape back to regular grid for mask
    regularMask2[i] = np.array(regularMask2[i]).reshape(X2[i].shape)
# %% Setup forebay polygon
# forebayPoly = gp.read_file('C:/Users/arey/files/Projects/LSU/ForebayPoly.shp')
forebayPoly = gp.read_file('C:/Users/arey/files/Projects/LSU/ForebayPolyExpand.shp')
# Setup mask. Can only do this once if grids are the same
# Reference run whose mesh defines the mask
i = 45
# True for each mesh face whose centre lies inside the forebay polygon.
# NOTE(review): the comprehension reuses `i` as its loop variable; the
# iterable is evaluated with i=45 first, so this works, but it is fragile.
forebayPolyMask = [forebayPoly.iloc[0, 1].contains(Point(i[0], i[1]))
                   for i in np.array([facex[i], facey[i]]).T]
#%% Plot Sediment DFM functions
# Exports the interpolated bed level (bed2) of each run to a GeoTIFF.
modelPlot = range(30, 46)
sedIDX = 0
for tPlot in range(0, 1):
    for i in modelPlot:
        # NOTE: sedPlot aliases bed2[i]; the NaN blanking below mutates
        # bed2[i] in place (re-running this cell is not idempotent-safe).
        sedPlot = bed2[i]
        # Blank zero sed
        sedPlot[sedPlot == 0] = np.nan
        # Blank outside of hull
        sedPlot[~regularMask2[i]] = np.nan
        # Convert to xarray dataset
        sed2_xr = xr.DataArray(data=sedPlot.T,
                               dims=["x", "y"],
                               coords=dict(
                                   x=(["x"], X2[i][0, :]),
                                   y=(["y"], Y2[i][:, 0])))
        # Transpose for geotiff writing
        sed2_xr = sed2_xr.transpose('y', 'x')
        sed2_xr.rio.write_crs("epsg:26915", inplace=True)
        sed2_xr.rio.to_raster('C:/Users/arey/files/Projects/LSU/Modelling/GeoTiffBed/' +
                              runLog['Run Name'][i] + 'B.tif')

682
Mustique/NCCPlotting_HD.py Normal file
View File

@ -0,0 +1,682 @@
## Plotting Mike21 SW results for SouthShore
# Author: AJMR
# December 22, 2021
# %% Setup Project
from mikeio import Dfsu, Mesh, Dfs2, Dfs0, Dfs1
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import datetime as datetime
import matplotlib as mpl
import matplotlib.pyplot as plt
import time
import matplotlib.animation as animation
import contextily as ctx
import matplotlib.font_manager as fm
# %% Read Model Log
# Mustique run log: maps run index -> run location and names.
pth = pl.Path("//srv-ott3.baird.com/", "Projects", "13539.101 L'Ansecoy Bay, Mustique", "06_Models",
              "Model Log Mustique.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read Map Model Results
# NOTE(review): the first two assignments are dead — only modelPlot = [18]
# takes effect.
modelPlot = [8, 10, 11, 12, 13, 14]
modelPlot = [15, 16, 17]
modelPlot = [18]
dfs_list = [None] * (max(modelPlot) + 1)
ds_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    # Full-domain 3D dfsu result file for each selected run
    dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain3D.dfsu').as_posix())
    dfs_list[m] = dfsIN
    ds_list[m] = dfs_list[m].read()
# %% Read Model Results at point
# Extract 3D model time series at the field-instrument locations plus the
# discharge-pipe shapefile points, one DataFrame per point with a Z_IDX
# layer label column.
modelPlot = [12]
dfs2d_list = [None] * (max(modelPlot) + 1)
dfs3d_list = [None] * (max(modelPlot) + 1)
z_list = [None] * (max(modelPlot) + 1)
z_profile_list = [None] * (max(modelPlot) + 1)
elm_list = [None] * (max(modelPlot) + 1)
elm_df_list = [None] * (max(modelPlot) + 1)
elm2d_list = [None] * (max(modelPlot) + 1)
elm2d_df_list = [None] * (max(modelPlot) + 1)
readPointsName = pd.read_csv("//srv-ott3.baird.com/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/Dataset locations_RevF.csv",
                             delimiter=",")
readPoints = pd.read_csv("//srv-ott3.baird.com/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/Dataset locations_RevF.csv",
                         delimiter=",").iloc[0:, 2:-2].values
# Append the discharge-pipe option A and B points to the extraction list
disPipePtsin = gp.read_file('C:/Users/arey/files/Projects/Mustique/DisPipeA_Points6.shp')
disPipePtsin_np = np.array([disPipePtsin.geometry.x.values, disPipePtsin.geometry.y.values])
readPoints = np.vstack((readPoints, disPipePtsin_np.T))
disPipePtsin = gp.read_file('C:/Users/arey/files/Projects/Mustique/DisPipeB_Points2.shp')
disPipePtsin_np = np.array([disPipePtsin.geometry.x.values, disPipePtsin.geometry.y.values])
readPoints = np.vstack((readPoints, disPipePtsin_np.T))
for m in modelPlot:
    # (a 2D point-extraction variant existed here; removed as dead
    # commented-out code — see fullDomain2D.dfsu in version history)
    ## Read specific points 3D
    # Setup MIKE object
    dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain3D.dfsu').as_posix())
    dfs3d_list[m] = dfsIN
    # Find nearest elements: one row per point, one column per vertical layer
    elem_ids = dfs3d_list[m].find_nearest_profile_elements(readPoints[:, 0], readPoints[:, 1])
    # Read from elements - flatten 2d element array (xy and z) to 1d for reading
    elm_list[m] = dfs3d_list[m].read(elements=elem_ids.flatten().astype(int))
    elm_df_list[m] = [None] * readPoints.shape[0]
    z_profile_list[m] = [None] * readPoints.shape[0]
    # Number of layers per profile; replaces the previous hard-coded 5 so the
    # flattened-index arithmetic follows the actual mesh.
    nLayers = elem_ids.shape[1]
    # Convert to Pandas DataFrame
    # Loop through points, selecting corresponding depths
    for p in range(0, readPoints.shape[0]):
        # Z coordinates of this point's profile elements
        z_profile_list[m][p] = dfs3d_list[m].element_coordinates[elem_ids[p, :].astype(int), 2]
        # Convert each layer to Pandas and tag it with Z and Z_IDX
        for z in range(0, z_profile_list[m][p].shape[0]):
            df_tmp = elm_list[m].isel(idx=p * nLayers + z).to_dataframe()
            df_tmp['Z'] = z_profile_list[m][p][z]
            df_tmp['Z_IDX'] = z
            if z == 0:
                elm_df_list[m][p] = df_tmp
            else:
                # DataFrame.append was removed in pandas 2.0; use pd.concat
                elm_df_list[m][p] = pd.concat([elm_df_list[m][p], df_tmp])
        del df_tmp
# %% Import RBR data
# Ruskin export from RBR logger 041279: 'Wave' sheet, header on the second
# row, timestamps in the first column.
rbr_path = "C:/Users/arey/files/Projects/Mustique/041279_20211203_1541Ruskin.xlsx"
rbr_pd = pd.read_excel(rbr_path, sheet_name='Wave', parse_dates=True, header=1, index_col=0)
# Water level = demeaned depth with a -0.2 m datum offset
depth_series = rbr_pd['Depth']
rbr_pd['WL'] = depth_series - depth_series.mean() - 0.2
# Correct time zone (+4 h; presumably local AST -> UTC — confirm)
rbr_pd.index = rbr_pd.index + datetime.timedelta(hours=4)
# %% Import Nortek Eco data
# Two Nortek ECO current/depth loggers; the CSV header block ends at row 28
# and the first data row is dropped (presumably a units row — confirm).
eco_path_1 = "//srv-ott3/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/02_Nortek ECO/Eco61_20210910184606.csv"
eco_path_2 = "//srv-ott3/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/02_Nortek ECO/Eco214_20210902120703.csv"
eco1_pd = pd.read_csv(eco_path_1, sep=',', header=[28], index_col=0,
                      parse_dates=False)
eco2_pd = pd.read_csv(eco_path_2, sep=',', header=[28], index_col=0,
                      parse_dates=False)
# Drop first row
eco1_pd.drop(eco1_pd.index[0], inplace=True)
eco2_pd.drop(eco2_pd.index[0], inplace=True)
# Timestamps are US-style 12-hour strings
eco1_pd.index = pd.to_datetime(eco1_pd.index, format='%m/%d/%Y %I:%M:%S %p')
eco2_pd.index = pd.to_datetime(eco2_pd.index, format='%m/%d/%Y %I:%M:%S %p')
# Set water level to zero
# (demeaned depth with a +0.2 m offset; note the RBR cell uses -0.2)
eco1_pd['WL'] = eco1_pd['Depth'].astype(float) - eco1_pd['Depth'].astype(float).mean(skipna=True) + 0.2
eco2_pd['WL'] = eco2_pd['Depth'].astype(float) - eco2_pd['Depth'].astype(float).mean(skipna=True) + 0.2
# Correct time zone (+4 h; presumably local AST -> UTC — confirm)
eco1_pd.index = eco1_pd.index + datetime.timedelta(hours=4)
eco2_pd.index = eco2_pd.index + datetime.timedelta(hours=4)
# %% Read in boundary conditions
ds_wl = []
dfs1 = Dfs1("//oak-spillway.baird.com/D/13539.101 L'Ansecoy Bay, Mustique/MIKE3/02_Bounds/NCOM2_EastBound.dfs1")
ds_wl.append(dfs1.read())
dfs1 = Dfs1("//oak-spillway.baird.com/D/13539.101 L'Ansecoy Bay, Mustique/MIKE3/02_Bounds/NCOM2_NorthBound.dfs1")
ds_wl.append(dfs1.read())
dfs1 = Dfs1("//oak-spillway.baird.com/D/13539.101 L'Ansecoy Bay, Mustique/MIKE3/02_Bounds/NCOM2_SouthBound.dfs1")
ds_wl.append(dfs1.read())
dfs1 = Dfs1("//oak-spillway.baird.com/D/13539.101 L'Ansecoy Bay, Mustique/MIKE3/02_Bounds/NCOM2_WestBound.dfs1")
ds_wl.append(dfs1.read())
# %% Plot Water Level at a point
# Observed vs modelled water level at three stations (offshore, nearshore
# east, nearshore west), one subplot each.
# NOTE(review): elm2d_df_list is only populated by the 2D point-read code
# commented out in the extraction cell above, and the lists there were sized
# for modelPlot = [12]; with modelPlot = [13] here, elm2d_df_list[13] fails.
# Re-enable/resize before running.
fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(8, 8), sharex=True)
plotstart = '2021-10-01 00:00:00'
plotend = '2021-10-15 00:00:00'
# plotstart = '2021-10-05 00:00:00'
# plotend = '2021-10-07 00:00:00'
modelPlot = [13]
for m in modelPlot:
    # Pair each model extraction point with its instrument record
    for sub, p in enumerate([5, 9, 6]):
        if p == 9:
            rbr_pd['WL'].plot(ax=axes[sub], color='k', label='Nearshore East Obervations')
        elif p == 5:
            eco1_pd['WL'].plot(ax=axes[sub], color='k', label='Offshore Obervations')
        elif p == 6:
            eco2_pd['WL'].plot(ax=axes[sub], color='k', label='Nearshore West Obervations')
        elm2d_df_list[m][p].plot(y='Surface elevation', ax=axes[sub], label='Model')
        axes[sub].legend(loc="upper left")
        axes[sub].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
        axes[sub].set_ylim(-1.0, 1.0)
        axes[sub].set_ylabel('Water Level (m)')
        axes[sub].set_xlabel('Date')
plt.show()
fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' +
            '/wl_TS_RevB.png', bbox_inches='tight')
# %% Velocity point U and V
# Observed (Nortek ECO, speed/direction decomposed into U/V) vs modelled
# velocity components at one layer (Z_IDX selects upper/middle/lower).
# NOTE(review): elm_df_list was sized/filled for modelPlot = [12] in the
# extraction cell above; with modelPlot = [13] here, elm_df_list[13] fails.
# Z_IDX = 4
# ecoVar1 = 'Upper speed U'
# ecoVar2 = 'Upper speed V'
# ecoVar3 = 'Upper speed'
# ecoVar4 = 'Upper direction'
# plotstart = '2021-09-14 00:00:00'
# # plotend = '2021-09-16 00:00:00'
# plotend = '2021-10-03 00:00:00'
plotstart = '2021-10-01 00:00:00'
# plotend = '2021-09-16 00:00:00'
plotend = '2021-10-21 00:00:00'
# plotstart = '2021-10-05 00:00:00'
# plotend = '2021-10-07 00:00:00'
Z_IDX = 2
ecoVar1 = 'Middle speed U'
ecoVar2 = 'Middle speed V'
ecoVar3 = 'Middle speed'
ecoVar4 = 'Middle direction'
# Z_IDX = 1
# ecoVar1 = 'Lower speed U'
# ecoVar2 = 'Lower speed V'
# ecoVar3 = 'Lower speed'
# ecoVar4 = 'Lower direction'
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(8, 4), sharex=True)
modelPlot = [13]
for m in modelPlot:
    # U = speed * sin(direction), oceanographic convention
    eco1_pd[ecoVar1] = eco1_pd[ecoVar3].astype(float) * \
        np.sin(np.radians(eco1_pd[ecoVar4].astype(float).to_numpy()))
    eco1_pd[ecoVar1].astype(float).plot(ax=axes[0], color='k', label='Observations')
    # NOTE(review): the U panel takes data from point 3 but masks with point
    # 5's Z_IDX; the V panel below uses point 5 for both. Confirm whether
    # elm_df_list[m][3] should be elm_df_list[m][5].
    elm_df_list[m][3].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
        y='U velocity', ax=axes[0], label='Model')
    axes[0].set_ylim(-0.5, 0.5)
    axes[0].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
    axes[0].set_ylabel('Nearshore U (m/s)')
    axes[0].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
    # V = speed * cos(direction)
    eco1_pd[ecoVar2] = eco1_pd[ecoVar3].astype(float) * \
        np.cos(np.radians(eco1_pd[ecoVar4].astype(float).to_numpy()))
    eco1_pd[ecoVar2].astype(float).plot(ax=axes[1], color='k', label='Observations')
    elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
        y='V velocity', ax=axes[1], label='Model')
    axes[1].set_ylim(-0.5, 0.5)
    axes[1].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
    axes[1].set_ylabel('Nearshore V (m/s)')
    axes[1].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
    # eco2_pd[ecoVar1] = eco2_pd[ecoVar3].astype(float) * \
    #     np.sin(np.radians(eco2_pd[ecoVar4].astype(float).to_numpy()))
    # eco2_pd[ecoVar1].astype(float).plot(ax=axes[2], color='k', label='Observations')
    # elm_df_list[m][6].loc[elm_df_list[m][6]['Z_IDX'] == Z_IDX, :].plot(
    #     y='U velocity', ax=axes[2], label='Model')
    # axes[2].set_ylim(-1.5, 1.5)
    # axes[2].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
    # axes[2].set_ylabel('Offshore U (m/s)')
    # axes[2].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
    #
    # eco2_pd[ecoVar2] = eco2_pd[ecoVar3].astype(float) * \
    #     np.cos(np.radians(eco2_pd[ecoVar4].astype(float).to_numpy()))
    # eco2_pd[ecoVar2].astype(float).plot(ax=axes[3], color='k', label='Observations')
    # elm_df_list[m][5].loc[elm_df_list[m][6]['Z_IDX'] == Z_IDX, :].plot(
    #     y='V velocity', ax=axes[3], label='Model')
    #
    # axes[3].set_ylim(-1.00, 1.00)
    # axes[3].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
    # axes[3].set_ylabel('Offshore V (m/s)')
    # axes[3].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
axes[1].set_xlabel('Date')
plt.show()
fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' +
            '/uVel_TS_RevC.png', bbox_inches='tight')
# %% Subplot Map Plotting
# 2x3 panels of surface-layer salinity at six consecutive output time steps,
# repeated for five different start windows (one saved figure per window).
# NOTE: Mapbox access token embedded in the URL — consider moving to config
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckex9vtri0o6619p55sl5qiyv/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
modelPlot = [18]
# Each row is a window of six consecutive output time steps
tStepsPlot = np.zeros((5, 6))
tStepsPlot[0, :] = range(110, 116)
tStepsPlot[1, :] = range(158, 164)
tStepsPlot[2, :] = range(206, 212)
tStepsPlot[3, :] = range(255, 261)
tStepsPlot[4, :] = range(138, 144)
for m in modelPlot:
    vmax = 34.0
    vmin = 33.7
    cmap = 'inferno'
    # Discharge-point marker index depends on the scenario
    if m == 8 or m == 17:
        disPlot = 7
    elif m == 11 or m == 15 or m == 18:
        disPlot = 13
    elif m == 14 or m == 16:
        disPlot = 8
    for a in range(0, 5):
        fig, axes = plt.subplots(nrows=2, ncols=3, sharey=True, sharex=True, figsize=(16, 9))
        ax = axes.flatten()
        for t in range(0, 6):
            # Plot salinity
            # Select salinity data at selected time step
            plotDat = ds_list[m]['Salinity'][int(tStepsPlot[a, t]), :]
            # Identify nan values
            plot_nan = np.isnan(plotDat)
            # Add additional nan values to show bed
            plot_nan = plot_nan | (plotDat < 33.725)
            # Plot all selected values at a given later and not nan
            axDHI = dfs_list[m].plot(plotDat[(dfs_list[m].layer_ids == 0) & (~plot_nan)], plot_type='contourf',
                                     show_mesh=False, cmap=cmap, ax=ax[t], add_colorbar=False,
                                     vmin=vmin, vmax=vmax,
                                     elements=dfs_list[m].element_ids[(dfs_list[m].layer_ids == 0) & (~plot_nan)])
            # Add discharge point
            ax[t].scatter(readPoints[disPlot, 0], readPoints[disPlot, 1], marker='^', color='r', s=20)
            # ax[t].set_xlim(left=696680.7, right=698252.7)
            # ax[t].set_ylim(bottom=1425547.5, top=1426681.8)
            ax[t].set_xlim(left=696508.3, right=698252.7)
            ax[t].set_ylim(bottom=1425267.0, top=1426681.8)
            ctx.add_basemap(ax[t], source=mapbox, crs='EPSG:32620')
            ax[t].title.set_text(str(ds_list[m].time[int(tStepsPlot[a, t])]))
            ax[t].xaxis.set_major_locator(plt.MaxNLocator(4))
            # axes.titlesize = 'x-large'
            # fig.suptitle(runLog['Short Description'][m], fontsize=18)
            # ax[t].set_xlabel('Easting [m]')
            # ax[t].set_ylabel('Northing [m]')
        fig.subplots_adjust(right=0.8)
        # Shared colorbar on the right.
        # NOTE(review): this rebinds `cmap` from str to a Colormap object;
        # matplotlib accepts both, so later iterations still work.
        cmap = mpl.cm.inferno
        norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
        cax = plt.axes([0.85, 0.2, 0.05, 0.6])
        cb1 = mpl.colorbar.ColorbarBase(cax, cmap=cmap,
                                        norm=norm,
                                        orientation='vertical')
        cb1.set_label('Salinity (PSU)')
        plt.show()
        fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' +
                    '/Plume_6_Plot_' + runLog['Run'][m][0:-20] +'_Alt' + str(a) + '.png', bbox_inches='tight')
# %% Map Plotting
# Single-panel salinity map (layer 1) at the final output time step.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckex9vtri0o6619p55sl5qiyv/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
modelPlot = range(8, 9)
for m in modelPlot:
    vmax = 34.2
    vmin = 34
    fig, axes = plt.subplots(figsize=(8, 8))
    # Plot salinity
    # Select salinity data at last time step
    plotDat = ds_list[m]['Salinity'][-1, :]
    # Identify nan values
    plot_nan = np.isnan(plotDat)
    # Add additional nan values to show bed
    plot_nan = plot_nan | (plotDat<34.05)
    # Plot all selected values at a given later and not nan
    axDHI = dfs_list[m].plot(plotDat[(dfs_list[m].layer_ids == 1) & (~plot_nan)], plot_type='contourf',
                             show_mesh=False, cmap='inferno', ax=axes,
                             vmin=vmin, vmax=vmax, label='Salinity (PSU)',
                             elements=dfs_list[m].element_ids[(dfs_list[m].layer_ids == 1) & (~plot_nan)])
    axes.set_xlim(left=696680.7, right=698252.7)
    axes.set_ylim(bottom=1425547.5, top=1426681.8)
    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32620')
    # axes.title.set_text(runLog['Short Description'][m])
    # axes.titlesize = 'x-large'
    # fig.suptitle(runLog['Short Description'][m], fontsize=18)
    axes.set_xlabel('Easting [m]')
    axes.set_ylabel('Northing [m]')
    plt.show()
# %% Animated map
# Frame-by-frame surface-salinity animation for one model, written straight to
# MP4 through matplotlib's FFMpegWriter (Windows-only ffmpeg path below).
plt.rcParams['animation.ffmpeg_path'] = 'C:/Users/arey/Local/ffmpeg-2022-02-14-git-59c647bcf3-full_build/bin/ffmpeg.exe'
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckex9vtri0o6619p55sl5qiyv/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
# Set model to plot
modelPlot = [15, 16, 17]
m = 16
# m = 11
vmax = 34.0
vmin = 33.7
cmap = 'inferno'
metadata = dict(title='Movie Test', artist='Matplotlib',
                comment='Movie support')
writer = animation.FFMpegWriter(fps=2, metadata=metadata, codec='h264')
fig, axes = plt.subplots(figsize=(8, 8))
writer.setup(fig, 'C:/Users/arey/files/Projects/Mustique/' +
             runLog['Run'][m][0:-20] + 'Intake.mp4')
# Animation function
for i in np.arange(1, 384, 1): #384
    # Map the model number to its discharge-point index.
    # (Loop-invariant -- could be hoisted above the loop; kept in place here.)
    if m == 8 or m == 17:
        disPlot = 7
    elif m == 11 or m == 15:
        disPlot = 13
    elif m == 14 or m == 16:
        disPlot = 8
    # fig, axes = plt.subplots(figsize=(8, 8))
    # Clear and redraw on the same axes each frame (much faster than
    # recreating the figure).
    axes.cla()
    # Plot salinity
    # Select salinity data at last time step
    plotDat = ds_list[m]['Salinity'][i, :]
    # Identify nan values
    plot_nan = np.isnan(plotDat)
    # Add additional nan values to show bed
    plot_nan = plot_nan | (plotDat<33.725)
    # Plot all selected values at a given later and not nan
    axDHI = dfs_list[m].plot(plotDat[(dfs_list[m].layer_ids == 0) & (~plot_nan)], plot_type='contourf',
                             show_mesh=False, cmap='inferno', ax=axes, add_colorbar=False,
                             vmin=vmin, vmax=vmax, label='Salinity (PSU)', levels=24,
                             elements=dfs_list[m].element_ids[(dfs_list[m].layer_ids == 0) & (~plot_nan)])
    axes.scatter(readPoints[disPlot, 0], readPoints[disPlot, 1], marker='^', color='r', s=20)
    # axes.set_xlim(left=696680.7, right=698252.7)
    # axes.set_ylim(bottom=1425547.5, top=1426681.8)
    axes.set_xlim(left=696508.3, right=698252.7)
    axes.set_ylim(bottom=1425267.0, top=1426681.8)
    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32620')
    axes.set_xlabel('Easting [m]')
    axes.set_ylabel('Northing [m]')
    axes.title.set_text(str(ds_list[m].time[i]))
    # The static colorbar only needs to be created once, on the first frame.
    if i == 1:
        fig.subplots_adjust(right=0.8)
        cmap = mpl.cm.inferno
        norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
        cax = plt.axes([0.82, 0.25, 0.04, 0.5])
        cb1 = mpl.colorbar.ColorbarBase(cax, cmap=cmap,
                                        norm=norm,
                                        orientation='vertical')
        cb1.set_label('Salinity (PSU)')
    writer.grab_frame()
    # plt.show()
    print(i)
writer.finish()
# %% Plot Bathymetry
# Bed-elevation contour map for model m.
# NOTE(review): bed_plot and bed_nan are defined in an out-of-view cell; the
# comment says "Convert to feet" but no conversion appears here -- presumably
# bed_plot was already converted upstream; confirm.
fig, axes = plt.subplots(figsize=(8, 8))
# Convert to feet and ignore missing
axDHI = dfs_list[m].plot(bed_plot[~bed_nan], plot_type='contourf', show_mesh=False, cmap='magma_r', ax=axes, levels=9,
                         vmin=-80, vmax=0, label='Bed Elevation (ft)',
                         elements=dfs_list[m].element_ids[~bed_nan])
axes.set_xlim(left=427800, right=431000)
axes.set_ylim(bottom=4758000, top=4762500)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32620')
fig.suptitle('Bed Elevation', fontsize=18)
axes.set_xlabel('Easting [m]')
axes.set_ylabel('Northing [m]')
plt.show()
# fig.savefig(
#     '//srv-mad3/Projects/13632.101 South Shore Breakwater/10_Reports&Pres/13632.101.R3 Ph I BOD/Support files/' +
#     'Bed_Elevation.png', bbox_inches='tight', dpi=300)
# %% Read time series
# Load the MIKE .dfs0 point outputs for each run: break points and transect
# points, each converted to a pandas DataFrame indexed by run number.
MIKEds_list = [None] * (max(modelPlot) + 1)
MIKEdsT_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    dfsIN_read = dfsIN.read()
    MIKEds_list[m] = dfsIN_read.to_dataframe()
    dfsTIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                          runLog['Run Name'][m] + '.sw - Result Files', 'TransectPTS.dfs0').as_posix())
    dfsTIN_read = dfsTIN.read()
    MIKEdsT_list[m] = dfsTIN_read.to_dataframe()
# Cleanup unnecessary variables
del dfsIN_read
del dfsIN
del dfsTIN_read
del dfsTIN
# %% Read in Toe and Crest Shapefiles
# Toe-extent vertices plus crest elevations; the crest CSV is re-projected
# from WI State Plane (EPSG:32154) to UTM 16N (EPSG:32616), then each toe
# vertex is joined to its nearest crest point.
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
# Nearest-neighbour spatial join; crest z becomes the 'Crest' column.
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Merge with data
# Join each MIKE time step's point outputs onto the toe/crest GeoDataFrame,
# building one long table (breakTimesOut[m]) with a 'Time' column per snapshot.
breakPointsOut = [None] * (max(modelPlot) + 1)
breakTimesOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEds_list[m].shape[0]):
        # Work on an explicit copy: the original aliased breakwaterPTS_Crest,
        # mutating the shared frame on every iteration (and risking
        # SettingWithCopy surprises); copy() keeps each snapshot independent.
        breakwaterPTS_merge = breakwaterPTS_Crest.copy()
        for i in range(0, MIKEds_list[m].shape[1]):
            # Column names encode '"<FID>" "<vertex>" <parameter>'; strip the
            # quoted prefix to recover the parameter name.
            paramName = MIKEds_list[m].columns.values[i][MIKEds_list[m].columns.values[i].find('"', 5, -1) + 3:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            # Parse feature id and vertex index back out of the column name.
            tmpFID = int(MIKEds_list[m].columns.values[i][1:MIKEds_list[m].columns.values[i].find('"', 1)])
            tmpIND = int(MIKEds_list[m].columns.values[i][5:MIKEds_list[m].columns.values[i].find('"', 5)])
            breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                     (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                    paramName] = MIKEds_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEds_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimesOut[m] = breakwaterPTS_times
# %% Read in Breakwater Transect Points Sh
# Tab-separated list of named transect points, wrapped as a UTM 16N
# GeoDataFrame for the merge below.
breakwaterT = pd.read_csv("C:/Users/arey/files/Projects/SouthShore/Bathy/BreakTransectPTS_Names.csv", sep='\t')
breakwaterT_PTS = gp.GeoDataFrame(breakwaterT, geometry=gp.points_from_xy(breakwaterT.X, breakwaterT.Y),
                                  crs='EPSG:32616')
# %% Merge with data
# Same pattern as the toe-point merge above, but transect columns are keyed
# by point name ('<Name>: <parameter>') instead of FID/vertex.
breakPoints_TOut = [None] * (max(modelPlot) + 1)
breakTimes_TOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEdsT_list[m].shape[0]):
        # Explicit copy -- the original aliased breakwaterT_PTS and mutated the
        # shared frame on every iteration; copy() keeps snapshots independent.
        breakwaterPTS_merge = breakwaterT_PTS.copy()
        for i in range(0, MIKEdsT_list[m].shape[1]):
            # Text after ': ' is the parameter name.
            paramName = MIKEdsT_list[m].columns.values[i][MIKEdsT_list[m].columns.values[i].find(':', 5, -1) + 2:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            # Text before the first ':' is the point name.
            tmpName = MIKEdsT_list[m].columns.values[i][0:MIKEdsT_list[m].columns.values[i].find(':', 1)]
            breakwaterPTS_merge.loc[(breakwaterPTS_merge.Name == tmpName),
                                    paramName] = MIKEdsT_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEdsT_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimes_TOut[m] = breakwaterPTS_times
# %% Format and save
# Export both merged tables (toe and transect) to CSV, one file per run,
# transposed so each point is a column.
for m in modelPlot:
    saveTmp = breakTimesOut[m].copy()
    # Replace the geometry column with plain X/Y coordinates for CSV export.
    saveTmp['X'] = saveTmp.geometry.x
    saveTmp['Y'] = saveTmp.geometry.y
    saveTmp.drop(['vertex_par', 'vertex_p_1', 'angle', 'geometry', 'index_right',
                  'breakCrest_x', 'breakCrest_y', 'Length'], axis=1, inplace=True)
    saveTmp.sort_values(by=['FID', 'vertex_ind'], inplace=True, ignore_index=True)
    # Reorder columns
    colNames = saveTmp.columns.values
    # Move X/Y to the front; drop the trailing duplicate X/Y appended above.
    saveTmp = saveTmp[['X', 'Y', *colNames[0:-2]]]
    saveTmpT = saveTmp.transpose(copy=True)
    # Transect points
    saveTmp2 = breakTimes_TOut[m].copy()
    saveTmp2.drop(['geometry'], axis=1, inplace=True)
    saveTmp2T = saveTmp2.transpose(copy=True)
    saveTmpT.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                    'Toe' + runLog['Short Description'][m] + '.csv')
    saveTmp2T.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                     'Transect' + runLog['Short Description'][m] + '.csv')
# %% Setup path along pipe transect
# Build (time, layer, chainage) arrays of current components along the pipe
# transect from the per-element dataframes, then interpolate maximum speed
# onto a regular vertical grid for plotting.
m = 12
# Each element dataframe stacks all 5 layers; locate the row offset of each
# layer block via the repeated Z index.
df_time_rows = elm_df_list[m][0].Z.drop_duplicates()
df_time_rowsidx = np.where(df_time_rows.index.values[0] == elm_df_list[m][0].index.values)[0]
tSteps = range(100, 2000)
u_plotarray = np.zeros((2000, 5, 81))
for layer in range(0, 5):
    for tStep in tSteps:
        u_plotarray[tStep, layer, :] = [x.iloc[df_time_rowsidx[layer] + tStep, 0] for x in elm_df_list[m]]
# NOTE(review): this reads the SAME column (.iloc[..., 0]) as u_plotarray, so
# v == u and the magnitude below reduces to |u|*sqrt(2). Looks like a
# copy-paste slip -- the v component presumably lives in a different column of
# elm_df_list; confirm against the extraction that built those dataframes.
v_plotarray = np.zeros((2000, 5, 81))
for layer in range(0, 5):
    for tStep in tSteps:
        v_plotarray[tStep, layer, :] = [x.iloc[df_time_rowsidx[layer] + tStep, 0] for x in elm_df_list[m]]
mag_plotarray = np.sqrt(u_plotarray ** 2 + v_plotarray ** 2)
# Rows outside tSteps stay zero, so the max is effectively over steps 100-1999.
max_plotarray = np.max(mag_plotarray, axis=0)
# Find layer depths along transect and interpolate along vertical profile
layer_depth = np.zeros((5, 81))
max_plot_interp = np.zeros((60, 81))
interp_depths = np.arange(-6, 0, 0.1)
# Chainages < 14 are skipped (left as zeros) -- presumably dry/undefined cells.
for chain in range(14, 81):
    layer_depth[:, chain] = elm_df_list[m][chain].Z.unique()
    # NOTE(review): np.interp requires layer_depth ascending (bottom to top);
    # confirm Z.unique() preserves that order for these files.
    max_plot_interp[:, chain] = np.interp(interp_depths, layer_depth[:, chain], max_plotarray[:, chain])
    # Set points below bed as nan
    max_plot_interp[interp_depths<layer_depth[0, chain], chain] = np.nan
# %% Plot along transect
# Pseudocolour section of maximum current speed along the discharge pipe
# (5 m chainage spacing starting at index 14, i.e. 67 columns plotted).
fig, axes = plt.subplots(figsize=(8, 8))
# plt.pcolor(np.arange(0, 67*5, 5), elm_df_list[m][0].Z.unique(), max_plotarray[:, 14:], cmap='jet')
# for chain in range(15, 81):
#     plt.pcolor([(chain-15) * 5, (chain-14) * 5], layer_depth[:, chain-1:chain+1], max_plotarray[:, chain-1:chain+1], cmap='jet')
axes.set_ylabel('Depth (m)')
axes.set_xlabel('Distance along discharge pipe (m). Existing Pipe at 170 m')
plt.pcolor(np.arange(0, 67*5, 5), interp_depths, max_plot_interp[:, 14:], cmap='jet')
plt.colorbar(label="Maximum Current Speed (m/s)")
plt.show()
pth = pl.Path("//srv-ott3.baird.com/", "Projects", "13539.101 L'Ansecoy Bay, Mustique", "06_Models", "01_MIKE3", "00_Figures",
              "Discharge Velocity.png")
fig.savefig(pth.as_posix(), bbox_inches='tight', dpi=300)

View File

@ -0,0 +1,560 @@
#%% Project Setup
import os
import pandas as pd
import geopandas as gp
from scipy.signal import argrelextrema
import numpy as np
import math
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
import matplotlib as mpl
import cartopy.crs as ccrs
import contextily as ctx
import cmocean.cm as cmo
from shapely.geometry import Point, LineString
from xarray.backends import NetCDF4DataStore
import xarray as xr
from datetime import datetime, timedelta
from netCDF4 import num2date
from metpy.units import units
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from metpy.plots import ctables
from siphon.catalog import TDSCatalog
import gsw as gsw
#%% Helper function for finding proper time variable
def find_time_var(var, time_basename='time'):
    """Return the first coordinate of *var* whose name starts with *time_basename*.

    Raises ValueError when no coordinate name matches.
    """
    match = next((c for c in var.coords if c.startswith(time_basename)), None)
    if match is None:
        raise ValueError('No time variable found for ' + var.name)
    return var.coords[match]
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Survey-specific configuration: data folder, site label and survey date.
importPath = '//srv-ott3.baird.com/Projects/13711.101 Caribee (Indigo) Hotel, Barbados/03_Data/03_Field/20220613_EnvData'
siteName = 'Caribee Hotel'
timeLabel = 'June 13, 2022'
importFiles = os.listdir(importPath)
# Classify directory contents: the OBS logger file is a .csv whose name has
# neither 'Summary' nor 'export'; a GPS track arrives either as an 'export'
# .csv or as an .xls workbook.  Later matches overwrite earlier ones.
OBS_File = None
GPS_File = None
for fileName in importFiles:
    isCsv = '.csv' in fileName
    if isCsv and 'Summary' not in fileName and 'export' not in fileName:
        OBS_File = fileName
    elif isCsv and 'export' in fileName:
        GPS_File = fileName
        GPS_Type = 'CSV'
    elif '.xls' in fileName and 'Summary' not in fileName:
        GPS_File = fileName
        GPS_Type = 'xls'
#%% Obs Import Data
# Read the OBS logger CSV, drop trailing metadata rows, and index by a
# timezone-aware timestamp.
if OBS_File is not None:
    Obs_dat = pd.read_csv(importPath + '/' + OBS_File, skiprows=0, header=0)
    # Drop rows with metadata
    Obs_dat =Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()]
    # Set Time Zone for Sensor. Sometimes it is set to UTC, sometimes it is set to EST
    # NOTE(review): localized to America/Barbados here -- verify against how
    # this particular logger was configured for the survey.
    Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados')
    # Set Index
    Obs_dat.set_index('DateTime', inplace=True)
#%% GPS Import Data
# Load the GPS track (two vendor formats), build a time-indexed GeoDataFrame
# in WGS84, then project to UTM 21N.
if GPS_File is not None:
    if GPS_Type == 'CSV':
        GPS = pd.read_csv(importPath + '/' + GPS_File,
                          header=None, names=['Index', 'Date1', 'Time1', 'Date2', 'Time2', 'Northing', 'North', 'Easting', 'East', 'Var1', 'Var2'])
        #convert GPS data to geodataframe
        # Easting is negated: the CSV stores unsigned western longitude.
        GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))
        GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str)).dt.tz_localize('America/Barbados')
    elif GPS_Type == 'xls':
        GPS = pd.read_excel(importPath + '/' + GPS_File, header=0)
        #convert GPS data to geodataframe
        GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(GPS.x, GPS.y, crs="EPSG:4326"))
        # The xls export carries UTC timestamps, unlike the CSV export.
        GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['time']).dt.tz_localize('UTC')
    # Set Datetime as index
    GPS_gdf.set_index('DateTime', inplace=True)
    # Sort by time
    GPS_gdf.sort_index(inplace=True)
    # Convert to UTM
    GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32621")
#%% Read in site shapefile
# NOTE(review): the path mixes a literal backslash into a forward-slash UNC
# path ('...EnvData\SiteBounds...'); works on Windows, breaks elsewhere.
siteShp = gp.read_file('//srv-ott3.baird.com/Projects/13711.101 Caribee (Indigo) Hotel, Barbados/03_Data/03_Field/20220613_EnvData\SiteBounds_Caribee.shp')
siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")
#%% Merge GPS to RBR
# Process RBR into datetime
if OBS_File is not None:
    # Merge with GPS as dataframe
    # Nearest-time join with a 5-minute tolerance; unmatched rows keep NaN
    # geometry.
    Obs_geo = pd.merge_asof(Obs_dat, GPS_gdf,
                            left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('300s'))
    Obs_geo = gp.GeoDataFrame(Obs_geo, geometry=Obs_geo.geometry, crs="EPSG:32621")
#%% Find and setup key plotting details
# Shaded Water
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Sat water
# mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckekcw3pn08am19qmqbhtq8sb/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
axXlim = (218050, 218345)
axYlim = (1446635, 1446802)
# GFS extraction point (site longitude/latitude).
GFS_Lon = -59.5992624
GFS_Lat = 13.0733928
# Flag samples falling inside the site polygon.
# NOTE(review): iloc[3, 1] hard-codes row 3 / column 1 of the shapefile as the
# site geometry -- verify that index is stable for this layer.
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[3, 1])
# Set min and max times using conductivity
if Obs_geo['inArea'].any():
    # First and last times from area in shapefile
    minTime = Obs_geo[Obs_geo['inArea']==True].index[0]
    maxTime = Obs_geo[Obs_geo['inArea']==True].index[-1]
else:
    # First and last times if no GPS data
    # Trim 20 samples from each end to skip deployment/recovery transients.
    minTime = Obs_geo.index[20]
    maxTime = Obs_geo.index[-20]
# Floor the survey start time to the preceding 6-hour GFS analysis cycle.
# GFS analyses exist only at 00/06/12/18 UTC, so the flooring must use the
# UTC clock: the original subtracted minTime.hour % 6 (LOCAL Barbados hour,
# UTC-4), which lands metDate between cycles and breaks the THREDDS dataset
# name lookup ('gfs_4_%Y%m%d_%H%M_000.grb2') in the download loop below.
minTimeUTC = minTime.tz_convert('UTC')
metDate = minTimeUTC - timedelta(
    hours=minTimeUTC.hour % 6,
    minutes=minTimeUTC.minute,
    seconds=minTimeUTC.second,
    microseconds=minTimeUTC.microsecond)
#%% GFS Met Import
# Download GFS 0.5-degree analysis fields (plus the 6 h precipitation
# forecast) at the site point for each 6-hourly cycle in the window.
var = ['Temperature_surface', 'Wind_speed_gust_surface',
       'u-component_of_wind_height_above_ground', 'v-component_of_wind_height_above_ground']
var_precp = ['Total_precipitation_surface_6_Hour_Accumulation']
temp_1d = []
gust_1d = []
wndu_1d = []
wndv_1d = []
prep_1d = []
time_1d = []
# Set times to download
### USED TO BE -18/ +6 CHECK ###
# Five cycles: metDate-30h .. metDate-6h inclusive at 6 h spacing.
startdate = metDate - timedelta(hours=30)
enddate = metDate - timedelta(hours=6)
date_list = pd.date_range(startdate, enddate, freq='6H')
# Loop through dates
for date in date_list:
    # Base URL for 0.5 degree GFS data
    best_gfs = TDSCatalog('https://www.ncei.noaa.gov/thredds/catalog/model-gfs-g4-anl-files/' +
                          date.strftime('%Y%m') + '/' + date.strftime('%Y%m%d') + '/' + 'catalog.xml')
    # Generate URLs for specific grib file
    # _000 = analysis; _006 = 6 h forecast (carries the precip accumulation).
    best_ds = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_000.grb2']
    best_ds_precp = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_006.grb2']
    # Format the query parameters
    ncss = best_ds.subset()
    query = ncss.query()
    ncss_precp = best_ds_precp.subset()
    query_precp = ncss_precp.query()
    # Extract data from specific point
    query.lonlat_point(GFS_Lon, GFS_Lat).time(date)
    query.accept('netcdf')
    query.variables(var[0], var[1], var[2], var[3])
    # 10 m winds.
    query.vertical_level(10)
    data = ncss.get_data(query)
    data = xr.open_dataset(NetCDF4DataStore(data), drop_variables='height_above_ground4')
    query_precp.lonlat_point(GFS_Lon, GFS_Lat).time(date + timedelta(hours=6))
    query_precp.accept('netcdf')
    query_precp.variables(var_precp[0])
    data_precp = ncss_precp.get_data(query_precp)
    data_precp = xr.open_dataset(NetCDF4DataStore(data_precp))
    temp_3d = data[var[0]]
    gust_3d = data[var[1]]
    wndu_3d = data[var[2]]
    wndv_3d = data[var[3]]
    prep_3d = data_precp[var_precp[0]]
    # Read the individual point (with units) and append to the list
    temp_1d.append(temp_3d.metpy.unit_array.squeeze())
    gust_1d.append(gust_3d.metpy.unit_array.squeeze())
    wndu_1d.append(wndu_3d.metpy.unit_array.squeeze())
    wndv_1d.append(wndv_3d.metpy.unit_array.squeeze())
    prep_1d.append(prep_3d.metpy.unit_array.squeeze())
    time_1d.append(find_time_var(temp_3d))
#%% Process Met Data
# 24h Precipitation Total
# Time weighted average of last two points for everything else
# Four 6 h accumulations summed = 24 h precipitation total.
met_prep = prep_1d[0] + prep_1d[1] + prep_1d[2] + prep_1d[3]
# Interpolation weights across the 6 h window containing the survey start.
# NOTE(review): uses .seconds (OK here since both deltas are positive and
# < 1 day, but .total_seconds() would be the robust choice), and indexes the
# downloaded lists at [3]/[4] -- the "USED TO BE -18/+6" comment in the
# download cell suggests the window changed; confirm these indices still
# bracket minTime.
timeWeight1 = minTime-metDate
timeWeight2 = (metDate+timedelta(hours=6))-minTime
timeWeight1 = timeWeight1.seconds/21600
timeWeight2 = timeWeight2.seconds/21600
met_gust = gust_1d[3] * timeWeight1 + gust_1d[4] * timeWeight2
met_temp = temp_1d[3] * timeWeight1 + temp_1d[4] * timeWeight2
# Speed and direction from the time-weighted u/v components; atan2 gives a
# math-convention angle wrapped to [0, 360).
met_wind = math.sqrt((wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0)* timeWeight2) ** 2 +
                     (wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0)* timeWeight2) **2 )
met_wdir = math.degrees(math.atan2(
    wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0)* timeWeight2,
    wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0)* timeWeight2)) % 360
#%% Add PSU and depth units
# Practical salinity from conductivity/temperature/pressure (TEOS-10 gsw),
# then depth from pressure: dbar * 1e4 -> Pa, divided by (rho * g).
Obs_geo['PSU'] = gsw.conversions.SP_from_C(Obs_geo['CH3:Conductivity(mS/cm)'].values,
                                           Obs_geo['CH1:Temperature(degC)'].values,
                                           Obs_geo['CH0:Pressure(dbar)'].values)
Obs_geo['Depth'] = (Obs_geo['CH0:Pressure(dbar)'].astype(float)*10000)/(gsw.rho(Obs_geo['PSU'].values,
                                                                                Obs_geo['CH1:Temperature(degC)'].values,
                                                                                Obs_geo['CH0:Pressure(dbar)'].values) * 9.81)
#%% Plot time series for Geo data
# Six stacked parameter time series (raw + 1-minute smoothed), with turbidity
# peaks labelled, and summary statistics accumulated into dataTable:
# rows 0-3 met data, rows 4-6 offshore wave stats (filled elsewhere, stay 0
# here and render as '--' downstream), rows 7-12 water-quality parameters,
# row 13 the observation date.
fontprops = fm.FontProperties(size=25)
fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(19, 25), constrained_layout=True)
dataTable = np.zeros([14, 4])
roundIDX = [1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1]
params = ['Depth', 'PSU', 'CH8:Oxygen_Saturation(%)', 'CH1:Temperature(degC)',
          'CH6:Turbidity(NTU)', 'CH2:Chlorophyll_a(ppb)']
paramName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ sat [%]', 'Temperature [degC]',
             'Turbidity [FTU]', 'Chl-a [ppb]']
parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae]
# paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0]
# paramMax = [1.0, 36.0, 34.0, 31.0, 20.0, 1.0]
paramMin = [0.0, 31.0, 100, 26.0, 0, 0]
paramMax = [1.0, 34.0, 130, 30.0, 600.0, 12000]
fig.patch.set_facecolor('white')
# fig.tight_layout(pad=1.05)
fontprops = fm.FontProperties(size=25)
# BUGFIX: Kelvin -> Celsius conversion is -273.15, not -272.15 (the original
# reported air temperature 1 degC too warm).
dataTable[0, 0] = met_temp.m.item(0)-273.15
dataTable[1, 0] = met_wind
dataTable[2, 0] = met_wdir
dataTable[3, 0] = met_prep.m.item(0)
ilocs_max = []
ilocs_max_pts = []
OBS_mask = []
for paramIDX, param in enumerate(params):
    Obs_geo.loc[minTime:maxTime, param].plot(
        ax=axes[paramIDX], label='1 Second Observations', color='lightgrey')  # Note the space in the col name
    # Create mask for RBR data based on time and parameter minimum
    OBS_mask.append(((Obs_geo.index>minTime) &
                     (Obs_geo.index<maxTime) &
                     (Obs_geo[param]>paramMin[paramIDX])))
    # 12-sample Nuttall-windowed rolling mean (~1 minute at 5 s sampling).
    OBS_smoothed = Obs_geo.loc[OBS_mask[paramIDX], param].rolling(
        12, win_type='nuttall',center=True).mean()
    OBS_smoothed.plot(
        ax=axes[paramIDX], label='1 Minute Average', color='black',
        linewidth=3)
    # Find the local maximums for Turbidity
    if param == 'CH6:Turbidity(NTU)':
        ilocs_max.append(argrelextrema(OBS_smoothed.values,
                                       np.greater_equal, order=6, mode='wrap')[0])
        # Add start and end points?
        # ilocs_max = np.insert(ilocs_max, 0, 10)
        # ilocs_max[-1] = len(OBS_smoothed.values)
        ilocs_max_pts.append(OBS_smoothed.iloc[ilocs_max[paramIDX]].index.values)
        # Add labels if GPS data is available
        if GPS_File is not None:
            # axes[paramIDX].scatter(OBS_smoothed.iloc[
            #     ilocs_max[paramIDX]].index, np.ones(len(ilocs_max[paramIDX])) * 30, 75,
            #     color='blue')
            for a in range(0, len(ilocs_max[paramIDX])):
                axes[paramIDX].annotate(str(a+1), (ilocs_max_pts[paramIDX][a], 50), fontsize=30)
    else:
        # Keep the per-parameter lists aligned with paramIDX.
        ilocs_max.append(None)
        ilocs_max_pts.append(None)
    dataTable[paramIDX+7, 0] = OBS_smoothed.mean(skipna=True)
    dataTable[paramIDX+7, 1] = OBS_smoothed.std(skipna=True)
    dataTable[paramIDX+7, 2] = max(OBS_smoothed.min(skipna=True), 0)
    dataTable[paramIDX+7, 3] = OBS_smoothed.max(skipna=True)
    axes[paramIDX].set_ylabel(paramName[paramIDX])
    axes[paramIDX].set_title(paramName[paramIDX])
    axes[paramIDX].set_xlabel('')
    axes[paramIDX].set_ylim(paramMin[paramIDX], paramMax[paramIDX])
    axes[paramIDX].legend(loc='upper left')
    # axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))
# Format Data Table: one rounded 'Mean' column plus a 'Min / Max' string
# column per row; rows whose max is exactly 0 (the unfilled offshore wave
# stats) render as '--'.
dataTableFormat_mean = []
dataTableFormat_maxmin = []
for d in range(0, len(roundIDX)):
    dataTableFormat_mean.append(round(dataTable[d, 0], roundIDX[d]))
    if dataTable[d, 3] == 0:
        dataTableFormat_maxmin.append('--')
    else:
        dataTableFormat_maxmin.append(str(round(dataTable[d, 2], roundIDX[d])) + ' / ' + str(round(dataTable[d, 3], roundIDX[d])))
dfOut = pd.DataFrame(dataTable[:, :])
dfOutFormat = pd.DataFrame([dataTableFormat_mean, dataTableFormat_maxmin]).transpose()
# Change the column names
dfOut.columns =['Mean', 'Standard Deviation', 'Min', 'Max']
# NOTE(review): '%#d' (no-leading-zero day) is Windows-only strftime; use
# '%-d' on POSIX if this script ever runs off Windows.
dfOutFormat.columns = [np.array([minTime.strftime('%B %#d, %Y'), minTime.strftime('%B %#d, %Y')]),
                       np.array(['Mean', 'Min / Max'])]
# Change the row indexes
dfOut.iloc[13, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
            'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
            'Mean Wave Direction Offshore [deg]', 'Depth [m]', 'Salinity [PSU]',
            'Dissolved O₂ saturation [%]', 'Temperature [degC]',
            'Turbidity [FTU]', 'Chl-a [ppb]', 'Observation Date']
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
fig.suptitle(siteName + ' (' + timeLabel + ')', fontsize=30)
plt.show()
# outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]
outPath = importPath
if not os.path.exists(outPath):
    os.mkdir(outPath)
dfOut.to_excel(outPath + '/Summary_Stats_' + siteName + '.xlsx')
dfOutFormat.to_excel(outPath + '/Summary_StatsFormat_' + siteName + '.xlsx')
dfOut.to_csv(outPath + '/Summary_Stats_' + siteName + '.csv')
if not os.path.exists(outPath + '/Figures'):
    os.mkdir(outPath + '/Figures')
fig.savefig(outPath + '/Figures/SummaryTimeSeries_' + siteName + '.pdf',
            bbox_inches='tight')
fig.savefig(outPath + '/Figures/SummaryTimeSeries_' + siteName + '.png',
            bbox_inches='tight', dpi=500)
#%% Plot Maps
# Six-panel spatial map (one panel per parameter) of the GPS-tracked
# observations, coloured by value, with turbidity peaks labelled, a scale bar
# and a north arrow on each panel.
fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(25, 19), constrained_layout=True)
ax = axes.flat
fig.patch.set_facecolor('white')
# fig.tight_layout(pad=1.05)
fontprops = fm.FontProperties(size=25)
x, y, arrow_length = 0.95, 0.93, 0.20
plt.rcParams.update({'font.size': 22})
# Alternative auto-fitted extents around the track (unused; fixed site
# extents axXlim/axYlim are applied below instead).
axXlimTT = (Obs_geo.loc[minTime:maxTime].geometry.x.min()-100,
            Obs_geo.loc[minTime:maxTime].geometry.x.max()+100)
axYlimTT = (Obs_geo.loc[minTime:maxTime].geometry.y.min()-100,
            Obs_geo.loc[minTime:maxTime].geometry.y.max()+200)
plt.setp(axes, xlim=axXlim, ylim=axYlim)
# plt.setp(axes, xlim=axXlimTT, ylim=axYlimTT)
# Plot the RBR observations
# Salinity
for paramIDX, param in enumerate(params):
    Obs_geo.loc[minTime:maxTime].plot(
        column=param, ax=ax[paramIDX], vmin=paramMin[paramIDX], vmax=paramMax[paramIDX],
        legend=True, legend_kwds={'label': paramName[paramIDX]},
        cmap=parmCmap[paramIDX], markersize=20)
    ctx.add_basemap(ax[paramIDX], source=mapbox, crs='EPSG:32621')
    # Add time labels
    # plt.scatter(RBR_Obs_geo.loc[RBR_mask].iloc[
    #     ilocs_max, :].geometry.x,
    #     RBR_Obs_geo.loc[RBR_mask].iloc[
    #     ilocs_max, :].geometry.y, 75, marker='o', color='black')
    # Label the numbered turbidity peaks at their track positions (requires
    # GPS data and valid geometry).
    if (not Obs_geo.geometry.isnull().all()) &\
            (GPS_File is not None) & (param == 'CH6:Turbidity(NTU)'):
        for a in range(0, len(ilocs_max[paramIDX])):
            ax[paramIDX].annotate(str(a + 1), (Obs_geo.loc[OBS_mask[paramIDX]].iloc[
                ilocs_max[paramIDX][a], :].geometry.x,
                Obs_geo.loc[OBS_mask[paramIDX]].iloc[
                ilocs_max[paramIDX][a], :].geometry.y), fontsize=30)
    ax[paramIDX].set_title(paramName[paramIDX])
    # ax[paramIDX].set_ylabel('UTM 21N [m]')
    # ax[paramIDX].set_xlabel('UTM 21N [m]')
    ax[paramIDX].locator_params(axis='y', nbins=3)
    ax[paramIDX].ticklabel_format(useOffset=False, style='plain', axis='both')
    ax[paramIDX].get_xaxis().set_ticks([])
    ax[paramIDX].get_yaxis().set_ticks([])
    #Add scale-bar
    scalebar = AnchoredSizeBar(ax[paramIDX].transData,
                               100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
    ax[paramIDX].add_artist(scalebar)
    # North arrow in axes coordinates.
    ax[paramIDX].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
                          arrowprops=dict(facecolor='black', width=6, headwidth=30),
                          ha='center', va='center', fontsize=35,
                          xycoords=ax[paramIDX].transAxes)
fig.suptitle(siteName + ' (' + timeLabel + ')', fontsize=30)
plt.show()
#outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]
outPath = importPath
if not os.path.exists(outPath):
    os.mkdir(outPath)
if not os.path.exists(outPath + '/Figures'):
    os.mkdir(outPath + '/Figures')
fig.savefig(outPath + '/Figures/SummaryMap_' + timeLabel + '_' + siteName + '.pdf',
            bbox_inches='tight')
fig.savefig(outPath + '/Figures/SummaryMap_' + timeLabel + '_' + siteName + '.png',
            bbox_inches='tight', dpi=500)
#%% Summary Sheet
# Merge the per-survey formatted stats workbooks into one wide table per site
# (three sites interleaved in importPaths, hence the stride-3 index arrays).
# NOTE(review): importPaths and siteNames are not defined in this script's
# visible scope -- they appear to come from a companion script run in the same
# interactive session; confirm before running standalone.
plotIDXsLoop = []
# plotIDXsLoop.append([0, 3, 6, 9])
# plotIDXsLoop.append([1, 4, 7, 10])
# plotIDXsLoop.append([2, 5, 8, 11])
# plotIDXsLoop.append([np.arange(0, 21*3, 3)])
# plotIDXsLoop.append([np.arange(1, 21*3, 3)])
# plotIDXsLoop.append([np.arange(2, 21*3, 3)])
for i in range(0, 3):
    summTable = None
    # plotIDXs = plotIDXsLoop[i]
    plotIDXs = np.arange(i, 21, 3)
    for s, plotIDX in enumerate(plotIDXs):
        ## Define master import path
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]
        obsStatsIN = pd.read_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx', header=[0, 1], index_col=0)
        # Harmonise row labels that changed between survey campaigns.
        if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
            obsStatsIN.rename({'Turbidity [NTU]': 'Turbidity [FTU]'}, inplace=True)
        if plotIDX > 11:
            obsStatsIN.rename({'Chl-a [ppb]': 'Chl-a [ug/l]'}, inplace=True)
        if s == 0:
            summTable = obsStatsIN
        else:
            summTable = summTable.join(obsStatsIN)
    # Remove -999 with nan
    summTable.replace(-999, np.nan, inplace=True)
    summTable.to_excel('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/Summary_StatsMerge_' + siteName + '.xlsx')
#%% Summary Plot
# Scatter each summary statistic against survey date, one figure per site.
plotvars = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
            'Significant Wave Height [m]', 'Salinity [PSU]',
            'Dissolved O₂ [%]', 'Temperature [degC]',
            'Turbidity [FTU]', 'Chl-a. [ug/L]']
for i in range(0, 3):
    summTable = None
    plotIDXs = np.arange(i, 21, 3)
    plotDates = []
    plotTable = np.empty([10, len(plotIDXs)])
    for s, plotIDX in enumerate(plotIDXs):
        ## Define master import path
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]
        obsStatsIN = pd.read_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
        # Row layouts differ between survey campaigns: surveys 9-11 carry only
        # met + turbidity; <12 include the wave rows; >=12 use the short layout.
        if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
            plotTable[0:3, s] = obsStatsIN.iloc[0:3, 1]
            plotTable[8, s] = obsStatsIN.iloc[7, 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[8, 1], '%B %d, %Y'))
        elif plotIDX<12:
            plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 13], 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[14, 1], '%B %d, %Y'))
        else:
            plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 12], 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[13, 1], '%B %d, %Y'))
    fig, axes = plt.subplots(nrows=5, ncols=2, figsize=(19, 25), sharex=True)
    fig.patch.set_facecolor('white')
    fig.tight_layout(pad=3)
    ax = axes.flat
    # Replace zero (unfilled cells) with nan so they drop out of the scatter
    plotTable[plotTable < 0.00001] = np.nan
    for v in range(0, 10):
        ax[v].scatter(plotDates, plotTable[v, :], 250)
        ax[v].set_ylabel(plotvars[v])
    fig.suptitle(siteName, fontsize=35)
    plt.gcf().autofmt_xdate()
    plt.gcf().align_ylabels()
    plt.show()
    fig.savefig('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + siteName + '.pdf',
                bbox_inches='tight')
    fig.savefig('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + siteName + '.png',
                bbox_inches='tight', dpi=500)

View File

@ -0,0 +1,685 @@
#%% Project Setup
import os
import pandas as pd
import geopandas as gp
from scipy.signal import argrelextrema
import numpy as np
import math
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
import matplotlib as mpl
import cartopy.crs as ccrs
import contextily as ctx
import cmocean.cm as cmo
from shapely.geometry import Point, LineString
from xarray.backends import NetCDF4DataStore
import xarray as xr
from datetime import datetime, timedelta
from netCDF4 import num2date
from metpy.units import units
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from metpy.plots import ctables
from siphon.catalog import TDSCatalog
import gsw as gsw
#%% Helper function for finding proper time variable
def find_time_var(var, time_basename='time'):
    """Return the first coordinate of *var* whose name begins with *time_basename*.

    Parameters
    ----------
    var : object with a ``coords`` mapping and a ``name`` attribute
        (e.g. an xarray DataArray).
    time_basename : str
        Prefix identifying the time coordinate. Defaults to ``'time'``.

    Raises
    ------
    ValueError
        If no coordinate name starts with *time_basename*.
    """
    matches = (var.coords[name] for name in var.coords
               if name.startswith(time_basename))
    for coord in matches:
        return coord
    raise ValueError('No time variable found for ' + var.name)
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Survey configuration.  The four lists below are parallel (21 entries each)
# and are indexed together by the site-processing loop: entry k describes one
# site visit (folder of raw exports, site name, campaign label, wave file).
# UNC paths to the folders holding each survey's raw sensor/GPS exports.
importPaths = ['//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211006 WQ Pre_Construction/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211006 WQ Pre_Construction/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211006 WQ Pre_Construction/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211021 WQ During Construction/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211021 WQ During Construction/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211021 WQ During Construction/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211104 WQ Post Construction Monitoring/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211104 WQ Post Construction Monitoring/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211104 WQ Post Construction Monitoring/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211125 WQ Post Construction Monitoring/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211125 WQ Post Construction Monitoring/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20211125 WQ Post Construction Monitoring/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220208 WQ February/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220208 WQ February/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220208 WQ February/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220318 WQ sampling by LNW with WiMo/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220318 WQ sampling by LNW with WiMo/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220318 WQ sampling by LNW with WiMo/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220414 WQ sampling/Great House',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220414 WQ sampling/Greensleeves',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220414 WQ sampling/Old Queens Fort']
# Site name for each entry in importPaths (3-site cycle repeated 7 times).
siteNames = ['Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort']
# Construction-phase / month label for each survey; used in figure titles
# and output file names.
timeLabels= ['Before Construction',
'Before Construction',
'Before Construction',
'During Construction',
'During Construction',
'During Construction',
'After Construction',
'After Construction',
'After Construction',
'Monitoring',
'Monitoring',
'Monitoring',
'February',
'February',
'February',
'March',
'March',
'March',
'April',
'April',
'April']
# Offshore wave time-series file for each survey; None where no nowcast
# output is available (those surveys get -999 wave placeholders downstream).
# NOTE(review): the 'holetown_mid_15m' entries lack a '.bts' extension unlike
# the spawnee entries — confirm the paths before re-enabling surveys 0-8.
wave_bts_file = [
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None]
# Process surveys 12..20 (February through April campaigns) one at a time.
# NOTE(review): the loop-body indentation appears to have been lost in this
# diff extract — everything down to the map-saving section belongs inside
# this loop; restore indentation before running.
for s in range(12, 21):
## Define master import path
importPath = importPaths[s]
siteName = siteNames[s]
timeLabel = timeLabels[s]
importFiles = os.listdir(importPath)
# Initialize import variables
OBS_File = None
GPS_File = None
# Classify the files in the survey folder: a plain .csv is the sensor (OBS)
# log, a .csv containing 'export' is a GPS track, a .xls (not a Summary
# output) is an alternate GPS format.
for i in range(0, len(importFiles)):
if '.csv' in importFiles[i] and 'Summary' not in importFiles[i] and 'export' not in importFiles[i]:
OBS_File = importFiles[i]
elif '.csv' in importFiles[i] and 'export' in importFiles[i]:
GPS_File = importFiles[i]
GPS_Type = 'CSV'
elif '.xls' in importFiles[i] and 'Summary' not in importFiles[i]:
GPS_File = importFiles[i]
GPS_Type = 'xls'
#%% Obs Import Data
if OBS_File is not None:
Obs_dat = pd.read_csv(importPath + '/' + OBS_File, skiprows=0, header=0)
# Drop rows with metadata
Obs_dat =Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()]
# Set Time Zone for Sensor. Sometimes it is set to UTC, sometimes it is set to EST
# Surveys before index 15 were logged in local (Barbados) time; later ones in UTC.
if s < 15:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados').dt.tz_convert('UTC')
else:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('UTC')
# Set Index
Obs_dat.set_index('DateTime', inplace=True)
#%% GPS Import Data
# Load the GPS track (two formats seen in the field) into a GeoDataFrame
# with a time index, then reproject to UTM zone 21N for mapping/merging.
if GPS_File is not None:
if GPS_Type == 'CSV':
GPS = pd.read_csv(importPath + '/' + GPS_File,
header=None, names=['Index', 'Date1', 'Time1', 'Date2', 'Time2', 'Northing', 'North', 'Easting', 'East', 'Var1', 'Var2'])
#convert GPS data to geodataframe
# Easting is negated: values are presumably unsigned western longitudes — TODO confirm.
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))
# NOTE(review): this branch leaves DateTime timezone-naive while the OBS
# index is tz-aware — confirm merge_asof behaves as intended for CSV GPS.
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))
elif GPS_Type == 'xls':
GPS = pd.read_excel(importPath + '/' + GPS_File, header=0)
#convert GPS data to geodataframe
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(GPS.x, GPS.y, crs="EPSG:4326"))
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['time']).dt.tz_localize('UTC')
# Set Datetime as index
GPS_gdf.set_index('DateTime', inplace=True)
# Sort by time
GPS_gdf.sort_index(inplace=True)
# Convert to UTM
GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32621")
#%% Read in site shapefile
# Site polygons used later to flag samples collected inside the survey area.
siteShp = gp.read_file('C:/Users/arey/files/Projects/West Coast/SitePolygons.shp')
siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")
#%% Merge GPS to RBR
# Process RBR into datetime
if OBS_File is not None:
# Merge with GPS as dataframe
# Nearest-time join, tolerating up to 5 minutes between sensor and GPS fixes.
Obs_geo = pd.merge_asof(Obs_dat, GPS_gdf,
left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('300s'))
Obs_geo = gp.GeoDataFrame(Obs_geo, geometry=Obs_geo.geometry, crs="EPSG:32621")
#%% Find and setup key plotting details
# Shaded Water
# NOTE(review): Mapbox access token is embedded in this URL — consider
# moving it to an environment variable before sharing this script.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Sat water
# mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckekcw3pn08am19qmqbhtq8sb/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Per-site map extents (UTM 21N), nearest GFS grid point, and in-polygon flag.
# The siteShp row picked for each site is hard-coded by position — presumably
# row order in SitePolygons.shp is fixed; verify if the shapefile changes.
if siteName == 'Great House':
axXlim = (213210.7529575412, 213562.64172686986)
axYlim = (1464769.2243017585, 1465135.2219089477)
GFS_Lon = -59.6441
GFS_Lat = 13.2372
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[2, 1])
elif siteName == 'Greensleeves':
axXlim = (213269.99233348924, 213648.1643157148)
# axYlim = (1463378.1020314451, 1463843.5442048472)
axYlim = (1463378.1020314451, 1463950.5442048472)
GFS_Lon = -59.6428
GFS_Lat = 13.2289
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[1, 1])
elif siteName == 'Old Queens Fort':
axXlim = (213368.59866770002, 213745.6997016811)
axYlim = (1460192.707288096, 1460672.371780407)
GFS_Lon = -59.6419
GFS_Lat = 13.1960
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[0, 1])
# Set min and max times using conductivity
if Obs_geo['inArea'].any():
# First and last times from area in shapefile
minTime = Obs_geo[Obs_geo['inArea']==True].index[0]
maxTime = Obs_geo[Obs_geo['inArea']==True].index[-1]
else:
# First and last times if no GPS data
# Trim 20 samples at each end to drop deployment/recovery transients.
minTime = Obs_geo.index[20]
maxTime = Obs_geo.index[-20]
# Round the survey start down to the previous 6-hour GFS analysis time.
metDate = minTime - timedelta(
hours = minTime.hour % 6,
minutes=minTime.minute,
seconds=minTime.second,
microseconds=minTime.microsecond)
#%% GFS Met Import
# Download point met data (temperature, gusts, wind components) and 6-hourly
# precipitation from the NCEI GFS 0.5-degree analysis archive via THREDDS.
var = ['Temperature_surface', 'Wind_speed_gust_surface',
'u-component_of_wind_height_above_ground', 'v-component_of_wind_height_above_ground']
var_precp = ['Total_precipitation_surface_6_Hour_Accumulation']
temp_1d = []
gust_1d = []
wndu_1d = []
wndv_1d = []
prep_1d = []
time_1d = []
# Set times to download
# Five 6-hourly analyses: 18 h before through 6 h after the survey start.
startdate = metDate - timedelta(hours=18)
enddate = metDate + timedelta(hours=6)
date_list = pd.date_range(startdate, enddate, freq='6H')
# Loop through dates
for date in date_list:
# Base URL for 0.5 degree GFS data
best_gfs = TDSCatalog('https://www.ncei.noaa.gov/thredds/catalog/model-gfs-g4-anl-files/' +
date.strftime('%Y%m') + '/' + date.strftime('%Y%m%d') + '/' + 'catalog.xml')
# Generate URLs for specific grib file
# _000 = analysis; _006 = 6 h forecast (carries the precip accumulation).
best_ds = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_000.grb2']
best_ds_precp = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_006.grb2']
# Format the query parameters
ncss = best_ds.subset()
query = ncss.query()
ncss_precp = best_ds_precp.subset()
query_precp = ncss_precp.query()
# Extract data from specific point
query.lonlat_point(GFS_Lon, GFS_Lat).time(date)
query.accept('netcdf')
query.variables(var[0], var[1], var[2], var[3])
# 10 m wind level
query.vertical_level(10)
data = ncss.get_data(query)
data = xr.open_dataset(NetCDF4DataStore(data), drop_variables='height_above_ground4')
query_precp.lonlat_point(GFS_Lon, GFS_Lat).time(date + timedelta(hours=6))
query_precp.accept('netcdf')
query_precp.variables(var_precp[0])
data_precp = ncss_precp.get_data(query_precp)
data_precp = xr.open_dataset(NetCDF4DataStore(data_precp))
temp_3d = data[var[0]]
gust_3d = data[var[1]]
wndu_3d = data[var[2]]
wndv_3d = data[var[3]]
prep_3d = data_precp[var_precp[0]]
# Read the individual point (with units) and append to the list
temp_1d.append(temp_3d.metpy.unit_array.squeeze())
gust_1d.append(gust_3d.metpy.unit_array.squeeze())
wndu_1d.append(wndu_3d.metpy.unit_array.squeeze())
wndv_1d.append(wndv_3d.metpy.unit_array.squeeze())
prep_1d.append(prep_3d.metpy.unit_array.squeeze())
time_1d.append(find_time_var(temp_3d))
#%% Process Met Data
# 24h Precipitation Total
# Time weighted average of last two points for everything else
# Sum of four 6 h accumulations = 24 h total preceding the survey.
met_prep = prep_1d[0] + prep_1d[1] + prep_1d[2] + prep_1d[3]
timeWeight1 = minTime-metDate
timeWeight2 = (metDate+timedelta(hours=6))-minTime
# Normalize the two offsets by the 6 h (21600 s) analysis interval.
timeWeight1 = timeWeight1.seconds/21600
timeWeight2 = timeWeight2.seconds/21600
# NOTE(review): the weights look inverted — the analysis at metDate
# (index 3) is multiplied by the fraction of the interval already elapsed,
# so its weight grows as the survey moves away from it. Confirm intent.
met_gust = gust_1d[3] * timeWeight1 + gust_1d[4] * timeWeight2
met_temp = temp_1d[3] * timeWeight1 + temp_1d[4] * timeWeight2
met_wind = math.sqrt((wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0)* timeWeight2) ** 2 +
(wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0)* timeWeight2) **2 )
# atan2(v, u) gives the math-convention vector heading (direction the wind
# blows toward), not meteorological "from" direction — TODO confirm which
# convention downstream tables expect.
met_wdir = math.degrees(math.atan2(
wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0)* timeWeight2,
wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0)* timeWeight2)) % 360
#%% Read in wave conditions from BTS file
# Pull the offshore wave state (Hm0, Tp, MWD) closest in time to the survey
# start; fall back to -999 sentinels when no wave file is configured.
if wave_bts_file[s] is not None:
if siteName == 'Great House':
wave_bts = pd.read_csv(wave_bts_file[s],
names=['date', 'time', 'HM0', 'TP', 'TM', 'MWD', 'DPK', 'HSWL', 'TSWL', 'DPSWL', 'HSEA', 'TSEA', 'DPSEA'],
header=0, skiprows=22, delim_whitespace=True)
wave_bts['datetime'] = pd.to_datetime(wave_bts['date'] + ' ' + wave_bts['time'])
wave_bts.set_index('datetime', inplace=True)
# NOTE(review): Index.get_loc(..., method='nearest') was removed in
# pandas 2.0 — replace with get_indexer([...], method='nearest') when upgrading.
met_hmo = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].HM0
met_tp = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].TP
met_mwd = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].MWD
else:
met_hmo = -999
met_tp = -999
met_mwd = -999
#%% Add PSU and depth units
# Practical salinity from conductivity/temperature/pressure (TEOS-10).
Obs_geo['PSU'] = gsw.conversions.SP_from_C(Obs_geo['CH3:Conductivity(mS/cm)'].values,
Obs_geo['CH1:Temperature(degC)'].values,
Obs_geo['CH0:Pressure(dbar)'].values)
# Depth [m] = pressure (dbar -> Pa via *10000) / (rho * g).
Obs_geo['Depth'] = (Obs_geo['CH0:Pressure(dbar)'].astype(float)*10000)/(gsw.rho(Obs_geo['PSU'].values,
Obs_geo['CH1:Temperature(degC)'].values,
Obs_geo['CH0:Pressure(dbar)'].values) * 9.81)
#%% Plot time series for Geo data
fontprops = fm.FontProperties(size=25)
fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(19, 25), constrained_layout=True)
# Summary-statistics table: rows 0-6 met/wave, 7-12 sensor params, 13 date.
dataTable = np.zeros([14, 4])
# Decimal places used when formatting each of the 13 numeric rows.
roundIDX = [1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1]
params = ['Depth', 'PSU', 'CH8:Oxygen_Saturation(%)', 'CH1:Temperature(degC)',
'CH6:Turbidity(NTU)', 'CH2:Chlorophyll_a(ppb)']
paramName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ sat [%]', 'Temperature [degC]',
'Turbidity [FTU]', 'Chl-a [ppb]']
parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae]
# paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0]
# paramMax = [1.0, 36.0, 34.0, 31.0, 20.0, 1.0]
# Axis limits (and validity floor) per parameter, same order as params.
paramMin = [0.0, 32.0, 100, 26.0, 0, 0]
paramMax = [1.0, 36.0, 130, 30.0, 75.0, 12000]
fig.patch.set_facecolor('white')
# fig.tight_layout(pad=1.05)
fontprops = fm.FontProperties(size=25)
# Fill the met/wave rows of the summary table (column 0 = mean value).
# GFS surface temperature arrives in kelvin; convert to Celsius.
# BUGFIX: the offset was previously 272.15, biasing air temperature +1 degC.
dataTable[0, 0] = met_temp.m.item(0) - 273.15  # Air temperature [degC]
dataTable[1, 0] = met_wind                     # Wind speed [m/s]
dataTable[2, 0] = met_wdir                     # Wind direction [deg]
dataTable[3, 0] = met_prep.m.item(0)           # 24 h precipitation [mm]
dataTable[4, 0] = met_hmo                      # Significant wave height [m]
dataTable[5, 0] = met_tp                       # Peak wave period [s]
dataTable[6, 0] = met_mwd                      # Mean wave direction [deg]
# Per-parameter artifacts collected across the plotting loop:
# local-maxima indices (turbidity only), their timestamps, and the
# time/threshold mask applied to the raw record.
ilocs_max = []
ilocs_max_pts = []
OBS_mask = []
for paramIDX, param in enumerate(params):
# Raw 1 Hz record in light grey for context.
Obs_geo.loc[minTime:maxTime, param].plot(
ax=axes[paramIDX], label='1 Second Observations', color='lightgrey') # Note the space in the col name
# Create mask for RBR data based on time and parameter minimum
OBS_mask.append(((Obs_geo.index>minTime) &
(Obs_geo.index<maxTime) &
(Obs_geo[param]>paramMin[paramIDX])))
# 12-sample Nuttall-windowed rolling mean (~1 minute at 5 s sampling —
# TODO confirm sensor rate matches the '1 Minute Average' label).
OBS_smoothed = Obs_geo.loc[OBS_mask[paramIDX], param].rolling(
12, win_type='nuttall',center=True).mean()
OBS_smoothed.plot(
ax=axes[paramIDX], label='1 Minute Average', color='black',
linewidth=3)
# Find the local maximums for Turbidity
if param == 'CH6:Turbidity(NTU)':
ilocs_max.append(argrelextrema(OBS_smoothed.values,
np.greater_equal, order=6, mode='wrap')[0])
# Add start and end points?
# ilocs_max = np.insert(ilocs_max, 0, 10)
# ilocs_max[-1] = len(OBS_smoothed.values)
ilocs_max_pts.append(OBS_smoothed.iloc[ilocs_max[paramIDX]].index.values)
# Add labels if GPS data is available
if GPS_File is not None:
# axes[paramIDX].scatter(OBS_smoothed.iloc[
# ilocs_max[paramIDX]].index, np.ones(len(ilocs_max[paramIDX])) * 30, 75,
# color='blue')
# Number each turbidity peak so it can be matched on the maps below.
for a in range(0, len(ilocs_max[paramIDX])):
axes[paramIDX].annotate(str(a+1), (ilocs_max_pts[paramIDX][a], 12), fontsize=30)
else:
# Keep list lengths aligned with paramIDX for non-turbidity parameters.
ilocs_max.append(None)
ilocs_max_pts.append(None)
# Sensor rows of the stats table: mean / std / min (floored at 0) / max.
dataTable[paramIDX+7, 0] = OBS_smoothed.mean(skipna=True)
dataTable[paramIDX+7, 1] = OBS_smoothed.std(skipna=True)
dataTable[paramIDX+7, 2] = max(OBS_smoothed.min(skipna=True), 0)
dataTable[paramIDX+7, 3] = OBS_smoothed.max(skipna=True)
axes[paramIDX].set_ylabel(paramName[paramIDX])
axes[paramIDX].set_title(paramName[paramIDX])
axes[paramIDX].set_xlabel('')
axes[paramIDX].set_ylim(paramMin[paramIDX], paramMax[paramIDX])
axes[paramIDX].legend(loc='upper left')
# axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))
# Formate Data Table
# Build display strings: rounded mean, and 'min / max' (or '--' when max==0).
dataTableFormat_mean = []
dataTableFormat_maxmin = []
for d in range(0, len(roundIDX)):
dataTableFormat_mean.append(round(dataTable[d, 0], roundIDX[d]))
if dataTable[d, 3] == 0:
dataTableFormat_maxmin.append('--')
else:
dataTableFormat_maxmin.append(str(round(dataTable[d, 2], roundIDX[d])) + ' / ' + str(round(dataTable[d, 3], roundIDX[d])))
dfOut = pd.DataFrame(dataTable[:, :])
dfOutFormat = pd.DataFrame([dataTableFormat_mean, dataTableFormat_maxmin]).transpose()
# Change the column names
dfOut.columns =['Mean', 'Standard Deviation', 'Min', 'Max']
# Two-level header: survey date over Mean / Min-Max.
dfOutFormat.columns = [np.array([minTime.strftime('%B %#d, %Y'), minTime.strftime('%B %#d, %Y')]),
np.array(['Mean', 'Min / Max'])]
# Change the row indexes
# Row 13 stores the survey date (read back by the summary-plot section).
dfOut.iloc[13, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
'Mean Wave Direction Offshore [deg]', 'Depth [m]', 'Salinity [PSU]',
'Dissolved O₂ saturation [%]', 'Temperature [degC]',
'Turbidity [FTU]', 'Chl-a [ppb]', 'Observation Date']
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
fig.suptitle(siteName + ', ' + minTime.strftime('%B %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
plt.show()
# outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]
# Write stats tables and the time-series figure back into the survey folder.
outPath = importPaths[s]
if not os.path.exists(outPath):
os.mkdir(outPath)
dfOut.to_excel(outPath + '/Summary_Stats_' + siteName + '.xlsx')
dfOutFormat.to_excel(outPath + '/Summary_StatsFormat_' + siteName + '.xlsx')
dfOut.to_csv(outPath + '/Summary_Stats_' + siteName + '.csv')
if not os.path.exists(outPath + '/Figures'):
os.mkdir(outPath + '/Figures')
fig.savefig(outPath + '/Figures/SummaryTimeSeries_' + siteName + '.pdf',
bbox_inches='tight')
fig.savefig(outPath + '/Figures/SummaryTimeSeries_' + siteName + '.png',
bbox_inches='tight', dpi=500)
#%% Plot Maps
# One map panel per parameter: GPS-located samples coloured by value over a
# Mapbox basemap, with numbered turbidity peaks, scale bar, and north arrow.
fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(19, 25), constrained_layout=True)
ax = axes.flat
fig.patch.set_facecolor('white')
# fig.tight_layout(pad=1.05)
fontprops = fm.FontProperties(size=25)
# North-arrow anchor (axes fraction) and its length.
x, y, arrow_length = 0.95, 0.93, 0.20
plt.rcParams.update({'font.size': 22})
# Track-tight extents (unused alternative to the fixed per-site extents).
axXlimTT = (Obs_geo.loc[minTime:maxTime].geometry.x.min()-100,
Obs_geo.loc[minTime:maxTime].geometry.x.max()+100)
axYlimTT = (Obs_geo.loc[minTime:maxTime].geometry.y.min()-100,
Obs_geo.loc[minTime:maxTime].geometry.y.max()+100)
plt.setp(axes, xlim=axXlim, ylim=axYlim)
# plt.setp(axes, xlim=axXlimTT, ylim=axYlimTT)
# Plot the RBR observations
# Salinity
for paramIDX, param in enumerate(params):
Obs_geo.loc[minTime:maxTime].plot(
column=param, ax=ax[paramIDX], vmin=paramMin[paramIDX], vmax=paramMax[paramIDX],
legend=True, legend_kwds={'label': paramName[paramIDX]},
cmap=parmCmap[paramIDX], markersize=20)
ctx.add_basemap(ax[paramIDX], source=mapbox, crs='EPSG:32621')
# Add time labels
# plt.scatter(RBR_Obs_geo.loc[RBR_mask].iloc[
# ilocs_max, :].geometry.x,
# RBR_Obs_geo.loc[RBR_mask].iloc[
# ilocs_max, :].geometry.y, 75, marker='o', color='black')
# Annotate the numbered turbidity peaks at their sampled positions,
# matching the labels on the time-series figure.
if (not Obs_geo.geometry.isnull().all()) &\
(GPS_File is not None) & (param == 'CH6:Turbidity(NTU)'):
for a in range(0, len(ilocs_max[paramIDX])):
ax[paramIDX].annotate(str(a + 1), (Obs_geo.loc[OBS_mask[paramIDX]].iloc[
ilocs_max[paramIDX][a], :].geometry.x,
Obs_geo.loc[OBS_mask[paramIDX]].iloc[
ilocs_max[paramIDX][a], :].geometry.y), fontsize=30)
ax[paramIDX].set_title(paramName[paramIDX])
# ax[paramIDX].set_ylabel('UTM 21N [m]')
# ax[paramIDX].set_xlabel('UTM 21N [m]')
ax[paramIDX].locator_params(axis='y', nbins=3)
ax[paramIDX].ticklabel_format(useOffset=False, style='plain', axis='both')
ax[paramIDX].get_xaxis().set_ticks([])
ax[paramIDX].get_yaxis().set_ticks([])
#Add scale-bar
scalebar = AnchoredSizeBar(ax[paramIDX].transData,
100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
ax[paramIDX].add_artist(scalebar)
# North arrow drawn in axes coordinates.
ax[paramIDX].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
arrowprops=dict(facecolor='black', width=6, headwidth=30),
ha='center', va='center', fontsize=35,
xycoords=ax[paramIDX].transAxes)
fig.suptitle(siteName + ', ' + minTime.strftime('%b %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
plt.show()
#outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]
outPath = importPaths[s]
if not os.path.exists(outPath):
os.mkdir(outPath)
if not os.path.exists(outPath + '/Figures'):
os.mkdir(outPath + '/Figures')
fig.savefig(outPath + '/Figures/SummaryMap_' + timeLabels[s] + '_' + siteName + '.pdf',
bbox_inches='tight')
fig.savefig(outPath + '/Figures/SummaryMap_' + timeLabels[s] + '_' + siteName + '.png',
bbox_inches='tight', dpi=500)
#%% Summary Sheet
# Merge the per-survey formatted stats workbooks into one sheet per site.
# plotIDXs = arange(i, 21, 3) selects every survey of site i (the config
# lists cycle through the 3 sites), so siteName is constant per i.
plotIDXsLoop = []
# plotIDXsLoop.append([0, 3, 6, 9])
# plotIDXsLoop.append([1, 4, 7, 10])
# plotIDXsLoop.append([2, 5, 8, 11])
# plotIDXsLoop.append([np.arange(0, 21*3, 3)])
# plotIDXsLoop.append([np.arange(1, 21*3, 3)])
# plotIDXsLoop.append([np.arange(2, 21*3, 3)])
for i in range(0, 3):
summTable = None
# plotIDXs = plotIDXsLoop[i]
plotIDXs = np.arange(i, 21, 3)
for s, plotIDX in enumerate(plotIDXs):
## Define master import path
importPath = importPaths[plotIDX]
siteName = siteNames[plotIDX]
obsStatsIN = pd.read_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx', header=[0, 1], index_col=0)
# Harmonize row labels across campaigns whose export formats differed.
if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
obsStatsIN.rename({'Turbidity [NTU]': 'Turbidity [FTU]'}, inplace=True)
if plotIDX > 11:
obsStatsIN.rename({'Chl-a [ppb]': 'Chl-a [ug/l]'}, inplace=True)
# Column-join each survey onto the running site table.
if s == 0:
summTable = obsStatsIN
else:
summTable = summTable.join(obsStatsIN)
# Remove -999 with nan
summTable.replace(-999, np.nan, inplace=True)
summTable.to_excel('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/Summary_StatsMerge_' + siteName + '.xlsx')
#%% Summary Plot
# One scatter figure per site: each stats row plotted against survey date.
plotvars = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height [m]', 'Salinity [PSU]',
'Dissolved O₂ [%]', 'Temperature [degC]',
'Turbidity [FTU]', 'Chl-a. [ug/L]']
for i in range(0, 3):
summTable = None
plotIDXs = np.arange(i, 21, 3)
plotDates = []
plotTable = np.empty([10, len(plotIDXs)])
for s, plotIDX in enumerate(plotIDXs):
## Define master import path
importPath = importPaths[plotIDX]
siteName = siteNames[plotIDX]
obsStatsIN = pd.read_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
# The row layout of Summary_Stats differs by campaign, so the rows and
# the date row are selected by hard-coded positions per index range —
# presumably matching each campaign's export format; verify if the
# stats writer changes.
if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
plotTable[0:3, s] = obsStatsIN.iloc[0:3, 1]
plotTable[8, s] = obsStatsIN.iloc[7, 1]
plotDates.append(datetime.strptime(obsStatsIN.iloc[8, 1], '%B %d, %Y'))
elif plotIDX<12:
plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 13], 1]
plotDates.append(datetime.strptime(obsStatsIN.iloc[14, 1], '%B %d, %Y'))
else:
plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 12], 1]
plotDates.append(datetime.strptime(obsStatsIN.iloc[13, 1], '%B %d, %Y'))
fig, axes = plt.subplots(nrows=5, ncols=2, figsize=(19, 25), sharex=True)
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# Replace near-zero and negative values (incl. -999 sentinels) with NaN
# so the scatter plots skip them.
plotTable[plotTable < 0.00001] = np.nan
for v in range(0, 10):
ax[v].scatter(plotDates, plotTable[v, :], 250)
ax[v].set_ylabel(plotvars[v])
fig.suptitle(siteName, fontsize=35)
plt.gcf().autofmt_xdate()
plt.gcf().align_ylabels()
plt.show()
fig.savefig('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + siteName + '.pdf',
bbox_inches='tight')
fig.savefig('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + siteName + '.png',
bbox_inches='tight', dpi=500)

View File

View File