SouthShore transmission and plotting

This commit is contained in:
Alexander Rey 2022-02-23 10:45:02 -05:00
parent b08bca6217
commit df32b0901a
8 changed files with 1036 additions and 705 deletions

View File

@ -3,12 +3,13 @@
# December 22, 2021 # December 22, 2021
# %% Setup Project # %% Setup Project
from mikeio import Dfsu, Mesh, Dfs2, Dfs0 from mikeio import Dfsu, Mesh, Dfs2, Dfs0, Dfs1
import pandas as pd import pandas as pd
import pathlib as pl import pathlib as pl
import numpy as np import numpy as np
import geopandas as gp import geopandas as gp
import datetime as datetime
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
import contextily as ctx import contextily as ctx
@ -16,48 +17,342 @@ import matplotlib.font_manager as fm
# %% Read Model Log # %% Read Model Log
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models", pth = pl.Path("//srv-ott3.baird.com/", "Projects", "13539.101 L'Ansecoy Bay, Mustique", "06_Models",
"13632.101.W.AJMR.Rev0_ModelLog.xlsx") "Model Log Mustique.xlsx")
runLog = pd.read_excel(pth.as_posix(), "HD")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read Model Results # %% Read Model Results
modelPlot = range(6, 7) modelPlot = range(10, 11)
curSpeed_list = [None] * (max(modelPlot) + 1) curSpeed_list = [None] * (max(modelPlot) + 1)
curElm_list = [None] * (max(modelPlot) + 1) curElm_list = [None] * (max(modelPlot) + 1)
curU_list = [None] * (max(modelPlot) + 1) curU_list = [None] * (max(modelPlot) + 1)
curV_list = [None] * (max(modelPlot) + 1) curV_list = [None] * (max(modelPlot) + 1)
curW_list = [None] * (max(modelPlot) + 1)
dfs_list = [None] * (max(modelPlot) + 1) dfs_list = [None] * (max(modelPlot) + 1)
dfs2d_list = [None] * (max(modelPlot) + 1)
ds_list = [None] * (max(modelPlot) + 1) ds_list = [None] * (max(modelPlot) + 1)
wl_list = [None] * (max(modelPlot) + 1) wl_list = [None] * (max(modelPlot) + 1)
wd_list = [None] * (max(modelPlot) + 1) wd_list = [None] * (max(modelPlot) + 1)
bed_list = [None] * (max(modelPlot) + 1) bed_list = [None] * (max(modelPlot) + 1)
z_list = [None] * (max(modelPlot) + 1)
z_profile_list = [None] * (max(modelPlot) + 1)
elm_list = [None] * (max(modelPlot) + 1)
elm_df_list = [None] * (max(modelPlot) + 1)
elm2d_list = [None] * (max(modelPlot) + 1)
elm2d_df_list = [None] * (max(modelPlot) + 1)
readPointsName = pd.read_csv("//srv-ott3.baird.com/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/Dataset locations_RevB.csv",
delimiter=",")
readPoints = pd.read_csv("//srv-ott3.baird.com/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/Dataset locations_RevE.csv",
delimiter=",").iloc[0:, 2:-2].values
# readPoints = np.zeros((14, 2))
# readPoints[0, :] = [693000, 1425500]
# readPoints[1, :] = [697000, 1429500]
# readPoints[2, :] = [697000, 1421500]
# readPoints[3, :] = [701000, 1425500]
#
# # readPoints[4, :] = [697576.96, 1425767.10] # RBR NEAR
# # readPoints[5, :] = [697760.89, 1426109.73] # RBR FAR
#
# # From Mathew
# readPoints[4, :] = [697858.00, 1426095.00] # RBR NEAR
# readPoints[5, :] = [697613.00, 1426509.00] # RBR FAR
#
# # Location Uncertainty
# # 500 -x
# readPoints[6, :] = [697858.00-100, 1426095.00-0] # RBR NEAR
# readPoints[7, :] = [697613.00-150, 1426509.00+150] # RBR FAR
# # 500 +x
# readPoints[8, :] = [697858.00+100, 1426095.00-0] # RBR NEAR
# readPoints[9, :] = [697613.00-50, 1426509.00+50] # RBR FAR
# # 500 -x
# readPoints[10, :] = [697858.00+150, 1426095.00+150] # RBR NEAR
# readPoints[11, :] = [697613.00+150, 1426509.00+150] # RBR FAR
# # 500 -x
# readPoints[12, :] = [697858.00-150, 1426095.00+150] # RBR NEAR
# readPoints[13, :] = [697613.00-150, 1426509.00+150] # RBR FAR
#125 +125
for m in modelPlot: for m in modelPlot:
dfsIN = Dfsu(pl.Path(runLog['Run Location'][m],
runLog['Run Name'][m] + '.m21fm - Result Files', 'FullDomain.dfsu').as_posix())
dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain2D.dfsu').as_posix())
dfs2d_list[m] = dfsIN
# Read Map
# ds_list[m] = dfs_list[m].read()
# curU_list[m] = ds_list[m]["Depth averaged U velocity"]
# curV_list[m] = ds_list[m]["Depth averaged V velocity"]
# curElm_list[m] = dfs_list[m].element_coordinates
# wl_list[m] = ds_list[m]["Surface elevation"]
# z_list[m] = ds_list[m]["Z coordinate"]
## Read specific points in 2D
# Find nearest elements
elem_ids = dfs2d_list[m].find_nearest_elements(readPoints[:, 0], readPoints[:, 1])
# Read in data from nearest elements
elm2d_list[m] = dfs2d_list[m].read(elements=elem_ids)
# Convert to Pandas DataFrame
elm2d_df_list[m] = [None] * readPoints.shape[0]
for p in range(0, readPoints.shape[0]):
elm2d_df_list[m][p] = elm2d_list[m].isel(idx=p).to_dataframe()
## Read specific points 3D
# Setup MIKE object
dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain3D.dfsu').as_posix())
dfs_list[m] = dfsIN dfs_list[m] = dfsIN
ds_list[m] = dfs_list[m].read()
curSpeed_list[m] = ds_list[m]["Current speed"][-1]
curU_list[m] = ds_list[m]["U velocity"][-1]
curV_list[m] = ds_list[m]["V velocity"][-1]
curElm_list[m] = dfs_list[m].element_coordinates
wl_list[m] = ds_list[m]["Surface elevation"][-1]
wd_list[m] = ds_list[m]["Total water depth"][-1]
bed_list[m] = wl_list[m] - wd_list[m]
# # Read Mesh for first model
# mikeMeshIN = dfsIN.to_shapely()
# # Convert to list of polygons
# mikeMeshIN_list = [p for p in mikeMeshIN]
# Add to list # Find nearest elements
# hmo_list[m] = gp.GeoDataFrame(hm0, geometry=mikeMeshIN_list) elem_ids = dfs_list[m].find_nearest_profile_elements(readPoints[:, 0], readPoints[:, 1])
# Read from elements- flatten 2d element array (xy and z) to 1d for reading
elm_list[m] = dfs_list[m].read(elements=elem_ids.flatten().astype(int))
# Cleanup unnecessary variables elm_df_list[m] = [None] * readPoints.shape[0]
# del mikeMeshIN z_profile_list[m] = [None] * readPoints.shape[0]
# del mikeMeshIN_list # Convert to Pandas DataFrame
del dfsIN # Loop through points, selecting corresponding depths
for p in range(0, readPoints.shape[0]):
z_profile_list[m][p] = dfs_list[m].element_coordinates[elem_ids[p, :].astype(int), 2]
# Convert to Pandas and add zidx
for z in range(0, z_profile_list[m][p].shape[0]):
df_tmp = elm_list[m].isel(idx=p * 5 + z).to_dataframe()
df_tmp['Z'] = z_profile_list[m][p][z]
df_tmp['Z_IDX'] = z
if z == 0:
elm_df_list[m][p] = df_tmp
else:
elm_df_list[m][p] = elm_df_list[m][p].append(df_tmp)
del df_tmp
# %% Import RBR data
rbr_path = "C:/Users/arey/files/Projects/Mustique/041279_20211203_1541Ruskin.xlsx"
rbr_pd = pd.read_excel(rbr_path, sheet_name='Wave', parse_dates=True, header=1, index_col=0)
rbr_pd['WL'] = rbr_pd['Depth'] - rbr_pd['Depth'].mean() - 0.2
# Correct time zone
rbr_pd.index = rbr_pd.index + datetime.timedelta(hours=4)
# %% Import Nortek Eco data
eco_path_1 = "//srv-ott3/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/02_Nortek ECO/Eco61_20210910184606.csv"
eco_path_2 = "//srv-ott3/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/02_Nortek ECO/Eco214_20210902120703.csv"
eco1_pd = pd.read_csv(eco_path_1, sep=',', header=[28], index_col=0,
parse_dates=False)
eco2_pd = pd.read_csv(eco_path_2, sep=',', header=[28], index_col=0,
parse_dates=False)
# Drop first row
eco1_pd.drop(eco1_pd.index[0], inplace=True)
eco2_pd.drop(eco2_pd.index[0], inplace=True)
eco1_pd.index = pd.to_datetime(eco1_pd.index, format='%m/%d/%Y %I:%M:%S %p')
eco2_pd.index = pd.to_datetime(eco2_pd.index, format='%m/%d/%Y %I:%M:%S %p')
# Set water level to zero
eco1_pd['WL'] = eco1_pd['Depth'].astype(float) - eco1_pd['Depth'].astype(float).mean(skipna=True) + 0.2
eco2_pd['WL'] = eco2_pd['Depth'].astype(float) - eco2_pd['Depth'].astype(float).mean(skipna=True) + 0.2
# Correct time zone
eco1_pd.index = eco1_pd.index + datetime.timedelta(hours=4)
eco2_pd.index = eco2_pd.index + datetime.timedelta(hours=4)
# %% Read in boundary conditions
ds_wl = []
dfs1 = Dfs1("//oak-spillway.baird.com/D/13539.101 L'Ansecoy Bay, Mustique/MIKE3/02_Bounds/NCOM2_EastBound.dfs1")
ds_wl.append(dfs1.read())
dfs1 = Dfs1("//oak-spillway.baird.com/D/13539.101 L'Ansecoy Bay, Mustique/MIKE3/02_Bounds/NCOM2_NorthBound.dfs1")
ds_wl.append(dfs1.read())
dfs1 = Dfs1("//oak-spillway.baird.com/D/13539.101 L'Ansecoy Bay, Mustique/MIKE3/02_Bounds/NCOM2_SouthBound.dfs1")
ds_wl.append(dfs1.read())
dfs1 = Dfs1("//oak-spillway.baird.com/D/13539.101 L'Ansecoy Bay, Mustique/MIKE3/02_Bounds/NCOM2_WestBound.dfs1")
ds_wl.append(dfs1.read())
# %% Plot Water Level at a point
fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(8, 8), sharex=True)
# plotstart = '2021-10-01 00:00:00'
# plotend = '2021-10-21 00:00:00'
plotstart = '2021-10-05 00:00:00'
plotend = '2021-10-07 00:00:00'
for sub, p in enumerate([5, 6, 9]):
if p == 9:
rbr_pd['WL'].plot(ax=axes[sub], color='k', label='Obervations')
elif p == 5:
eco1_pd['WL'].plot(ax=axes[sub], color='k', label='Obervations')
elif p == 6:
eco2_pd['WL'].plot(ax=axes[sub], color='k', label='Obervations')
elm2d_df_list[m][p].plot(y='Surface elevation', ax=axes[sub], label=readPointsName.Dataset[p])
axes[sub].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[sub].set_ylim(-1.0, 1.0)
plt.show()
# %% Plot Water Level at a 3D point
plotstart = '2021-09-14 00:00:00'
# plotend = '2021-09-16 00:00:00'
plotend = '2021-10-03 00:00:00'
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(8, 8), sharex=True)
# elm_df_list[m][6].loc[elm_df_list[m][6]['Z_IDX'] == 4, :].plot(
# y='Z', ax=axes[0], label='Model')
axes[0].plot(elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == 4, :].index,
elm_list[m]['Z coordinate'][:, 30*6-1]+0.1, label='Model')
eco1_pd['WL'].plot(ax=axes[0], color='k', label='Observations')
axes[0].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[0].set_ylabel('Water Level Offshore (m)')
axes[0].legend()
# elm_df_list[m][7].loc[elm_df_list[m][7]['Z_IDX'] == 4, :].plot(
# y='Z', ax=axes[1], label='Model')
axes[1].plot(elm_df_list[m][7].loc[elm_df_list[m][7]['Z_IDX'] == 4, :].index,
elm_list[m]['Z coordinate'][:, 14*6-1]+0.1, label='Model')
eco2_pd['WL'].plot(ax=axes[1], color='k', label='Observations')
axes[1].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[1].set_ylabel('Water Level Nearshore (m)')
axes[1].legend()
axes[1].set_xlabel('Date')
plt.show()
fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' +
'/wl_TS.png', bbox_inches='tight')
# %% Velocity point
fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(8, 8), sharex=True)
eco1_pd['Upper speed'].astype(float).plot(ax=axes[0], color='tab:red', label='Observations')
elm_df_list[m][5]['Current Speed'] = np.sqrt(
elm_df_list[m][5]['U velocity'] ** 2 + elm_df_list[m][5]['V velocity'] ** 2)
elm_df_list[m][5].loc[elm_df_list[m][4]['Z_IDX'] == 4, :].plot(
y='Current Speed', ax=axes[0], label='Model')
axes[0].set_xlim(pd.Timestamp('2021-09-15 00:00:00'), pd.Timestamp('2021-10-3 00:00:00'))
axes[0].set_ylabel('Nearshore Eco Current')
axes[0].legend()
eco2_pd['Upper speed'].astype(float).plot(ax=axes[1], color='tab:red', label='Observations')
elm_df_list[m][5]['Current Speed'] = np.sqrt(
elm_df_list[m][5]['U velocity'] ** 2 + elm_df_list[m][5]['V velocity'] ** 2)
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == 4, :].plot(
y='Current Speed', ax=axes[1], label='Model')
axes[1].set_xlim(pd.Timestamp('2021-09-15 00:00:00'), pd.Timestamp('2021-10-3 00:00:00'))
axes[1].set_ylabel('Offshore Eco Current')
plt.show()
# %% Velocity point U and V
# Z_IDX = 4
# ecoVar1 = 'Upper speed U'
# ecoVar2 = 'Upper speed V'
# ecoVar3 = 'Upper speed'
# ecoVar4 = 'Upper direction'
# plotstart = '2021-09-14 00:00:00'
# # plotend = '2021-09-16 00:00:00'
# plotend = '2021-10-03 00:00:00'
plotstart = '2021-10-01 00:00:00'
# plotend = '2021-09-16 00:00:00'
plotend = '2021-10-21 00:00:00'
Z_IDX = 2
ecoVar1 = 'Middle speed U'
ecoVar2 = 'Middle speed V'
ecoVar3 = 'Middle speed'
ecoVar4 = 'Middle direction'
# Z_IDX = 1
# ecoVar1 = 'Lower speed U'
# ecoVar2 = 'Lower speed V'
# ecoVar3 = 'Lower speed'
# ecoVar4 = 'Lower direction'
fig, axes = plt.subplots(nrows=4, ncols=1, figsize=(8, 8), sharex=True)
eco1_pd[ecoVar1] = eco1_pd[ecoVar3].astype(float) * \
np.sin(np.radians(eco1_pd[ecoVar4].astype(float).to_numpy()))
eco1_pd[ecoVar1].astype(float).plot(ax=axes[0], color='k', label='Observations')
elm_df_list[m][3].loc[elm_df_list[m][3]['Z_IDX'] == Z_IDX, :].plot(
y='U velocity', ax=axes[0], label='Model')
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
y='U velocity', ax=axes[0], label='Model Shift')
axes[0].set_ylim(-0.5, 0.5)
axes[0].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[0].set_ylabel('Nearshore U (m/s)')
axes[0].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
eco1_pd[ecoVar2] = eco1_pd[ecoVar3].astype(float) * \
np.cos(np.radians(eco1_pd[ecoVar4].astype(float).to_numpy()))
eco1_pd[ecoVar2].astype(float).plot(ax=axes[1], color='k', label='Observations')
elm_df_list[m][3].loc[elm_df_list[m][3]['Z_IDX'] == Z_IDX, :].plot(
y='V velocity', ax=axes[1], label='Model')
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
y='V velocity', ax=axes[1], label='Model Shift')
axes[1].set_ylim(-0.5, 0.5)
axes[1].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[1].set_ylabel('Nearshore V (m/s)')
axes[1].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
eco2_pd[ecoVar1] = eco2_pd[ecoVar3].astype(float) * \
np.sin(np.radians(eco2_pd[ecoVar4].astype(float).to_numpy()))
eco2_pd[ecoVar1].astype(float).plot(ax=axes[2], color='k', label='Observations')
elm_df_list[m][4].loc[elm_df_list[m][4]['Z_IDX'] == Z_IDX, :].plot(
y='U velocity', ax=axes[2], label='Model')
elm_df_list[m][6].loc[elm_df_list[m][6]['Z_IDX'] == Z_IDX, :].plot(
y='U velocity', ax=axes[2], label='Model Shift')
axes[2].set_ylim(-1.5, 1.5)
axes[2].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[2].set_ylabel('Offshore U (m/s)')
axes[2].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
eco2_pd[ecoVar2] = eco2_pd[ecoVar3].astype(float) * \
np.cos(np.radians(eco2_pd[ecoVar4].astype(float).to_numpy()))
eco2_pd[ecoVar2].astype(float).plot(ax=axes[3], color='k', label='Observations')
elm_df_list[m][4].loc[elm_df_list[m][4]['Z_IDX'] == Z_IDX, :].plot(
y='V velocity', ax=axes[3], label='Model')
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
y='V velocity', ax=axes[3], label='Model Shift')
axes[3].set_ylim(-1.00, 1.00)
axes[3].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[3].set_ylabel('Offshore V (m/s)')
axes[3].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
axes[3].set_xlabel('Date')
plt.show()
# fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' +
# '/uVel_TS.png', bbox_inches='tight')
# %% Plotting # %% Plotting
# Shaded Water # Shaded Water
@ -88,7 +383,8 @@ for m in modelPlot:
cur_plot = curSpeed_list[m] * 3.28084 cur_plot = curSpeed_list[m] * 3.28084
cur_nan = np.isnan(cur_plot) cur_nan = np.isnan(cur_plot)
axDHI = dfs_list[m].plot(cur_plot[~cur_nan], plot_type='contourf', show_mesh=False, cmap='viridis', ax=axes, levels=11, axDHI = dfs_list[m].plot(cur_plot[~cur_nan], plot_type='contourf', show_mesh=False, cmap='viridis', ax=axes,
levels=11,
vmin=vmin, vmax=vmax, label='Current Speed (ft/s)', vmin=vmin, vmax=vmax, label='Current Speed (ft/s)',
elements=dfs_list[m].element_ids[~cur_nan]) elements=dfs_list[m].element_ids[~cur_nan])
@ -97,7 +393,8 @@ for m in modelPlot:
interpolant = dfs_list[m].get_2d_interpolant(overGrid.xy, n_nearest=1) interpolant = dfs_list[m].get_2d_interpolant(overGrid.xy, n_nearest=1)
# Interpolate velocity at last time step by selecting time from dataset # Interpolate velocity at last time step by selecting time from dataset
overGrid_Cur = dfs_list[m].interp2d(ds_list[m][ds_list[m].time[-1]:], *interpolant, shape=(overGrid.ny, overGrid.nx)) overGrid_Cur = dfs_list[m].interp2d(ds_list[m][ds_list[m].time[-1]:], *interpolant,
shape=(overGrid.ny, overGrid.nx))
overGrid_U = overGrid_Cur["U velocity"][-1] overGrid_U = overGrid_Cur["U velocity"][-1]
overGrid_V = overGrid_Cur["V velocity"][-1] overGrid_V = overGrid_Cur["V velocity"][-1]
@ -261,7 +558,6 @@ for m in modelPlot:
breakTimes_TOut[m] = breakwaterPTS_times breakTimes_TOut[m] = breakwaterPTS_times
# %% Format and save # %% Format and save
for m in modelPlot: for m in modelPlot:
@ -282,7 +578,6 @@ for m in modelPlot:
saveTmp2.drop(['geometry'], axis=1, inplace=True) saveTmp2.drop(['geometry'], axis=1, inplace=True)
saveTmp2T = saveTmp2.transpose(copy=True) saveTmp2T = saveTmp2.transpose(copy=True)
saveTmpT.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' + saveTmpT.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
'Toe' + runLog['Short Description'][m] + '.csv') 'Toe' + runLog['Short Description'][m] + '.csv')

View File

@ -11,7 +11,10 @@ import numpy as np
import geopandas as gp import geopandas as gp
import datetime as datetime import datetime as datetime
import matplotlib as mpl
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
import time
import matplotlib.animation as animation
import contextily as ctx import contextily as ctx
import matplotlib.font_manager as fm import matplotlib.font_manager as fm
@ -21,21 +24,26 @@ pth = pl.Path("//srv-ott3.baird.com/", "Projects", "13539.101 L'Ansecoy Bay, Mus
"Model Log Mustique.xlsx") "Model Log Mustique.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog") runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read Map Model Results
modelPlot = [8, 10, 11, 12, 13, 14]
modelPlot = [15, 16, 17]
# %% Read Model Results
modelPlot = range(10, 11)
curSpeed_list = [None] * (max(modelPlot) + 1)
curElm_list = [None] * (max(modelPlot) + 1)
curU_list = [None] * (max(modelPlot) + 1)
curV_list = [None] * (max(modelPlot) + 1)
curW_list = [None] * (max(modelPlot) + 1)
dfs_list = [None] * (max(modelPlot) + 1) dfs_list = [None] * (max(modelPlot) + 1)
dfs2d_list = [None] * (max(modelPlot) + 1)
ds_list = [None] * (max(modelPlot) + 1) ds_list = [None] * (max(modelPlot) + 1)
wl_list = [None] * (max(modelPlot) + 1)
wd_list = [None] * (max(modelPlot) + 1) for m in modelPlot:
bed_list = [None] * (max(modelPlot) + 1) dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain3D.dfsu').as_posix())
dfs_list[m] = dfsIN
ds_list[m] = dfs_list[m].read()
# %% Read Model Results at point
modelPlot = [4, 10, 12, 13]
dfs2d_list = [None] * (max(modelPlot) + 1)
dfs3d_list = [None] * (max(modelPlot) + 1)
z_list = [None] * (max(modelPlot) + 1) z_list = [None] * (max(modelPlot) + 1)
z_profile_list = [None] * (max(modelPlot) + 1) z_profile_list = [None] * (max(modelPlot) + 1)
@ -45,41 +53,12 @@ elm_df_list = [None] * (max(modelPlot) + 1)
elm2d_list = [None] * (max(modelPlot) + 1) elm2d_list = [None] * (max(modelPlot) + 1)
elm2d_df_list = [None] * (max(modelPlot) + 1) elm2d_df_list = [None] * (max(modelPlot) + 1)
readPointsName = pd.read_csv("//srv-ott3.baird.com/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/Dataset locations_RevB.csv", readPointsName = pd.read_csv("//srv-ott3.baird.com/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/Dataset locations_RevF.csv",
delimiter=",") delimiter=",")
readPoints = pd.read_csv("//srv-ott3.baird.com/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/Dataset locations_RevE.csv", readPoints = pd.read_csv("//srv-ott3.baird.com/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/Dataset locations_RevF.csv",
delimiter=",").iloc[0:, 2:-2].values delimiter=",").iloc[0:, 2:-2].values
# readPoints = np.zeros((14, 2))
# readPoints[0, :] = [693000, 1425500]
# readPoints[1, :] = [697000, 1429500]
# readPoints[2, :] = [697000, 1421500]
# readPoints[3, :] = [701000, 1425500]
#
# # readPoints[4, :] = [697576.96, 1425767.10] # RBR NEAR
# # readPoints[5, :] = [697760.89, 1426109.73] # RBR FAR
#
# # From Mathew
# readPoints[4, :] = [697858.00, 1426095.00] # RBR NEAR
# readPoints[5, :] = [697613.00, 1426509.00] # RBR FAR
#
# # Location Uncertainty
# # 500 -x
# readPoints[6, :] = [697858.00-100, 1426095.00-0] # RBR NEAR
# readPoints[7, :] = [697613.00-150, 1426509.00+150] # RBR FAR
# # 500 +x
# readPoints[8, :] = [697858.00+100, 1426095.00-0] # RBR NEAR
# readPoints[9, :] = [697613.00-50, 1426509.00+50] # RBR FAR
# # 500 -x
# readPoints[10, :] = [697858.00+150, 1426095.00+150] # RBR NEAR
# readPoints[11, :] = [697613.00+150, 1426509.00+150] # RBR FAR
# # 500 -x
# readPoints[12, :] = [697858.00-150, 1426095.00+150] # RBR NEAR
# readPoints[13, :] = [697613.00-150, 1426509.00+150] # RBR FAR
#125 +125
for m in modelPlot: for m in modelPlot:
dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain2D.dfsu').as_posix()) dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain2D.dfsu').as_posix())
@ -108,19 +87,19 @@ for m in modelPlot:
## Read specific points 3D ## Read specific points 3D
# Setup MIKE object # Setup MIKE object
dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain3D.dfsu').as_posix()) dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain3D.dfsu').as_posix())
dfs_list[m] = dfsIN dfs3d_list[m] = dfsIN
# Find nearest elements # Find nearest elements
elem_ids = dfs_list[m].find_nearest_profile_elements(readPoints[:, 0], readPoints[:, 1]) elem_ids = dfs3d_list[m].find_nearest_profile_elements(readPoints[:, 0], readPoints[:, 1])
# Read from elements- flatten 2d element array (xy and z) to 1d for reading # Read from elements- flatten 2d element array (xy and z) to 1d for reading
elm_list[m] = dfs_list[m].read(elements=elem_ids.flatten().astype(int)) elm_list[m] = dfs3d_list[m].read(elements=elem_ids.flatten().astype(int))
elm_df_list[m] = [None] * readPoints.shape[0] elm_df_list[m] = [None] * readPoints.shape[0]
z_profile_list[m] = [None] * readPoints.shape[0] z_profile_list[m] = [None] * readPoints.shape[0]
# Convert to Pandas DataFrame # Convert to Pandas DataFrame
# Loop through points, selecting corresponding depths # Loop through points, selecting corresponding depths
for p in range(0, readPoints.shape[0]): for p in range(0, readPoints.shape[0]):
z_profile_list[m][p] = dfs_list[m].element_coordinates[elem_ids[p, :].astype(int), 2] z_profile_list[m][p] = dfs3d_list[m].element_coordinates[elem_ids[p, :].astype(int), 2]
# Convert to Pandas and add zidx # Convert to Pandas and add zidx
for z in range(0, z_profile_list[m][p].shape[0]): for z in range(0, z_profile_list[m][p].shape[0]):
@ -184,84 +163,34 @@ ds_wl.append(dfs1.read())
# %% Plot Water Level at a point # %% Plot Water Level at a point
fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(8, 8), sharex=True) fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(8, 8), sharex=True)
# plotstart = '2021-10-01 00:00:00' plotstart = '2021-10-01 00:00:00'
# plotend = '2021-10-21 00:00:00' plotend = '2021-10-15 00:00:00'
plotstart = '2021-10-05 00:00:00' # plotstart = '2021-10-05 00:00:00'
plotend = '2021-10-07 00:00:00' # plotend = '2021-10-07 00:00:00'
modelPlot = [13]
for sub, p in enumerate([5, 6, 9]): for m in modelPlot:
for sub, p in enumerate([5, 9, 6]):
if p == 9: if p == 9:
rbr_pd['WL'].plot(ax=axes[sub], color='k', label='Obervations') rbr_pd['WL'].plot(ax=axes[sub], color='k', label='Nearshore East Obervations')
elif p == 5: elif p == 5:
eco1_pd['WL'].plot(ax=axes[sub], color='k', label='Obervations') eco1_pd['WL'].plot(ax=axes[sub], color='k', label='Offshore Obervations')
elif p == 6: elif p == 6:
eco2_pd['WL'].plot(ax=axes[sub], color='k', label='Obervations') eco2_pd['WL'].plot(ax=axes[sub], color='k', label='Nearshore West Obervations')
elm2d_df_list[m][p].plot(y='Surface elevation', ax=axes[sub], label=readPointsName.Dataset[p]) elm2d_df_list[m][p].plot(y='Surface elevation', ax=axes[sub], label='Model')
axes[sub].legend(loc="upper left")
axes[sub].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend)) axes[sub].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[sub].set_ylim(-1.0, 1.0) axes[sub].set_ylim(-1.0, 1.0)
axes[sub].set_ylabel('Water Level (m)')
plt.show() axes[sub].set_xlabel('Date')
# %% Plot Water Level at a 3D point
plotstart = '2021-09-14 00:00:00'
# plotend = '2021-09-16 00:00:00'
plotend = '2021-10-03 00:00:00'
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(8, 8), sharex=True)
# elm_df_list[m][6].loc[elm_df_list[m][6]['Z_IDX'] == 4, :].plot(
# y='Z', ax=axes[0], label='Model')
axes[0].plot(elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == 4, :].index,
elm_list[m]['Z coordinate'][:, 30*6-1]+0.1, label='Model')
eco1_pd['WL'].plot(ax=axes[0], color='k', label='Observations')
axes[0].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[0].set_ylabel('Water Level Offshore (m)')
axes[0].legend()
# elm_df_list[m][7].loc[elm_df_list[m][7]['Z_IDX'] == 4, :].plot(
# y='Z', ax=axes[1], label='Model')
axes[1].plot(elm_df_list[m][7].loc[elm_df_list[m][7]['Z_IDX'] == 4, :].index,
elm_list[m]['Z coordinate'][:, 14*6-1]+0.1, label='Model')
eco2_pd['WL'].plot(ax=axes[1], color='k', label='Observations')
axes[1].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[1].set_ylabel('Water Level Nearshore (m)')
axes[1].legend()
axes[1].set_xlabel('Date')
plt.show() plt.show()
fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' + fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' +
'/wl_TS.png', bbox_inches='tight') '/wl_TS_RevB.png', bbox_inches='tight')
# %% Velocity point
fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(8, 8), sharex=True)
eco1_pd['Upper speed'].astype(float).plot(ax=axes[0], color='tab:red', label='Observations')
elm_df_list[m][5]['Current Speed'] = np.sqrt(
elm_df_list[m][5]['U velocity'] ** 2 + elm_df_list[m][5]['V velocity'] ** 2)
elm_df_list[m][5].loc[elm_df_list[m][4]['Z_IDX'] == 4, :].plot(
y='Current Speed', ax=axes[0], label='Model')
axes[0].set_xlim(pd.Timestamp('2021-09-15 00:00:00'), pd.Timestamp('2021-10-3 00:00:00'))
axes[0].set_ylabel('Nearshore Eco Current')
axes[0].legend()
eco2_pd['Upper speed'].astype(float).plot(ax=axes[1], color='tab:red', label='Observations')
elm_df_list[m][5]['Current Speed'] = np.sqrt(
elm_df_list[m][5]['U velocity'] ** 2 + elm_df_list[m][5]['V velocity'] ** 2)
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == 4, :].plot(
y='Current Speed', ax=axes[1], label='Model')
axes[1].set_xlim(pd.Timestamp('2021-09-15 00:00:00'), pd.Timestamp('2021-10-3 00:00:00'))
axes[1].set_ylabel('Offshore Eco Current')
plt.show()
# %% Velocity point U and V # %% Velocity point U and V
# Z_IDX = 4 # Z_IDX = 4
@ -278,6 +207,9 @@ plotstart = '2021-10-01 00:00:00'
# plotend = '2021-09-16 00:00:00' # plotend = '2021-09-16 00:00:00'
plotend = '2021-10-21 00:00:00' plotend = '2021-10-21 00:00:00'
# plotstart = '2021-10-05 00:00:00'
# plotend = '2021-10-07 00:00:00'
Z_IDX = 2 Z_IDX = 2
ecoVar1 = 'Middle speed U' ecoVar1 = 'Middle speed U'
ecoVar2 = 'Middle speed V' ecoVar2 = 'Middle speed V'
@ -290,16 +222,16 @@ ecoVar4 = 'Middle direction'
# ecoVar3 = 'Lower speed' # ecoVar3 = 'Lower speed'
# ecoVar4 = 'Lower direction' # ecoVar4 = 'Lower direction'
fig, axes = plt.subplots(nrows=4, ncols=1, figsize=(8, 8), sharex=True) fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(8, 4), sharex=True)
modelPlot = [13]
for m in modelPlot:
eco1_pd[ecoVar1] = eco1_pd[ecoVar3].astype(float) * \ eco1_pd[ecoVar1] = eco1_pd[ecoVar3].astype(float) * \
np.sin(np.radians(eco1_pd[ecoVar4].astype(float).to_numpy())) np.sin(np.radians(eco1_pd[ecoVar4].astype(float).to_numpy()))
eco1_pd[ecoVar1].astype(float).plot(ax=axes[0], color='k', label='Observations') eco1_pd[ecoVar1].astype(float).plot(ax=axes[0], color='k', label='Observations')
elm_df_list[m][3].loc[elm_df_list[m][3]['Z_IDX'] == Z_IDX, :].plot( elm_df_list[m][3].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
y='U velocity', ax=axes[0], label='Model') y='U velocity', ax=axes[0], label='Model')
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
y='U velocity', ax=axes[0], label='Model Shift')
axes[0].set_ylim(-0.5, 0.5) axes[0].set_ylim(-0.5, 0.5)
axes[0].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend)) axes[0].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
@ -311,10 +243,8 @@ eco1_pd[ecoVar2] = eco1_pd[ecoVar3].astype(float) * \
eco1_pd[ecoVar2].astype(float).plot(ax=axes[1], color='k', label='Observations') eco1_pd[ecoVar2].astype(float).plot(ax=axes[1], color='k', label='Observations')
elm_df_list[m][3].loc[elm_df_list[m][3]['Z_IDX'] == Z_IDX, :].plot(
y='V velocity', ax=axes[1], label='Model')
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot( elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
y='V velocity', ax=axes[1], label='Model Shift') y='V velocity', ax=axes[1], label='Model')
axes[1].set_ylim(-0.5, 0.5) axes[1].set_ylim(-0.5, 0.5)
@ -322,127 +252,232 @@ axes[1].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[1].set_ylabel('Nearshore V (m/s)') axes[1].set_ylabel('Nearshore V (m/s)')
axes[1].legend(bbox_to_anchor=(0.90, 1), loc="upper left") axes[1].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
eco2_pd[ecoVar1] = eco2_pd[ecoVar3].astype(float) * \ # eco2_pd[ecoVar1] = eco2_pd[ecoVar3].astype(float) * \
np.sin(np.radians(eco2_pd[ecoVar4].astype(float).to_numpy())) # np.sin(np.radians(eco2_pd[ecoVar4].astype(float).to_numpy()))
eco2_pd[ecoVar1].astype(float).plot(ax=axes[2], color='k', label='Observations') # eco2_pd[ecoVar1].astype(float).plot(ax=axes[2], color='k', label='Observations')
elm_df_list[m][4].loc[elm_df_list[m][4]['Z_IDX'] == Z_IDX, :].plot( # elm_df_list[m][6].loc[elm_df_list[m][6]['Z_IDX'] == Z_IDX, :].plot(
y='U velocity', ax=axes[2], label='Model') # y='U velocity', ax=axes[2], label='Model')
elm_df_list[m][6].loc[elm_df_list[m][6]['Z_IDX'] == Z_IDX, :].plot(
y='U velocity', ax=axes[2], label='Model Shift')
axes[2].set_ylim(-1.5, 1.5) # axes[2].set_ylim(-1.5, 1.5)
axes[2].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend)) # axes[2].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[2].set_ylabel('Offshore U (m/s)') # axes[2].set_ylabel('Offshore U (m/s)')
axes[2].legend(bbox_to_anchor=(0.90, 1), loc="upper left") # axes[2].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
#
# eco2_pd[ecoVar2] = eco2_pd[ecoVar3].astype(float) * \
# np.cos(np.radians(eco2_pd[ecoVar4].astype(float).to_numpy()))
# eco2_pd[ecoVar2].astype(float).plot(ax=axes[3], color='k', label='Observations')
# elm_df_list[m][5].loc[elm_df_list[m][6]['Z_IDX'] == Z_IDX, :].plot(
# y='V velocity', ax=axes[3], label='Model')
#
# axes[3].set_ylim(-1.00, 1.00)
# axes[3].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
# axes[3].set_ylabel('Offshore V (m/s)')
# axes[3].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
eco2_pd[ecoVar2] = eco2_pd[ecoVar3].astype(float) * \ axes[1].set_xlabel('Date')
np.cos(np.radians(eco2_pd[ecoVar4].astype(float).to_numpy()))
eco2_pd[ecoVar2].astype(float).plot(ax=axes[3], color='k', label='Observations')
elm_df_list[m][4].loc[elm_df_list[m][4]['Z_IDX'] == Z_IDX, :].plot(
y='V velocity', ax=axes[3], label='Model')
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
y='V velocity', ax=axes[3], label='Model Shift')
axes[3].set_ylim(-1.00, 1.00)
axes[3].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[3].set_ylabel('Offshore V (m/s)')
axes[3].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
axes[3].set_xlabel('Date')
plt.show() plt.show()
# fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' + fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' +
# '/uVel_TS.png', bbox_inches='tight') '/uVel_TS_RevC.png', bbox_inches='tight')
# %% Plotting
# Shaded Water # %% Subplot Map Plotting
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw' mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckex9vtri0o6619p55sl5qiyv/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12 x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12) fontprops = fm.FontProperties(size=12)
modelPlot = range(6, 7) modelPlot = [15, 16, 17]
vmin = 0 tStepsPlot = np.zeros((5, 6))
vmax = 1 tStepsPlot[0, :] = range(110, 116)
scale = 1 # 5 tStepsPlot[1, :] = range(158, 164)
tStepsPlot[2, :] = range(206, 212)
tStepsPlot[3, :] = range(255, 261)
tStepsPlot[4, :] = range(138, 144)
for m in modelPlot: for m in modelPlot:
if m < 4 or m == 6: vmax = 34.0
vmax = 5 vmin = 33.7
qmax = 4 cmap = 'inferno'
scale = 50
amin = 1 if m == 8 or m == 17:
else: disPlot = 7
vmax = 1 elif m == 11 or m == 15:
qmax = 0.25 disPlot = 13
scale = 75 elif m == 14 or m == 16:
amin = 3 disPlot = 8
for a in range(0, 5):
fig, axes = plt.subplots(nrows=2, ncols=3, sharey=True, sharex=True, figsize=(16, 9))
ax = axes.flatten()
for t in range(0, 6):
# Plot salinity
# Select salinity data at selected time step
plotDat = ds_list[m]['Salinity'][int(tStepsPlot[a, t]), :]
# Identify nan values
plot_nan = np.isnan(plotDat)
# Add additional nan values to show bed
plot_nan = plot_nan | (plotDat < 33.725)
# Plot all selected values at a given later and not nan
axDHI = dfs_list[m].plot(plotDat[(dfs_list[m].layer_ids == 0) & (~plot_nan)], plot_type='contourf',
show_mesh=False, cmap=cmap, ax=ax[t], add_colorbar=False,
vmin=vmin, vmax=vmax,
elements=dfs_list[m].element_ids[(dfs_list[m].layer_ids == 0) & (~plot_nan)])
# Add discharge point
ax[t].scatter(readPoints[disPlot, 0], readPoints[disPlot, 1], marker='^', color='r', s=20)
# ax[t].set_xlim(left=696680.7, right=698252.7)
# ax[t].set_ylim(bottom=1425547.5, top=1426681.8)
ax[t].set_xlim(left=696508.3, right=698252.7)
ax[t].set_ylim(bottom=1425267.0, top=1426681.8)
ctx.add_basemap(ax[t], source=mapbox, crs='EPSG:32620')
ax[t].title.set_text(str(ds_list[m].time[int(tStepsPlot[a, t])]))
ax[t].xaxis.set_major_locator(plt.MaxNLocator(4))
# axes.titlesize = 'x-large'
# fig.suptitle(runLog['Short Description'][m], fontsize=18)
# ax[t].set_xlabel('Easting [m]')
# ax[t].set_ylabel('Northing [m]')
fig.subplots_adjust(right=0.8)
cmap = mpl.cm.inferno
norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
cax = plt.axes([0.85, 0.2, 0.05, 0.6])
cb1 = mpl.colorbar.ColorbarBase(cax, cmap=cmap,
norm=norm,
orientation='vertical')
cb1.set_label('Salinity (PSU)')
plt.show()
fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' +
'/Plume_6_Plot_' + runLog['Run'][m][0:-20] +'_Alt' + str(a) + '.png', bbox_inches='tight')
# %% Map Plotting
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckex9vtri0o6619p55sl5qiyv/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
modelPlot = range(8, 9)
for m in modelPlot:
vmax = 34.2
vmin = 34
fig, axes = plt.subplots(figsize=(8, 8)) fig, axes = plt.subplots(figsize=(8, 8))
# Convert to feet and ignore missing # Plot salinity
cur_plot = curSpeed_list[m] * 3.28084 # Select salinity data at last time step
cur_nan = np.isnan(cur_plot) plotDat = ds_list[m]['Salinity'][-1, :]
# Identify nan values
plot_nan = np.isnan(plotDat)
axDHI = dfs_list[m].plot(cur_plot[~cur_nan], plot_type='contourf', show_mesh=False, cmap='viridis', ax=axes, # Add additional nan values to show bed
levels=11, plot_nan = plot_nan | (plotDat<34.05)
vmin=vmin, vmax=vmax, label='Current Speed (ft/s)',
elements=dfs_list[m].element_ids[~cur_nan])
# Plot arrows on overset grid # Plot all selected values at a given later and not nan
overGrid = dfs_list[m].get_overset_grid(dx=50) axDHI = dfs_list[m].plot(plotDat[(dfs_list[m].layer_ids == 1) & (~plot_nan)], plot_type='contourf',
interpolant = dfs_list[m].get_2d_interpolant(overGrid.xy, n_nearest=1) show_mesh=False, cmap='inferno', ax=axes,
vmin=vmin, vmax=vmax, label='Salinity (PSU)',
elements=dfs_list[m].element_ids[(dfs_list[m].layer_ids == 1) & (~plot_nan)])
# Interpolate velocity at last time step by selecting time from dataset
overGrid_Cur = dfs_list[m].interp2d(ds_list[m][ds_list[m].time[-1]:], *interpolant,
shape=(overGrid.ny, overGrid.nx))
overGrid_U = overGrid_Cur["U velocity"][-1] axes.set_xlim(left=696680.7, right=698252.7)
overGrid_V = overGrid_Cur["V velocity"][-1] axes.set_ylim(bottom=1425547.5, top=1426681.8)
# Normalize velocity above limit ctx.add_basemap(axes, source=mapbox, crs='EPSG:32620')
overGrid_U_Scale = overGrid_U * 3.28084
overGrid_V_Scale = overGrid_V * 3.28084
r = np.power(np.add(np.power(overGrid_U_Scale, 2), np.power(overGrid_V_Scale, 2)), 0.5)
curPlotMask = np.sqrt((overGrid_U_Scale) ** 2 + (overGrid_V_Scale) ** 2) > qmax
overGrid_U_Plot = np.zeros(overGrid_U_Scale.shape)
overGrid_V_Plot = np.zeros(overGrid_V_Scale.shape)
# Normalize arrows to 1
overGrid_U_Plot = overGrid_U_Scale / r
overGrid_V_Plot = overGrid_V_Scale / r
# Scale arrows below qmax
overGrid_U_Plot[~curPlotMask] = overGrid_U_Plot[~curPlotMask] * (r[~curPlotMask] / qmax)
overGrid_V_Plot[~curPlotMask] = overGrid_V_Plot[~curPlotMask] * (r[~curPlotMask] / qmax)
qv = axDHI.quiver(overGrid.x, overGrid.y, overGrid_U_Plot, overGrid_V_Plot, scale=scale,
color='k', headlength=3.5, headwidth=3.5, headaxislength=3.5, width=0.0015, minlength=amin) # 200
axes.set_xlim(left=427800, right=431000)
axes.set_ylim(bottom=4758000, top=4762000)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
# axes.title.set_text(runLog['Short Description'][m]) # axes.title.set_text(runLog['Short Description'][m])
# axes.titlesize = 'x-large' # axes.titlesize = 'x-large'
fig.suptitle(runLog['Short Description'][m], fontsize=18) # fig.suptitle(runLog['Short Description'][m], fontsize=18)
axes.set_xlabel('Easting [m]') axes.set_xlabel('Easting [m]')
axes.set_ylabel('Northing [m]') axes.set_ylabel('Northing [m]')
plt.show() plt.show()
fig.savefig(
'//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/04_MIKE21_HD/Images/Production/' + # %% Animated map
runLog['Short Description'][m] + '_RevC_Current.png', bbox_inches='tight', dpi=300) plt.rcParams['animation.ffmpeg_path'] = 'C:/Users/arey/Local/ffmpeg-2022-02-14-git-59c647bcf3-full_build/bin/ffmpeg.exe'
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckex9vtri0o6619p55sl5qiyv/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
# Set model to plot
modelPlot = [15, 16, 17]
m = 16
# m = 11
vmax = 34.0
vmin = 33.7
cmap = 'inferno'
metadata = dict(title='Movie Test', artist='Matplotlib',
comment='Movie support')
writer = animation.FFMpegWriter(fps=2, metadata=metadata, codec='h264')
fig, axes = plt.subplots(figsize=(8, 8))
writer.setup(fig, 'C:/Users/arey/files/Projects/Mustique/' +
runLog['Run'][m][0:-20] + 'Intake.mp4')
# Animation function
for i in np.arange(1, 384, 1): #384
if m == 8 or m == 17:
disPlot = 7
elif m == 11 or m == 15:
disPlot = 13
elif m == 14 or m == 16:
disPlot = 8
# fig, axes = plt.subplots(figsize=(8, 8))
axes.cla()
# Plot salinity
# Select salinity data at last time step
plotDat = ds_list[m]['Salinity'][i, :]
# Identify nan values
plot_nan = np.isnan(plotDat)
# Add additional nan values to show bed
plot_nan = plot_nan | (plotDat<33.725)
# Plot all selected values at a given later and not nan
axDHI = dfs_list[m].plot(plotDat[(dfs_list[m].layer_ids == 0) & (~plot_nan)], plot_type='contourf',
show_mesh=False, cmap='inferno', ax=axes, add_colorbar=False,
vmin=vmin, vmax=vmax, label='Salinity (PSU)', levels=24,
elements=dfs_list[m].element_ids[(dfs_list[m].layer_ids == 0) & (~plot_nan)])
axes.scatter(readPoints[disPlot, 0], readPoints[disPlot, 1], marker='^', color='r', s=20)
# axes.set_xlim(left=696680.7, right=698252.7)
# axes.set_ylim(bottom=1425547.5, top=1426681.8)
axes.set_xlim(left=696508.3, right=698252.7)
axes.set_ylim(bottom=1425267.0, top=1426681.8)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32620')
axes.set_xlabel('Easting [m]')
axes.set_ylabel('Northing [m]')
axes.title.set_text(str(ds_list[m].time[i]))
if i == 1:
fig.subplots_adjust(right=0.8)
cmap = mpl.cm.inferno
norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
cax = plt.axes([0.82, 0.25, 0.04, 0.5])
cb1 = mpl.colorbar.ColorbarBase(cax, cmap=cmap,
norm=norm,
orientation='vertical')
cb1.set_label('Salinity (PSU)')
writer.grab_frame()
# plt.show()
print(i)
writer.finish()
# %% Plot Bathymetry # %% Plot Bathymetry
fig, axes = plt.subplots(figsize=(8, 8)) fig, axes = plt.subplots(figsize=(8, 8))
# Convert to feet and ignore missing # Convert to feet and ignore missing
bed_plot = (bed_list[m] * 3.28084) - 578.007
bed_nan = np.isnan(bed_plot)
axDHI = dfs_list[m].plot(bed_plot[~bed_nan], plot_type='contourf', show_mesh=False, cmap='magma_r', ax=axes, levels=9, axDHI = dfs_list[m].plot(bed_plot[~bed_nan], plot_type='contourf', show_mesh=False, cmap='magma_r', ax=axes, levels=9,
vmin=-80, vmax=0, label='Bed Elevation (ft)', vmin=-80, vmax=0, label='Bed Elevation (ft)',
elements=dfs_list[m].element_ids[~bed_nan]) elements=dfs_list[m].element_ids[~bed_nan])
@ -450,7 +485,7 @@ axDHI = dfs_list[m].plot(bed_plot[~bed_nan], plot_type='contourf', show_mesh=Fal
axes.set_xlim(left=427800, right=431000) axes.set_xlim(left=427800, right=431000)
axes.set_ylim(bottom=4758000, top=4762500) axes.set_ylim(bottom=4758000, top=4762500)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616') ctx.add_basemap(axes, source=mapbox, crs='EPSG:32620')
fig.suptitle('Bed Elevation', fontsize=18) fig.suptitle('Bed Elevation', fontsize=18)
axes.set_xlabel('Easting [m]') axes.set_xlabel('Easting [m]')
@ -458,9 +493,9 @@ axes.set_ylabel('Northing [m]')
plt.show() plt.show()
fig.savefig( # fig.savefig(
'//srv-mad3/Projects/13632.101 South Shore Breakwater/10_Reports&Pres/13632.101.R3 Ph I BOD/Support files/' + # '//srv-mad3/Projects/13632.101 South Shore Breakwater/10_Reports&Pres/13632.101.R3 Ph I BOD/Support files/' +
'Bed_Elevation.png', bbox_inches='tight', dpi=300) # 'Bed_Elevation.png', bbox_inches='tight', dpi=300)
# %% Read time series # %% Read time series
MIKEds_list = [None] * (max(modelPlot) + 1) MIKEds_list = [None] * (max(modelPlot) + 1)

View File

@ -31,6 +31,8 @@ from metpy.plots import ctables
from siphon.catalog import TDSCatalog from siphon.catalog import TDSCatalog
import gsw as gsw
#%% Helper function for finding proper time variable #%% Helper function for finding proper time variable
def find_time_var(var, time_basename='time'): def find_time_var(var, time_basename='time'):
@ -42,62 +44,25 @@ def find_time_var(var, time_basename='time'):
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
importPaths = ['C:/Users/arey/files/Projects/West Coast/Pre_Construction/Great House/', importPaths = ['C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Great House',
'C:/Users/arey/files/Projects/West Coast/Pre_Construction/Greensleeves/', 'C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Greensleeves',
'C:/Users/arey/files/Projects/West Coast/Pre_Construction/Old Queens Fort/', 'C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Old Queens Fort']
'C:/Users/arey/files/Projects/West Coast/Construction/Great House/',
'C:/Users/arey/files/Projects/West Coast/Construction/Greensleeves/',
'C:/Users/arey/files/Projects/West Coast/Construction/Old Queens Fort/',
'C:/Users/arey/files/Projects/West Coast/Post_Construction/Great House/',
'C:/Users/arey/files/Projects/West Coast/Post_Construction/Greensleeves/',
'C:/Users/arey/files/Projects/West Coast/Post_Construction/Old Queens Fort/',
'C:/Users/arey/files/Projects/West Coast/Monitoring_Nov/Great House/',
'C:/Users/arey/files/Projects/West Coast/Monitoring_Nov/Greensleeves/',
'C:/Users/arey/files/Projects/West Coast/Monitoring_Nov/Old Queens Fort/']
siteNames = ['Great House', siteNames = ['Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves', 'Greensleeves',
'Old Queens Fort'] 'Old Queens Fort']
timeLabels= ['Before Construction', timeLabels= ['February',
'Before Construction', 'February',
'Before Construction', 'February']
'During Construction',
'During Construction',
'During Construction',
'After Construction',
'After Construction',
'After Construction',
'Monitoring',
'Monitoring',
'Monitoring']
wave_bts_file = [ wave_bts_file = [
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
None, None,
None, None,
None] None]
for s in range(0, 3):
for s in range(9,13):
## Define master import path ## Define master import path
importPath = importPaths[s] importPath = importPaths[s]
siteName = siteNames[s] siteName = siteNames[s]
@ -105,49 +70,43 @@ for s in range(9,13):
importFiles = os.listdir(importPath) importFiles = os.listdir(importPath)
# Initialize import variables # Initialize import variables
RBR_File = None OBS_File = None
JFE_File = None
GPS_File = None GPS_File = None
for i in range(0, len(importFiles)): for i in range(0, len(importFiles)):
if '.xlsx' in importFiles[i] and 'Summary' not in importFiles[i]: if '.csv' in importFiles[i] and 'Summary' not in importFiles[i]:
RBR_File = importFiles[i] OBS_File = importFiles[i]
elif '_A.csv' in importFiles[i] and 'Summary' not in importFiles[i]: elif '.xls' in importFiles[i] and 'Summary' not in importFiles[i]:
JFE_File = importFiles[i]
elif '.csv' in importFiles[i] and 'Summary' not in importFiles[i]:
GPS_File = importFiles[i] GPS_File = importFiles[i]
#%% RBR Import Data #%% Obs Import Data
if RBR_File is not None: if OBS_File is not None:
RBR_Obs = pd.read_excel(importPath + RBR_File, Obs_dat = pd.read_csv(importPath + '/' + OBS_File, skiprows=0, header=0)
sheet_name='Data', skiprows=0, header=1)
#%% JFE Import Data # Drop rows with metadata
if JFE_File is not None: Obs_dat =Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()]
JFE_Obs = pd.read_csv(importPath + JFE_File, skiprows=30)
# Set Time Zone
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados').dt.tz_convert('UTC')
# Set Index
Obs_dat.set_index('DateTime', inplace=True)
#%% GPS Import Data #%% GPS Import Data
if GPS_File is not None: if GPS_File is not None:
GPS = pd.read_csv(importPath + GPS_File, GPS = pd.read_excel(importPath + '/' + GPS_File, header=0)
header=None, names=['Index', 'Date1', 'Time1', 'Date2', 'Time2', 'Northing', 'North', 'Easting', 'East', 'Var1', 'Var2'])
#convert GPS data to geodataframe #convert GPS data to geodataframe
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326")) GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(GPS.x, GPS.y, crs="EPSG:4326"))
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str)) GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['time']).dt.tz_localize('UTC')
GPS_gdf.set_index('DateTime', inplace=True) GPS_gdf.set_index('DateTime', inplace=True)
# Sort by time
GPS_gdf.sort_index(inplace=True)
# Convert to UTM # Convert to UTM
GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32621") GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32621")
else:
# Synthesize GPS data for great house
GPS_gdf = gp.read_file('C:/Users/arey/files/Projects/West Coast/GreatHousePath_R3.shp')
GPS_gdf['DateTime'] = pd.date_range(pd.to_datetime(RBR_Obs['Time'].iloc[0]),
pd.to_datetime(RBR_Obs['Time'].iloc[-1]),
periods=len(GPS_gdf)).values
GPS_gdf.set_index('DateTime', inplace=True)
#%% Read in site shapefile #%% Read in site shapefile
siteShp = gp.read_file('C:/Users/arey/files/Projects/West Coast/SitePolygons.shp') siteShp = gp.read_file('C:/Users/arey/files/Projects/West Coast/SitePolygons.shp')
siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621") siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")
@ -155,27 +114,13 @@ for s in range(9,13):
#%% Merge GPS to RBR #%% Merge GPS to RBR
# Process RBR into datetime # Process RBR into datetime
if RBR_File is not None: if OBS_File is not None:
RBR_Obs['DateTime'] = pd.to_datetime(RBR_Obs['Time'])
RBR_Obs.set_index('DateTime', inplace=True)
# Merge with GPS as dataframe # Merge with GPS as dataframe
RBR_Obs_geo = pd.merge_asof(RBR_Obs, GPS_gdf, Obs_geo = pd.merge_asof(Obs_dat, GPS_gdf,
left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('300s')) left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('300s'))
RBR_Obs_geo = gp.GeoDataFrame(RBR_Obs_geo, geometry=RBR_Obs_geo.geometry, crs="EPSG:32621") Obs_geo = gp.GeoDataFrame(Obs_geo, geometry=Obs_geo.geometry, crs="EPSG:32621")
#%% Merge GPS to JFE
if JFE_File is not None:
# Process JFE into datetime
JFE_Obs['DateTime'] = pd.to_datetime(JFE_Obs['Date'])
JFE_Obs.set_index('DateTime', inplace=True)
# Merge with GPS as dataframe
JFE_Obs_geo = pd.merge_asof(JFE_Obs, GPS_gdf, left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('60s'))
JFE_Obs_geo = gp.GeoDataFrame(JFE_Obs_geo, geometry=JFE_Obs_geo.geometry, crs="EPSG:32621")
#%% Find and setup key plotting details #%% Find and setup key plotting details
# Shaded Water # Shaded Water
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw' mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Sat water # Sat water
@ -186,44 +131,31 @@ for s in range(9,13):
axYlim = (1464769.2243017585, 1465135.2219089477) axYlim = (1464769.2243017585, 1465135.2219089477)
GFS_Lon = -59.6441 GFS_Lon = -59.6441
GFS_Lat = 13.2372 GFS_Lat = 13.2372
RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[2, 1]) Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[2, 1])
elif siteName == 'Greensleeves': elif siteName == 'Greensleeves':
axXlim = (213269.99233348924, 213648.1643157148) axXlim = (213269.99233348924, 213648.1643157148)
# axYlim = (1463378.1020314451, 1463843.5442048472) # axYlim = (1463378.1020314451, 1463843.5442048472)
axYlim = (1463378.1020314451, 1463950.5442048472) axYlim = (1463378.1020314451, 1463950.5442048472)
GFS_Lon = -59.6428 GFS_Lon = -59.6428
GFS_Lat = 13.2289 GFS_Lat = 13.2289
RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[1, 1]) Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[1, 1])
elif siteName == 'Old Queens Fort': elif siteName == 'Old Queens Fort':
axXlim = (213368.59866770002, 213745.6997016811) axXlim = (213368.59866770002, 213745.6997016811)
axYlim = (1460192.707288096, 1460672.371780407) axYlim = (1460192.707288096, 1460672.371780407)
GFS_Lon = -59.6419 GFS_Lon = -59.6419
GFS_Lat = 13.1960 GFS_Lat = 13.1960
RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[0, 1]) Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[0, 1])
# Set min and max times using conductivity # Set min and max times using conductivity
# if JFE_File is None: if Obs_geo['inArea'].any():
if RBR_Obs_geo['inArea'].any():
# First and last times from area in shapefile # First and last times from area in shapefile
minTime = RBR_Obs_geo[RBR_Obs_geo['inArea']==True].iloc[0, 0] minTime = Obs_geo[Obs_geo['inArea']==True].index[0]
maxTime = RBR_Obs_geo[RBR_Obs_geo['inArea']==True].iloc[-1, 0] maxTime = Obs_geo[Obs_geo['inArea']==True].index[-1]
else: else:
# First and last times if no GPS data # First and last times if no GPS data
minTime = RBR_Obs_geo.iloc[20, 0] minTime = Obs_geo.iloc[20, 0]
maxTime = RBR_Obs_geo.iloc[-20, 0] maxTime = Obs_geo.iloc[-20, 0]
# else:
# minTime = (RBR_Obs['Conductivity ']>5).idxmax()
# minTime = minTime + timedelta(seconds=30)
# maxTime = ((RBR_Obs['Conductivity ']<5) & (RBR_Obs['Time']>minTime)).idxmax()
# maxTime = maxTime - timedelta(seconds=30)
# obsPeriod = maxTime-minTime
#
# if (obsPeriod.seconds<180) | (maxTime<minTime):
# minTime = ((RBR_Obs['Conductivity ']>5) & (RBR_Obs['Time']>(minTime+timedelta(seconds=180)))).idxmax()
# minTime = minTime + timedelta(seconds=30)
# maxTime = ((RBR_Obs['Conductivity ']<5) & (RBR_Obs['Time']>minTime)).idxmax()
# maxTime = maxTime - timedelta(seconds=30)
metDate = minTime - timedelta( metDate = minTime - timedelta(
hours = minTime.hour % 6, hours = minTime.hour % 6,
@ -335,38 +267,35 @@ for s in range(9,13):
met_tp = -999 met_tp = -999
met_mwd = -999 met_mwd = -999
#%% Add PSU and depth units
Obs_geo['PSU'] = gsw.conversions.SP_from_C(Obs_geo['CH3:Conductivity(mS/cm)'].values,
Obs_geo['CH1:Temperature(degC)'].values,
Obs_geo['CH0:Pressure(dbar)'].values)
Obs_geo['Depth'] = (Obs_geo['CH0:Pressure(dbar)'].astype(float)*10000)/(gsw.rho(Obs_geo['PSU'].values,
Obs_geo['CH1:Temperature(degC)'].values,
Obs_geo['CH0:Pressure(dbar)'].values) * 9.81)
#%% Plot time series for Geo data #%% Plot time series for Geo data
fontprops = fm.FontProperties(size=25) fontprops = fm.FontProperties(size=25)
if JFE_File is None:
fig, axesTMP = plt.subplots(nrows=1, ncols=1, figsize=(19, 5), constrained_layout=True)
RBRparam = ['Turbidity '] fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(19, 25), constrained_layout=True)
RBRparamName = ['Turbidity [NTU]'] dataTable = np.zeros([14, 4])
RBRparmCmap = [cmo.turbid] roundIDX = [1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1]
RBRparamMin = [0.0]
RBRparamMax = [60.0]
dataTable = np.zeros([9, 4]) params = ['Depth', 'PSU', 'CH8:Oxygen_Saturation(%)', 'CH1:Temperature(degC)',
roundIDX = [1, 1, 0, 1, 1, 1, 1, 1] 'CH6:Turbidity(NTU)', 'CH2:Chlorophyll_a(ppb)']
axes = [] paramName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ saturation [%]', 'Temperature [degC]',
axes.append(axesTMP) 'Turbidity [FTU]', 'Chl-a [ppb]']
else:
fig, axes = plt.subplots(nrows=7, ncols=1, figsize=(19, 25), constrained_layout=True)
dataTable = np.zeros([15, 4])
roundIDX = [1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1]
RBRparam = ['Depth ', 'Salinity ', 'Dissolved O₂ saturation ', 'Temperature '] parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae]
RBRparamName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ saturation [%]', 'Temperature [degC]'] # paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0]
RBRparmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal] # paramMax = [1.0, 36.0, 34.0, 31.0, 20.0, 1.0]
RBRparamMin = [0.0, 34.0, 32.5, 29.0] paramMin = [0.0, 34.0, 100, 26.0, 0, 0]
RBRparamMax = [1.0, 36.0, 34.0, 31.0] paramMax = [1.0, 36.0, 130, 29.0, 50.0, 12000]
JFEparam = ['Turb. -M[FTU]', 'Chl-Flu.[ppb]', 'Chl-a[ug/l]']
JFEparamName = ['Turbidity [FTU]', 'Chl-Flu. [ppb]', 'Chl-a [ug/l]']
JFEparamMin = [0.0, 0.0, 0.0]
JFEparamMax = [20.0, 1.0, 1.0]
fig.patch.set_facecolor('white') fig.patch.set_facecolor('white')
@ -384,119 +313,58 @@ for s in range(9,13):
ilocs_max = [] ilocs_max = []
ilocs_max_pts = [] ilocs_max_pts = []
RBR_mask = [] OBS_mask = []
JFE_mask = []
for paramIDX, param in enumerate(RBRparam): for paramIDX, param in enumerate(params):
RBR_Obs_geo.loc[minTime:maxTime, param].plot( Obs_geo.loc[minTime:maxTime, param].plot(
ax=axes[paramIDX], label='1 Second Observations', color='lightgrey') # Note the space in the col name ax=axes[paramIDX], label='1 Second Observations', color='lightgrey') # Note the space in the col name
# Create mask for RBR data based on time and parameter minimum # Create mask for RBR data based on time and parameter minimum
RBR_mask.append(((RBR_Obs_geo.index>minTime) & OBS_mask.append(((Obs_geo.index>minTime) &
(RBR_Obs_geo.index<maxTime) & (Obs_geo.index<maxTime) &
(RBR_Obs_geo[param]>RBRparamMin[paramIDX]))) (Obs_geo[param]>paramMin[paramIDX])))
RBR_smoothed = RBR_Obs_geo.loc[RBR_mask[paramIDX], param].rolling( OBS_smoothed = Obs_geo.loc[OBS_mask[paramIDX], param].rolling(
60, win_type='nuttall',center=True).mean() 12, win_type='nuttall',center=True).mean()
RBR_smoothed.plot( OBS_smoothed.plot(
ax=axes[paramIDX], label='1 Minute Average', color='black', ax=axes[paramIDX], label='1 Minute Average', color='black',
linewidth=3) linewidth=3)
# Find the local maximums for Turbidity # Find the local maximums for Turbidity
if param == 'Turbidity ': if param == 'CH6:Turbidity(NTU)':
ilocs_max.append(argrelextrema(RBR_smoothed.values, ilocs_max.append(argrelextrema(OBS_smoothed.values,
np.greater_equal, order=40, mode='wrap')[0]) np.greater_equal, order=6, mode='wrap')[0])
# Add start and end points? # Add start and end points?
# ilocs_max = np.insert(ilocs_max, 0, 10) # ilocs_max = np.insert(ilocs_max, 0, 10)
# ilocs_max[-1] = len(RBR_smoothed.values)-10 # ilocs_max[-1] = len(OBS_smoothed.values)
ilocs_max_pts.append(RBR_smoothed.iloc[ilocs_max[paramIDX]].index.values)
ilocs_max_pts.append(OBS_smoothed.iloc[ilocs_max[paramIDX]].index.values)
# Add labels if GPS data is available # Add labels if GPS data is available
if GPS_File is not None: if GPS_File is not None:
axes[paramIDX].scatter(RBR_smoothed.iloc[ # axes[paramIDX].scatter(OBS_smoothed.iloc[
ilocs_max[paramIDX]].index, np.ones(len(ilocs_max[paramIDX])) * 30, 75, # ilocs_max[paramIDX]].index, np.ones(len(ilocs_max[paramIDX])) * 30, 75,
color='blue') # color='blue')
for a in range(0, len(ilocs_max[paramIDX])): for a in range(0, len(ilocs_max[paramIDX])):
axes[paramIDX].annotate(str(a+1), (ilocs_max_pts[paramIDX][a], 30), fontsize=30) axes[paramIDX].annotate(str(a+1), (ilocs_max_pts[paramIDX][a], 12), fontsize=30)
else: else:
ilocs_max.append(None) ilocs_max.append(None)
ilocs_max_pts.append(None) ilocs_max_pts.append(None)
# dataTable[paramIDX+7, 0] = RBR_Obs_geo.loc[minTime:maxTime, param].mean(skipna=True) dataTable[paramIDX+7, 0] = OBS_smoothed.mean(skipna=True)
# dataTable[paramIDX+7, 1] = RBR_Obs_geo.loc[minTime:maxTime, param].std(skipna=True) dataTable[paramIDX+7, 1] = OBS_smoothed.std(skipna=True)
# dataTable[paramIDX+7, 2] = max(RBR_Obs_geo.loc[minTime:maxTime, param].min(skipna=True), 0) dataTable[paramIDX+7, 2] = max(OBS_smoothed.min(skipna=True), 0)
# dataTable[paramIDX+7, 3] = RBR_Obs_geo.loc[minTime:maxTime, param].max(skipna=True) dataTable[paramIDX+7, 3] = OBS_smoothed.max(skipna=True)
dataTable[paramIDX+7, 0] = RBR_smoothed.mean(skipna=True)
dataTable[paramIDX+7, 1] = RBR_smoothed.std(skipna=True)
dataTable[paramIDX+7, 2] = max(RBR_smoothed.min(skipna=True), 0)
dataTable[paramIDX+7, 3] = RBR_smoothed.max(skipna=True)
axes[paramIDX].set_ylabel(RBRparamName[paramIDX]) axes[paramIDX].set_ylabel(paramName[paramIDX])
axes[paramIDX].set_title('RBR: ' + RBRparamName[paramIDX]) axes[paramIDX].set_title(paramName[paramIDX])
axes[paramIDX].set_xlabel('') axes[paramIDX].set_xlabel('')
axes[paramIDX].set_ylim(RBRparamMin[paramIDX], RBRparamMax[paramIDX]) axes[paramIDX].set_ylim(paramMin[paramIDX], paramMax[paramIDX])
axes[paramIDX].legend(loc='upper right') axes[paramIDX].legend(loc='upper right')
if JFE_File is not None: # axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))
for paramIDX, param in enumerate(JFEparam):
JFE_Obs_geo.loc[minTime:maxTime, param].plot(
ax=axes[paramIDX+4], label='15 Second Observations', color='lightgrey')
JFE_mask.append(((JFE_Obs_geo.index > minTime) &
(JFE_Obs_geo.index < maxTime) &
(JFE_Obs_geo[param] > JFEparamMin[paramIDX])))
JFE_smoothed = JFE_Obs_geo.loc[JFE_mask[paramIDX], param].rolling(
20, win_type='nuttall', center=True).mean()
JFE_smoothed.plot(
ax=axes[paramIDX+4], label='1 Minute Average', color='black',
linewidth=3)
# Find the local maximums for Turbidity
if param == 'Turb. -M[FTU]':
ilocs_max.append(argrelextrema(JFE_smoothed.values,
np.greater_equal, order=60, mode='wrap')[0])
# Add start and end points?
# ilocs_max = np.insert(ilocs_max, 0, 10)
# ilocs_max[-1] = len(RBR_smoothed.values)-10
ilocs_max_pts.append(JFE_smoothed.iloc[ilocs_max[paramIDX+4]].index.values)
# Add labels if GPS data is available
if GPS_File is not None:
axes[paramIDX+4].scatter(JFE_smoothed.iloc[
ilocs_max[paramIDX+4]].index, np.ones(len(ilocs_max[paramIDX+4])) * 10, 75,
color='blue')
for a in range(0, len(ilocs_max[paramIDX+4])):
axes[paramIDX+4].annotate(str(a + 1), (ilocs_max_pts[paramIDX+4][a], 10), fontsize=30)
else:
ilocs_max.append(None)
ilocs_max_pts.append(None)
# dataTable[paramIDX+4+7, 0] = JFE_Obs_geo.loc[minTime:maxTime, param].mean(skipna=True)
# dataTable[paramIDX+4+7, 1] = JFE_Obs_geo.loc[minTime:maxTime, param].std(skipna=True)
# dataTable[paramIDX+4+7, 2] = max(JFE_Obs_geo.loc[minTime:maxTime, param].min(skipna=True), 0)
# dataTable[paramIDX+4+7, 3] = JFE_Obs_geo.loc[minTime:maxTime, param].max(skipna=True) dataTable[paramIDX+4+7, 0] = JFE_Obs_geo.loc[minTime:maxTime, param].mean(skipna=True)
dataTable[paramIDX+4+7, 0] = JFE_smoothed.mean(skipna=True)
dataTable[paramIDX+4+7, 1] = JFE_smoothed.std(skipna=True)
dataTable[paramIDX+4+7, 2] = max(JFE_smoothed.min(skipna=True), 0)
dataTable[paramIDX+4+7, 3] = JFE_smoothed.max(skipna=True)
axes[paramIDX+4].set_ylabel(JFEparamName[paramIDX])
axes[paramIDX+4].set_title('JFE: ' + JFEparamName[paramIDX])
axes[paramIDX+4].set_xlabel('')
axes[paramIDX+4].set_ylim(JFEparamMin[paramIDX], JFEparamMax[paramIDX])
axes[paramIDX+4].legend(loc='upper right')
axes[paramIDX+4].set_xlabel(minTime.strftime('%B %#d, %Y'))
else:
axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))
# Formate Data Table # Formate Data Table
dataTableFormat_mean = [] dataTableFormat_mean = []
@ -517,33 +385,29 @@ for s in range(9,13):
dfOutFormat.columns = [np.array([minTime.strftime('%B %#d, %Y'), minTime.strftime('%B %#d, %Y')]), dfOutFormat.columns = [np.array([minTime.strftime('%B %#d, %Y'), minTime.strftime('%B %#d, %Y')]),
np.array(['Mean', 'Min / Max'])] np.array(['Mean', 'Min / Max'])]
# Change the row indexes # Change the row indexes
if JFE_File is not None: dfOut.iloc[13, 0] = minTime.strftime('%B %#d, %Y')
dfOut.iloc[14, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]', rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]', 'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
'Mean Wave Direction Offshore [deg]', 'Depth [m]', 'Salinity [PSU]', 'Mean Wave Direction Offshore [deg]', 'Depth [m]', 'Salinity [PSU]',
'Dissolved O₂ saturation [%]', 'Temperature [degC]', 'Dissolved O₂ saturation [%]', 'Temperature [degC]',
'Turbidity [FTU]', 'Chl-Flu. [ppb]', 'Chl-a [ug/l]', 'Observation Date'] 'Turbidity [FTU]', 'Chl-a [ppb]', 'Observation Date']
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
else:
dfOut.iloc[8, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
'Mean Wave Direction Offshore [deg]',
'Turbidity [NTU]', 'Observation Date']
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
fig.suptitle(siteName + ', ' + minTime.strftime('%B %#d, %Y') + ' (' + timeLabel + ')', fontsize=30) fig.suptitle(siteName + ', ' + minTime.strftime('%B %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
plt.show() plt.show()
dfOut.to_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx') dfOut.to_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
dfOutFormat.to_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx') dfOutFormat.to_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx')
dfOut.to_csv(importPath + '/Summary_Stats_' + siteName + '.csv') dfOut.to_csv(importPath + '/Summary_Stats_' + siteName + '.csv')
if not os.path.exists(importPath + '/Figures'):
os.mkdir(importPath + '/Figures')
fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.pdf', fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.pdf',
bbox_inches='tight') bbox_inches='tight')
fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.png', fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.png',
@ -551,12 +415,6 @@ for s in range(9,13):
#%% Plot Maps #%% Plot Maps
if JFE_File is None:
# Only Turbidity Data
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(9, 9), constrained_layout=True)
ax = []
ax.append(axes)
else:
fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(19, 25), constrained_layout=True) fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(19, 25), constrained_layout=True)
ax = axes.flat ax = axes.flat
@ -567,30 +425,21 @@ for s in range(9,13):
x, y, arrow_length = 0.95, 0.93, 0.20 x, y, arrow_length = 0.95, 0.93, 0.20
plt.rcParams.update({'font.size': 22}) plt.rcParams.update({'font.size': 22})
axXlimTT = (RBR_Obs_geo.loc[minTime:maxTime].geometry.x.min()-100, axXlimTT = (Obs_geo.loc[minTime:maxTime].geometry.x.min()-100,
RBR_Obs_geo.loc[minTime:maxTime].geometry.x.max()+100) Obs_geo.loc[minTime:maxTime].geometry.x.max()+100)
axYlimTT = (RBR_Obs_geo.loc[minTime:maxTime].geometry.y.min()-100, axYlimTT = (Obs_geo.loc[minTime:maxTime].geometry.y.min()-100,
RBR_Obs_geo.loc[minTime:maxTime].geometry.y.max()+100) Obs_geo.loc[minTime:maxTime].geometry.y.max()+100)
plt.setp(axes, xlim=axXlim, ylim=axYlim) plt.setp(axes, xlim=axXlim, ylim=axYlim)
# plt.setp(axes, xlim=axXlimTT, ylim=axYlimTT) # plt.setp(axes, xlim=axXlimTT, ylim=axYlimTT)
# Plot the RBR observations # Plot the RBR observations
# Salinity # Salinity
for paramIDX, param in enumerate(RBRparam): for paramIDX, param in enumerate(params):
if RBR_File is not None: Obs_geo.loc[minTime:maxTime].plot(
column=param, ax=ax[paramIDX], vmin=paramMin[paramIDX], vmax=paramMax[paramIDX],
# Draw thick black line to show approx path legend=True, legend_kwds={'label': paramName[paramIDX]},
if GPS_File is None: cmap=parmCmap[paramIDX], markersize=20)
ax[paramIDX].scatter(RBR_Obs_geo.loc[minTime:maxTime].geometry.x,
RBR_Obs_geo.loc[minTime:maxTime].geometry.y,
150, marker='o', color='black', label='Approximate Path')
plt.legend(loc='upper left')
RBR_Obs_geo.loc[minTime:maxTime].plot(
column=param, ax=ax[paramIDX], vmin=RBRparamMin[paramIDX], vmax=RBRparamMax[paramIDX],
legend=True, legend_kwds={'label': RBRparamName[paramIDX]},
cmap=RBRparmCmap[paramIDX], markersize=20)
ctx.add_basemap(ax[paramIDX], source=mapbox, crs='EPSG:32621') ctx.add_basemap(ax[paramIDX], source=mapbox, crs='EPSG:32621')
@ -601,14 +450,15 @@ for s in range(9,13):
# RBR_Obs_geo.loc[RBR_mask].iloc[ # RBR_Obs_geo.loc[RBR_mask].iloc[
# ilocs_max, :].geometry.y, 75, marker='o', color='black') # ilocs_max, :].geometry.y, 75, marker='o', color='black')
if (not RBR_Obs_geo.geometry.isnull().all()) & (GPS_File is not None) & (param == 'Turbidity '): if (not Obs_geo.geometry.isnull().all()) &\
(GPS_File is not None) & (param == 'CH6:Turbidity(NTU)'):
for a in range(0, len(ilocs_max[paramIDX])): for a in range(0, len(ilocs_max[paramIDX])):
ax[paramIDX].annotate(str(a + 1), (RBR_Obs_geo.loc[RBR_mask[paramIDX]].iloc[ ax[paramIDX].annotate(str(a + 1), (Obs_geo.loc[OBS_mask[paramIDX]].iloc[
ilocs_max[paramIDX][a], :].geometry.x, ilocs_max[paramIDX][a], :].geometry.x,
RBR_Obs_geo.loc[RBR_mask[paramIDX]].iloc[ Obs_geo.loc[OBS_mask[paramIDX]].iloc[
ilocs_max[paramIDX][a], :].geometry.y), fontsize=30) ilocs_max[paramIDX][a], :].geometry.y), fontsize=30)
ax[paramIDX].set_title(RBRparamName[paramIDX]) ax[paramIDX].set_title(paramName[paramIDX])
# ax[paramIDX].set_ylabel('UTM 21N [m]') # ax[paramIDX].set_ylabel('UTM 21N [m]')
# ax[paramIDX].set_xlabel('UTM 21N [m]') # ax[paramIDX].set_xlabel('UTM 21N [m]')
ax[paramIDX].locator_params(axis='y', nbins=3) ax[paramIDX].locator_params(axis='y', nbins=3)
@ -626,49 +476,7 @@ for s in range(9,13):
ha='center', va='center', fontsize=35, ha='center', va='center', fontsize=35,
xycoords=ax[paramIDX].transAxes) xycoords=ax[paramIDX].transAxes)
# Plot Plot JFE Points
JFEparam = ['Turb. -M[FTU]', 'Chl-Flu.[ppb]']
JFEparamName = ['Turbidity [FTU]', 'Chl-Flu. [ppb]']
JFEparamCmp = [cmo.turbid, cmo.algae]
JFEparamMin = [0.0, 0.0]
JFEparamMax = [10.0, 1.0]
if JFE_File is not None:
for paramIDX, param in enumerate(JFEparam):
if JFE_File is not None:
JFE_Obs_geo.loc[minTime:maxTime].plot(
column=param, ax=ax[paramIDX+4], vmin=JFEparamMin[paramIDX], vmax=JFEparamMax[paramIDX],
legend=True, legend_kwds={'label': JFEparamName[paramIDX]},
cmap=JFEparamCmp[paramIDX], markersize=20) # Note the space in the col name
ctx.add_basemap(ax[paramIDX+4], source=mapbox, crs='EPSG:32621')
# Add time labels
if (not JFE_Obs_geo.geometry.isnull().all()) & (GPS_File is not None) & (param == 'Turb. -M[FTU]'):
for a in range(0, len(ilocs_max[paramIDX+4])):
ax[paramIDX+4].annotate(str(a + 1), (JFE_Obs_geo.loc[JFE_mask[paramIDX]].iloc[
ilocs_max[paramIDX+4][a], :].geometry.x,
JFE_Obs_geo.loc[JFE_mask[paramIDX]].iloc[
ilocs_max[paramIDX+4][a], :].geometry.y), fontsize=30)
ax[paramIDX+4].set_title(JFEparamName[paramIDX])
# ax[paramIDX+4].set_ylabel('UTM 21N [m]')
# ax[paramIDX+4].set_xlabel('UTM 21N [m]')
ax[paramIDX+4].locator_params(axis='y', nbins=3)
ax[paramIDX+4].ticklabel_format(useOffset=False, style='plain', axis='both')
ax[paramIDX+4].get_xaxis().set_ticks([])
ax[paramIDX+4].get_yaxis().set_ticks([])
#Add scale-bar
scalebar = AnchoredSizeBar(ax[paramIDX+4].transData,
100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
ax[paramIDX+4].add_artist(scalebar)
ax[paramIDX+4].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
arrowprops=dict(facecolor='black', width=6, headwidth=30),
ha='center', va='center', fontsize=25,
xycoords=ax[paramIDX+4].transAxes)
fig.suptitle(siteName + ', ' + minTime.strftime('%b %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
plt.show() plt.show()
if not os.path.exists(importPath + '/Figures'): if not os.path.exists(importPath + '/Figures'):
os.mkdir(importPath + '/Figures') os.mkdir(importPath + '/Figures')

View File

@ -0,0 +1,102 @@
## Read, process, and merge bathymetry data
# Author: AJMR
# Date: 2022-02-10
#%% Import
import pandas as pd
import geopandas as gp
from mikeio import Mesh
import math
from scipy.spatial import cKDTree
from shapely.geometry import Point
import numpy as np
import cartopy.crs as ccrs
import contextily as ctx
import matplotlib.pyplot as plt
#%% Functions
def ckdnearest(gdA, gdB):
    """Attach to each row of gdA its nearest neighbour from gdB.

    Builds a KD-tree over gdB's point geometries, queries it with gdA's
    points, and returns a new frame holding gdA's columns, the matched
    gdB attributes (geometry dropped), and a 'dist' column with the
    Euclidean distance to the match. Row order follows gdA; indexes are
    reset to a plain RangeIndex.
    """
    coords_a = np.array([(pt.x, pt.y) for pt in gdA.geometry])
    coords_b = np.array([(pt.x, pt.y) for pt in gdB.geometry])
    tree = cKDTree(coords_b)
    dist, nearest_idx = tree.query(coords_a, k=1)
    # Pull the matched attribute rows from gdB, aligned positionally with gdA.
    matched = gdB.iloc[nearest_idx].drop(columns="geometry").reset_index(drop=True)
    pieces = [
        gdA.reset_index(drop=True),
        matched,
        pd.Series(dist, name='dist'),
    ]
    return pd.concat(pieces, axis=1)
#%% Setup Paths
# Local folder with the raw survey xyz files; mapbox tile URL is used only for plotting.
dataPath = "C:/Users/arey/files/Projects/SouthShore/Bathy/"
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
#%% Read in Source 1
# Whitespace-delimited xyz soundings: inner breakwall SBES + median MBES survey.
S1 = pd.read_csv(dataPath + '211102_05 South MLW Breakwall inner Breakwall SBES and Median MBES NAD83 NAVD88 1x1.xyz',
                 delimiter='\s+', header=None, names=['x', 'y', 'z'])
#%% Read in Source 2
# Breakwall structure median MBES survey.
S2 = pd.read_csv(dataPath + '211102_05 South MLW Breakwall sturcture Median MBES NAD83 NAVD88 1x1.xyz',
                 delimiter='\s+', header=None, names=['x', 'y', 'z'])
#%% Read in Source 3
# Outer breakwall median MBES survey.
S3 = pd.read_csv(dataPath + '211102_05 South MLW Breakwall outer Breakwall Median MBES NAD83 NAVD88 1x1.xyz',
                 delimiter='\s+', header=None, names=['x', 'y', 'z'])
#%% Merge Sources
# Stack the three surveys into one point cloud (row indexes are duplicated, not reset).
bathy_merged = pd.concat([S1, S2, S3])
# Convert to GeoPandas
# Source CRS EPSG:2289 (a US state-plane zone, presumably in feet — TODO confirm),
# reprojected to UTM zone 16N (EPSG:32616, metres).
bathy_gdf = gp.GeoDataFrame(bathy_merged, crs='EPSG:2289',
                            geometry=gp.points_from_xy(bathy_merged.x, bathy_merged.y))
bathy_gdf.to_crs('EPSG:32616', inplace=True)
# Convert elevations with the 0.3048 ft->m factor (horizontal reprojection above
# does not touch z, so this is done explicitly here).
bathy_gdf.loc[:, 'z'] = bathy_gdf.loc[:, 'z'] * 0.3048
# Free the per-source frames; only the geodataframe is needed from here on.
del S1
del S2
del S3
del bathy_merged
# x/y are now redundant with the geometry column.
bathy_gdf.drop(columns=['x', 'y'], inplace=True)
#%% Read in Previous Bathymetry
# Node shapefile exported from the earlier SW model mesh (read for reference; not
# used further in this visible section).
previousBath = gp.read_file("//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/SSB_Mesh_v40_Nodes.shp")
#%% Read in new mesh
# NOTE(review): variable name is spelled 'currenMesh' (missing 't') throughout.
currenMesh = Mesh("//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/southshore_mesh2_v3InnerBounds.mesh")
#%% Make Geodataframe of mesh
# node_coordinates is an (n, 3) array of x, y, z — columns come through as 0, 1, 2.
currenMesh_gdf = gp.GeoDataFrame(currenMesh.node_coordinates, crs='EPSG:32616',
                                 geometry=gp.points_from_xy(currenMesh.node_coordinates[:, 0],
                                                            currenMesh.node_coordinates[:, 1]))
#%% Find Nearest Points to All
# For every mesh node, find the nearest merged-survey point and its distance.
Bathy_nearest = ckdnearest(currenMesh_gdf, bathy_gdf)
# Save to file
# Rename the positional node-coordinate columns; the original z becomes 'z_old'
# while 'z' (from bathy_gdf) is the new surveyed depth.
Bathy_nearest.rename(columns={0: 'x'}, inplace=True)
Bathy_nearest.rename(columns={1: 'y'}, inplace=True)
Bathy_nearest.rename(columns={2: 'z_old'}, inplace=True)
Bathy_nearest.to_parquet("C:/Users/arey/Files/Projects/SouthShoreNearest.parquet")
Bathy_nearest.to_file("C:/Users/arey/Files/Projects/SouthShoreNearest.shp")
#%% Update Mesh
# NOTE(review): this is an alias, not a copy — mutating newMesh also mutates
# currenMesh. Intentional here since only the updated mesh is written out.
newMesh = currenMesh
# Replace node z where a survey point lies within 100 (CRS units — metres in
# EPSG:32616) of the node; nodes farther away keep their original elevation.
newMesh.node_coordinates[Bathy_nearest.dist<100, 2] = Bathy_nearest.loc[Bathy_nearest.dist<100, 'z']
newMesh.write("//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/southshore_mesh2_v3InnerBounds_Bathy.mesh")

View File

@ -37,8 +37,31 @@ breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf) breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True) breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Modify Breakwater Crest to new elevation
# From email
breakSegments = np.zeros((4, 3))
breakSegments[0, :] = [2534702.7417, 372246.1099, 0] # 6+85
breakSegments[1, :] = [2535059.6902, 371628.1622, 180] # 14+00
breakSegments[2, :] = [2535910.4820, 370655.9834, 181.1] # 27+00
breakSegments[3, :] = [2536821.5682, 369722.4850, 180.5] # 40+05
breakSegments_gdf = gp.GeoDataFrame(breakSegments,
geometry=gp.points_from_xy(breakSegments[:, 0], breakSegments[:, 1]),
crs='EPSG:8796')
breakSegments_gdf.to_crs('EPSG:32616', inplace=True)
for idx in range(0, breakwaterPTS_Crest.shape[0]):
# Check where point is (based on x coordinate) and assign new crest elevation
if breakSegments_gdf.geometry.x[0] <= breakwaterPTS_Crest.geometry.x[idx] <= breakSegments_gdf.geometry.x[1]:
breakwaterPTS_Crest.loc[idx, 'Crest'] = breakSegments[1, 2]
elif breakSegments_gdf.geometry.x[1] <= breakwaterPTS_Crest.geometry.x[idx] <= breakSegments_gdf.geometry.x[2]:
breakwaterPTS_Crest.loc[idx, 'Crest'] = breakSegments[2, 2]
elif breakSegments_gdf.geometry.x[2] <= breakwaterPTS_Crest.geometry.x[idx] <= breakSegments_gdf.geometry.x[3]:
breakwaterPTS_Crest.loc[idx, 'Crest'] = breakSegments[3, 2]
# %% Read Model Results # %% Read Model Results
modelPlot = range(12, 20) modelPlot = range(31, 32)
for m in modelPlot: for m in modelPlot:
dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' + dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
@ -94,32 +117,41 @@ for m in modelPlot:
(transmit_paramA > transmit_paramB)) (transmit_paramA > transmit_paramB))
# Updated approach # Updated approach
breakwaterPTS_times['transmitted'][breakMask_A] = breakwaterPTS_times['Sign. Wave Height'][breakMask_A] * 0.09 * gamma breakwaterPTS_times.loc[breakMask_A, 'transmitted'] = breakwaterPTS_times['Sign. Wave Height'][breakMask_A] * 0.09 * gamma
breakwaterPTS_times['transmitted'][breakMask_B] = breakwaterPTS_times['Sign. Wave Height'][breakMask_B] *\ breakwaterPTS_times.loc[breakMask_B, 'transmitted'] = breakwaterPTS_times['Sign. Wave Height'][breakMask_B] *\
transmit_paramA transmit_paramA
breakwaterPTS_times['transmitted'][breakMask_C] = breakwaterPTS_times['Sign. Wave Height'][breakMask_C] * \ breakwaterPTS_times.loc[breakMask_C, 'transmitted'] = breakwaterPTS_times['Sign. Wave Height'][breakMask_C] * \
transmit_paramB[breakMask_C] transmit_paramB[breakMask_C]
# %% Merge with breakwater transformed waves # %% Merge with breakwater transformed waves
breakwaterPTS_end = breakwaterPTS_times['Time'] == breakwaterPTS_times['Time'].max() breakwaterPTS_end = breakwaterPTS_times['Time'] == breakwaterPTS_times['Time'].max()
# %% Read Inner Boundary Points North # %% Read Inner Boundary Points North
# innerBoundNorth = pd.read_csv(
# '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_North.txt',
# delimiter='\s+', header=None, names=['x', 'y'])
innerBoundNorth = pd.read_csv( innerBoundNorth = pd.read_csv(
'//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_North.txt', '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_Mesh2_North.txt',
delimiter='\s+', header=None, names=['x', 'y']) delimiter='\s+', header=None, names=['node', 'val', 'x', 'y'])
innerBoundNorth.sort_values(by=['x'], inplace=True) innerBoundNorth.sort_values(by=['x'], inplace=True)
innerBoundNorth_gdf = gp.GeoDataFrame(innerBoundNorth, crs='EPSG:32616', innerBoundNorth_gdf = gp.GeoDataFrame(innerBoundNorth, crs='EPSG:32616',
geometry=gp.points_from_xy(innerBoundNorth.x, innerBoundNorth.y)) geometry=gp.points_from_xy(innerBoundNorth.x, innerBoundNorth.y))
innerBoundNorth_gdf = innerBoundNorth_gdf.sjoin_nearest(breakwaterPTS_times.loc[breakwaterPTS_end, :]) innerBoundNorth_gdf = innerBoundNorth_gdf.sjoin_nearest(breakwaterPTS_times.loc[breakwaterPTS_end, :])
# %% Read Inner Boundary Points Mid # %% Read Inner Boundary Points Mid
# innerBoundMid = pd.read_csv(
# '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_Mid.txt',
# delimiter='\s+', header=None, names=['x', 'y'])
innerBoundMid = pd.read_csv( innerBoundMid = pd.read_csv(
'//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_Mid.txt', '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_Mesh2_Mid.txt',
delimiter='\s+', header=None, names=['x', 'y']) delimiter='\s+', header=None, names=['node', 'val', 'x', 'y'])
innerBoundMid.sort_values(by=['x'], inplace=True) innerBoundMid.sort_values(by=['x'], inplace=True)
innerBoundMid_gdf = gp.GeoDataFrame(innerBoundMid, crs='EPSG:32616', innerBoundMid_gdf = gp.GeoDataFrame(innerBoundMid, crs='EPSG:32616',
geometry=gp.points_from_xy(innerBoundMid.x, innerBoundMid.y)) geometry=gp.points_from_xy(innerBoundMid.x, innerBoundMid.y))
innerBoundMid_gdf = innerBoundMid_gdf.sjoin_nearest(breakwaterPTS_times.loc[breakwaterPTS_end, :]) innerBoundMid_gdf = innerBoundMid_gdf.sjoin_nearest(breakwaterPTS_times.loc[breakwaterPTS_end, :])
# %% Read Inner Boundary Points North
# %% Read Inner Boundary Points South
innerBoundSouth = pd.read_csv( innerBoundSouth = pd.read_csv(
'//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_South.txt', '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_South.txt',
delimiter='\s+', header=None, names=['x', 'y']) delimiter='\s+', header=None, names=['x', 'y'])

View File

@ -37,13 +37,18 @@ breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf) breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True) breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# Add in Default width and slope
breakwaterPTS_Crest['Width'] = 5.0
breakwaterPTS_Crest['Slope'] = 2.6
# %% Modify Breakwater Crest to new elevation # %% Modify Breakwater Crest to new elevation
# From email # From email
breakSegments = np.zeros((4, 3)) # X, Y, Crest, Slope, Cotangent
breakSegments[0, :] = [2534702.7417, 372246.1099, 0] # 6+85 breakSegments = np.zeros((4, 5))
breakSegments[1, :] = [2535059.6902, 371628.1622, 180] # 14+00 breakSegments[0, :] = [2534702.7417, 372246.1099, 0, 0, 0] # 6+85
breakSegments[2, :] = [2535910.4820, 370655.9834, 181.1] # 27+00 breakSegments[1, :] = [2535059.6902, 371628.1622, 180.5, 5.5, 2.6] # 14+00
breakSegments[3, :] = [2536821.5682, 369722.4850, 180.5] # 40+05 breakSegments[2, :] = [2535910.4820, 370655.9834, 181.1, 6.7, 2.6] # 27+00
breakSegments[3, :] = [2536821.5682, 369722.4850, 180.5, 5.5, 2.6] # 40+05
breakSegments_gdf = gp.GeoDataFrame(breakSegments, breakSegments_gdf = gp.GeoDataFrame(breakSegments,
geometry=gp.points_from_xy(breakSegments[:, 0], breakSegments[:, 1]), geometry=gp.points_from_xy(breakSegments[:, 0], breakSegments[:, 1]),
@ -55,13 +60,19 @@ for idx in range(0, breakwaterPTS_Crest.shape[0]):
# Check where point is (based on x coordinate) and assign new crest elevation # Check where point is (based on x coordinate) and assign new crest elevation
if breakSegments_gdf.geometry.x[0] <= breakwaterPTS_Crest.geometry.x[idx] <= breakSegments_gdf.geometry.x[1]: if breakSegments_gdf.geometry.x[0] <= breakwaterPTS_Crest.geometry.x[idx] <= breakSegments_gdf.geometry.x[1]:
breakwaterPTS_Crest.loc[idx, 'Crest'] = breakSegments[1, 2] breakwaterPTS_Crest.loc[idx, 'Crest'] = breakSegments[1, 2]
breakwaterPTS_Crest.loc[idx, 'Width'] = breakSegments[1, 3]
breakwaterPTS_Crest.loc[idx, 'Slope'] = breakSegments[1, 4]
elif breakSegments_gdf.geometry.x[1] <= breakwaterPTS_Crest.geometry.x[idx] <= breakSegments_gdf.geometry.x[2]: elif breakSegments_gdf.geometry.x[1] <= breakwaterPTS_Crest.geometry.x[idx] <= breakSegments_gdf.geometry.x[2]:
breakwaterPTS_Crest.loc[idx, 'Crest'] = breakSegments[2, 2] breakwaterPTS_Crest.loc[idx, 'Crest'] = breakSegments[2, 2]
breakwaterPTS_Crest.loc[idx, 'Width'] = breakSegments[2, 3]
breakwaterPTS_Crest.loc[idx, 'Slope'] = breakSegments[2, 4]
elif breakSegments_gdf.geometry.x[2] <= breakwaterPTS_Crest.geometry.x[idx] <= breakSegments_gdf.geometry.x[3]: elif breakSegments_gdf.geometry.x[2] <= breakwaterPTS_Crest.geometry.x[idx] <= breakSegments_gdf.geometry.x[3]:
breakwaterPTS_Crest.loc[idx, 'Crest'] = breakSegments[3, 2] breakwaterPTS_Crest.loc[idx, 'Crest'] = breakSegments[3, 2]
breakwaterPTS_Crest.loc[idx, 'Width'] = breakSegments[3, 3]
breakwaterPTS_Crest.loc[idx, 'Slope'] = breakSegments[3, 4]
# %% Read Model Results # %% Read Model Results
modelPlot = range(31, 32) modelPlot = range(33, 40)
for m in modelPlot: for m in modelPlot:
dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' + dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
@ -99,37 +110,53 @@ for m in modelPlot:
# %% Conversion # %% Conversion
breakwaterPTS_times['transmitted'] = np.full([breakwaterPTS_times.shape[0], 1], np.nan) breakwaterPTS_times['transmitted'] = np.full([breakwaterPTS_times.shape[0], 1], np.nan)
gamma = 1.2
transmit_paramA = 0.81 * gamma
transmit_paramB = 0.45 * gamma - 0.3 * gamma * \
(breakwaterPTS_times['Crest'] - breakwaterPTS_times['Surface elevation']) / \
breakwaterPTS_times['Sign. Wave Height']
# Identify different overtopping regimes # Identify different overtopping regimes
breakMask_A = (breakwaterPTS_times['Crest'] - breakwaterPTS_times['Surface elevation']) / \ breakMask_A = breakwaterPTS_times['Sign. Wave Height'] < 0
breakwaterPTS_times['Sign. Wave Height'] >= 1.2 breakMask_B = (breakwaterPTS_times['Width'] / breakwaterPTS_times['Sign. Wave Height']) <=8
breakMask_B = (((breakwaterPTS_times['Crest'] - breakwaterPTS_times['Surface elevation']) / breakMask_C = (breakwaterPTS_times['Width'] / breakwaterPTS_times['Sign. Wave Height']) >=12
breakwaterPTS_times['Sign. Wave Height'] < 1.2) & breakMask_Da = (breakwaterPTS_times['Width'] / breakwaterPTS_times['Sign. Wave Height']) >8
(transmit_paramA < transmit_paramB)) breakMask_Db = (breakwaterPTS_times['Width'] / breakwaterPTS_times['Sign. Wave Height']) <12
breakMask_C = (((breakwaterPTS_times['Crest'] - breakwaterPTS_times['Surface elevation']) / breakMask_D = breakMask_Da & breakMask_Db
breakwaterPTS_times['Sign. Wave Height'] < 1.2) &
(transmit_paramA > transmit_paramB))
# Updated approach # Updated approach
breakwaterPTS_times.loc[breakMask_A, 'transmitted'] = breakwaterPTS_times['Sign. Wave Height'][breakMask_A] * 0.09 * gamma breakwaterPTS_times.loc[breakMask_A, 'transmitted'] = 0
breakwaterPTS_times.loc[breakMask_B, 'transmitted'] = breakwaterPTS_times['Sign. Wave Height'][breakMask_B] *\ breakwaterPTS_times.loc[breakMask_B, 'transmitted'] = breakwaterPTS_times.loc[breakMask_B, 'Sign. Wave Height']*\
transmit_paramA np.maximum(0, np.minimum(0.8, (-0.4 * (breakwaterPTS_times.loc[breakMask_B, 'Crest'] - breakwaterPTS_times.loc[breakMask_B, 'Surface elevation']) /
breakwaterPTS_times.loc[breakMask_C, 'transmitted'] = breakwaterPTS_times['Sign. Wave Height'][breakMask_C] * \ breakwaterPTS_times.loc[breakMask_B, 'Sign. Wave Height'] + 0.64 * (breakwaterPTS_times.loc[breakMask_B, 'Width'] /
transmit_paramB[breakMask_C] breakwaterPTS_times.loc[breakMask_B, 'Sign. Wave Height']) ** (-0.31) * (1 - np.exp(-0.5 * (1 / breakwaterPTS_times.loc[breakMask_B, 'Slope'] /
np.sqrt(2 * math.pi * breakwaterPTS_times.loc[breakMask_B, 'Sign. Wave Height'] / 9.81 / breakwaterPTS_times.loc[breakMask_B, 'Peak Wave Period'] ** 2)))))))
breakwaterPTS_times.loc[breakMask_C, 'transmitted'] = breakwaterPTS_times.loc[breakMask_C, 'Sign. Wave Height'] * \
np.maximum(0.05, np.minimum(np.maximum(0, -0.006 * breakwaterPTS_times.loc[breakMask_C, 'Width'] / breakwaterPTS_times.loc[breakMask_C, 'Sign. Wave Height'] + 0.93),
(-0.35 * (breakwaterPTS_times.loc[breakMask_C, 'Crest'] - breakwaterPTS_times.loc[breakMask_C, 'Surface elevation']) /
breakwaterPTS_times.loc[breakMask_C, 'Sign. Wave Height'] + 0.51 * (breakwaterPTS_times.loc[breakMask_C, 'Width'] /
breakwaterPTS_times.loc[breakMask_C, 'Sign. Wave Height']) ** (-0.65) * (1 - np.exp(-0.41 * (1 / breakwaterPTS_times.loc[breakMask_C, 'Slope'] /
np.sqrt(2 * math.pi * breakwaterPTS_times.loc[breakMask_C, 'Sign. Wave Height'] / 9.81 / breakwaterPTS_times.loc[breakMask_C, 'Peak Wave Period'] ** 2)))))))
breakwaterPTS_times.loc[breakMask_D, 'transmitted'] = breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height'] * \
((np.maximum(0.05, np.minimum(np.maximum(0, -0.006 * breakwaterPTS_times.loc[breakMask_D, 'Width'] / breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height'] + 0.93),
(-0.35 * (breakwaterPTS_times.loc[breakMask_D, 'Crest'] - breakwaterPTS_times.loc[breakMask_D, 'Surface elevation']) /
breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height'] + 0.51 * (breakwaterPTS_times.loc[breakMask_D, 'Width'] /
breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height']) ** (-0.65) * (1 - np.exp(-0.41 * (1 / breakwaterPTS_times.loc[breakMask_D, 'Slope'] /
np.sqrt(2 * math.pi * breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height'] / 9.81 / breakwaterPTS_times.loc[breakMask_D, 'Peak Wave Period'] ** 2))))))) -
np.maximum(0, np.minimum(0.8, (-0.4 * (breakwaterPTS_times.loc[breakMask_D, 'Crest'] - breakwaterPTS_times.loc[breakMask_D, 'Surface elevation']) /
breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height'] + 0.64 * (breakwaterPTS_times.loc[breakMask_D, 'Width'] /
breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height']) ** (-0.31) * (1 - np.exp(-0.5 * (1 / breakwaterPTS_times.loc[breakMask_D,'Slope'] /
np.sqrt(2 * math.pi * breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height'] / 9.81 / breakwaterPTS_times.loc[breakMask_D,'Peak Wave Period'] ** 2)))))))) *
(breakwaterPTS_times.loc[breakMask_D,'Width'] / breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height'] - 8) / (12 - 8) +\
np.maximum(0, np.minimum(0.8, (-0.4 * (breakwaterPTS_times.loc[breakMask_D, 'Crest'] - breakwaterPTS_times.loc[breakMask_D, 'Surface elevation']) /
breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height'] + 0.64 * (breakwaterPTS_times.loc[breakMask_D, 'Width'] /
breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height']) ** (-0.31) * (1 - np.exp(-0.5 * (1 / breakwaterPTS_times.loc[breakMask_D, 'Slope'] /
np.sqrt(2 * math.pi * breakwaterPTS_times.loc[breakMask_D, 'Sign. Wave Height'] / 9.81 / breakwaterPTS_times.loc[breakMask_D, 'Peak Wave Period'] ** 2))))))))
# %% Merge with breakwater transformed waves # %% Merge with breakwater transformed waves
breakwaterPTS_end = breakwaterPTS_times['Time'] == breakwaterPTS_times['Time'].max() breakwaterPTS_end = breakwaterPTS_times['Time'] == breakwaterPTS_times['Time'].max()
# %% Read Inner Boundary Points North # %% Read Inner Boundary Points North
# innerBoundNorth = pd.read_csv(
# '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_North.txt',
# delimiter='\s+', header=None, names=['x', 'y'])
innerBoundNorth = pd.read_csv( innerBoundNorth = pd.read_csv(
'//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_Mesh2_North.txt', '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_Mesh2_North.txt',
delimiter='\s+', header=None, names=['node', 'val', 'x', 'y']) delimiter='\s+', header=None, names=['node', 'val', 'x', 'y'])
@ -169,7 +196,7 @@ for m in modelPlot:
inplace=True) inplace=True)
dfsfilename = '//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/InnerBounds/' + \ dfsfilename = '//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/InnerBounds/' + \
runLog['Run Name'][m] + 'ProdInnerNorth1.dfs1' runLog['Run Name'][m] + 'TestInnerNorth_RevB.dfs1'
items = [ItemInfo("Significant wave height", EUMType.Significant_wave_height, EUMUnit.meter), items = [ItemInfo("Significant wave height", EUMType.Significant_wave_height, EUMUnit.meter),
ItemInfo("Peak wave period", EUMType.Wave_period, EUMUnit.second), ItemInfo("Peak wave period", EUMType.Wave_period, EUMUnit.second),
@ -199,7 +226,7 @@ for m in modelPlot:
inplace=True) inplace=True)
dfsfilename = '//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/InnerBounds/' + \ dfsfilename = '//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/InnerBounds/' + \
runLog['Run Name'][m] + 'ProdInnerMid.dfs1' runLog['Run Name'][m] + 'TestInnerMid_RevB.dfs1'
items = [ItemInfo("Significant wave height", EUMType.Significant_wave_height, EUMUnit.meter), items = [ItemInfo("Significant wave height", EUMType.Significant_wave_height, EUMUnit.meter),
ItemInfo("Peak wave period", EUMType.Wave_period, EUMUnit.second), ItemInfo("Peak wave period", EUMType.Wave_period, EUMUnit.second),

View File

@ -21,7 +21,7 @@ pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakw
runLog = pd.read_excel(pth.as_posix(), "HD") runLog = pd.read_excel(pth.as_posix(), "HD")
# %% Read Model Results # %% Read Model Results
modelPlot = range(2, 6) modelPlot = range(6, 7)
curSpeed_list = [None] * (max(modelPlot) + 1) curSpeed_list = [None] * (max(modelPlot) + 1)
curElm_list = [None] * (max(modelPlot) + 1) curElm_list = [None] * (max(modelPlot) + 1)
@ -29,6 +29,9 @@ curU_list = [None] * (max(modelPlot) + 1)
curV_list = [None] * (max(modelPlot) + 1) curV_list = [None] * (max(modelPlot) + 1)
dfs_list = [None] * (max(modelPlot) + 1) dfs_list = [None] * (max(modelPlot) + 1)
ds_list = [None] * (max(modelPlot) + 1) ds_list = [None] * (max(modelPlot) + 1)
wl_list = [None] * (max(modelPlot) + 1)
wd_list = [None] * (max(modelPlot) + 1)
bed_list = [None] * (max(modelPlot) + 1)
for m in modelPlot: for m in modelPlot:
dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], dfsIN = Dfsu(pl.Path(runLog['Run Location'][m],
@ -40,7 +43,9 @@ for m in modelPlot:
curU_list[m] = ds_list[m]["U velocity"][-1] curU_list[m] = ds_list[m]["U velocity"][-1]
curV_list[m] = ds_list[m]["V velocity"][-1] curV_list[m] = ds_list[m]["V velocity"][-1]
curElm_list[m] = dfs_list[m].element_coordinates curElm_list[m] = dfs_list[m].element_coordinates
wl_list[m] = ds_list[m]["Surface elevation"][-1]
wd_list[m] = ds_list[m]["Total water depth"][-1]
bed_list[m] = wl_list[m] - wd_list[m]
# # Read Mesh for first model # # Read Mesh for first model
# mikeMeshIN = dfsIN.to_shapely() # mikeMeshIN = dfsIN.to_shapely()
# # Convert to list of polygons # # Convert to list of polygons
@ -59,14 +64,14 @@ for m in modelPlot:
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw' mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12 x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12) fontprops = fm.FontProperties(size=12)
modelPlot = range(2, 6) modelPlot = range(6, 7)
vmin = 0 vmin = 0
vmax = 1 vmax = 1
scale = 1 #5 scale = 1 #5
for m in modelPlot: for m in modelPlot:
if m < 4: if m < 4 or m == 6:
vmax = 5 vmax = 5
qmax = 4 qmax = 4
scale = 50 scale = 50
@ -134,6 +139,32 @@ for m in modelPlot:
'//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/04_MIKE21_HD/Images/Production/' + '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/04_MIKE21_HD/Images/Production/' +
runLog['Short Description'][m] + '_RevC_Current.png', bbox_inches='tight', dpi=300) runLog['Short Description'][m] + '_RevC_Current.png', bbox_inches='tight', dpi=300)
# %% Plot Bathymetry
# Contour the bed elevation (surface elevation minus total depth, computed in
# the results loop above) on a Mapbox basemap. Relies on `m` holding the last
# run index from the preceding loop.
M_TO_FT = 3.28084        # metres -> feet conversion factor
DATUM_SHIFT_FT = 578.007  # vertical datum shift in feet -- presumably converts the
                          # model vertical datum to the local chart datum; TODO confirm

fig, axes = plt.subplots(figsize=(8, 8))

# Convert to feet, shift to the plotting datum, and mask out elements with no
# bed value (NaN) so only wet/valid elements are contoured.
bed_plot = bed_list[m] * M_TO_FT - DATUM_SHIFT_FT
valid = ~np.isnan(bed_plot)
axDHI = dfs_list[m].plot(bed_plot[valid], plot_type='contourf', show_mesh=False,
                         cmap='magma_r', ax=axes, levels=9,
                         vmin=-80, vmax=0, label='Bed Elevation (ft)',
                         elements=dfs_list[m].element_ids[valid])

# Fixed UTM zone 16N window around the project site.
axes.set_xlim(left=427800, right=431000)
axes.set_ylim(bottom=4758000, top=4762500)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')

fig.suptitle('Bed Elevation', fontsize=18)
axes.set_xlabel('Easting [m]')
axes.set_ylabel('Northing [m]')
plt.show()

# NOTE(review): other savefig paths in this script use '//srv-mad3.baird.com/...';
# this one omits '.baird.com' -- confirm the short UNC host name resolves.
fig.savefig(
    '//srv-mad3/Projects/13632.101 South Shore Breakwater/10_Reports&Pres/13632.101.R3 Ph I BOD/Support files/' +
    'Bed_Elevation.png', bbox_inches='tight', dpi=300)
# %% Read time series
# Pre-allocate holders for the dfs0 time-series datasets (and their time axes),
# indexed by run number like the result lists above.
MIKEds_list = [None] * (max(modelPlot) + 1)
MIKEdsT_list = [None] * (max(modelPlot) + 1)

View File

@ -23,7 +23,7 @@ pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakw
runLog = pd.read_excel(pth.as_posix(), "ModelLog") runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read Model Results # %% Read Model Results
modelPlot = range(20, 29) modelPlot = range(40, 47)
hmo_list = [None] * (max(modelPlot) + 1) hmo_list = [None] * (max(modelPlot) + 1)
dfs_list = [None] * (max(modelPlot) + 1) dfs_list = [None] * (max(modelPlot) + 1)
@ -53,7 +53,8 @@ for m in modelPlot:
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw' mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12 x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12) fontprops = fm.FontProperties(size=12)
modelPlot = range(28, 29) modelPlot = range(40, 47)
for m in modelPlot: for m in modelPlot:
fig, axes = plt.subplots(figsize=(8, 8)) fig, axes = plt.subplots(figsize=(8, 8))
@ -88,7 +89,7 @@ for m in modelPlot:
plt.show() plt.show()
fig.savefig( fig.savefig(
'//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Images/Production/' + '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Images/Production-Mesh2/' +
runLog['Short Description'][m] + '_HM0.png', bbox_inches='tight', dpi=300) runLog['Short Description'][m] + '_HM0.png', bbox_inches='tight', dpi=300)
# %% Read time series # %% Read time series