Updated Mustique and SouthShore plotting

This commit is contained in:
Alexander Rey 2022-02-23 10:44:10 -05:00
parent 0dea526cd3
commit b08bca6217
6 changed files with 2122 additions and 0 deletions

View File

@ -0,0 +1,290 @@
## Plotting Mike21 SW results for SouthShore
# Author: AJMR
# December 22, 2021
# %% Setup Project
from mikeio import Dfsu, Mesh, Dfs2, Dfs0
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import matplotlib.pyplot as plt
import contextily as ctx
import matplotlib.font_manager as fm
# %% Read Model Log
# Load the "HD" sheet of the project model log from the network share;
# one row per model run (location, name, short description, ...).
_log_dir = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models")
pth = _log_dir / "13632.101.W.AJMR.Rev0_ModelLog.xlsx"
runLog = pd.read_excel(pth.as_posix(), "HD")
# %% Read Model Results
# Runs to read; result lists are sized to the largest run index so they can be
# indexed directly by run number (unused slots stay None).
modelPlot = range(6, 7)
curSpeed_list = [None] * (max(modelPlot) + 1)
curElm_list = [None] * (max(modelPlot) + 1)
curU_list = [None] * (max(modelPlot) + 1)
curV_list = [None] * (max(modelPlot) + 1)
dfs_list = [None] * (max(modelPlot) + 1)
ds_list = [None] * (max(modelPlot) + 1)
wl_list = [None] * (max(modelPlot) + 1)
wd_list = [None] * (max(modelPlot) + 1)
bed_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    # Open the full-domain dfsu result file for run m (path built from the run log)
    dfsIN = Dfsu(pl.Path(runLog['Run Location'][m],
                         runLog['Run Name'][m] + '.m21fm - Result Files', 'FullDomain.dfsu').as_posix())
    dfs_list[m] = dfsIN
    ds_list[m] = dfs_list[m].read()
    # Keep only the last time step ([-1]) of each item
    curSpeed_list[m] = ds_list[m]["Current speed"][-1]
    curU_list[m] = ds_list[m]["U velocity"][-1]
    curV_list[m] = ds_list[m]["V velocity"][-1]
    curElm_list[m] = dfs_list[m].element_coordinates
    wl_list[m] = ds_list[m]["Surface elevation"][-1]
    wd_list[m] = ds_list[m]["Total water depth"][-1]
    # Bed elevation = surface elevation minus total water depth
    bed_list[m] = wl_list[m] - wd_list[m]
    # # Read Mesh for first model
    # mikeMeshIN = dfsIN.to_shapely()
    # # Convert to list of polygons
    # mikeMeshIN_list = [p for p in mikeMeshIN]
    # Add to list
    # hmo_list[m] = gp.GeoDataFrame(hm0, geometry=mikeMeshIN_list)
    # Cleanup unnecessary variables
    # del mikeMeshIN
    # del mikeMeshIN_list
    del dfsIN
# %% Plotting
# Filled-contour current-speed map (converted to ft/s) with a quiver overlay
# on a regular overset grid, saved per run.
# Shaded Water
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
modelPlot = range(6, 7)
vmin = 0
vmax = 1
scale = 1 #5
for m in modelPlot:
    # Per-run plot settings: colour range (vmax), arrow saturation speed (qmax),
    # quiver scale, and minimum arrow length.
    if m < 4 or m == 6:
        vmax = 5
        qmax = 4
        scale = 50
        amin = 1
    else:
        vmax = 1
        qmax = 0.25
        scale = 75
        amin = 3
    fig, axes = plt.subplots(figsize=(8, 8))
    # Convert to feet and ignore missing
    cur_plot = curSpeed_list[m]*3.28084
    cur_nan = np.isnan(cur_plot)
    axDHI = dfs_list[m].plot(cur_plot[~cur_nan], plot_type='contourf', show_mesh=False, cmap='viridis', ax=axes, levels=11,
                             vmin=vmin, vmax=vmax, label='Current Speed (ft/s)',
                             elements=dfs_list[m].element_ids[~cur_nan])
    # Plot arrows on overset grid
    overGrid = dfs_list[m].get_overset_grid(dx=50)
    interpolant = dfs_list[m].get_2d_interpolant(overGrid.xy, n_nearest=1)
    # Interpolate velocity at last time step by selecting time from dataset
    overGrid_Cur = dfs_list[m].interp2d(ds_list[m][ds_list[m].time[-1]:], *interpolant, shape=(overGrid.ny, overGrid.nx))
    overGrid_U = overGrid_Cur["U velocity"][-1]
    overGrid_V = overGrid_Cur["V velocity"][-1]
    # Convert to ft/s
    overGrid_U_Scale = overGrid_U*3.28084
    overGrid_V_Scale = overGrid_V*3.28084
    # Speed magnitude; reused for the mask instead of recomputing the sqrt
    # (also dropped two dead np.zeros assignments that were immediately
    # overwritten in the original).
    r = np.hypot(overGrid_U_Scale, overGrid_V_Scale)
    curPlotMask = r > qmax
    # Normalize arrows to unit length
    overGrid_U_Plot = overGrid_U_Scale / r
    overGrid_V_Plot = overGrid_V_Scale / r
    # Scale arrows below qmax proportionally to speed
    overGrid_U_Plot[~curPlotMask] = overGrid_U_Plot[~curPlotMask] * (r[~curPlotMask] / qmax)
    overGrid_V_Plot[~curPlotMask] = overGrid_V_Plot[~curPlotMask] * (r[~curPlotMask] / qmax)
    qv = axDHI.quiver(overGrid.x, overGrid.y, overGrid_U_Plot, overGrid_V_Plot, scale=scale,
                      color='k', headlength=3.5, headwidth=3.5, headaxislength=3.5, width=0.0015, minlength=amin) #200
    axes.set_xlim(left=427800, right=431000)
    axes.set_ylim(bottom=4758000, top=4762000)
    # Basemap tiles reprojected to the model CRS (UTM zone 16N)
    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
    # axes.title.set_text(runLog['Short Description'][m])
    # axes.titlesize = 'x-large'
    fig.suptitle(runLog['Short Description'][m], fontsize=18)
    axes.set_xlabel('Easting [m]')
    axes.set_ylabel('Northing [m]')
    plt.show()
    fig.savefig(
        '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/04_MIKE21_HD/Images/Production/' +
        runLog['Short Description'][m] + '_RevC_Current.png', bbox_inches='tight', dpi=300)
# %% Plot Bathymetry
# Filled-contour bed-elevation map for the last run read above (uses the
# leftover loop variable m from the plotting loop).
fig, axes = plt.subplots(figsize=(8, 8))
# Convert to feet and ignore missing
# 3.28084 converts m -> ft; 578.007 presumably shifts to a local lake datum -- TODO confirm
bed_plot = (bed_list[m] * 3.28084) - 578.007
bed_nan = np.isnan(bed_plot)
axDHI = dfs_list[m].plot(bed_plot[~bed_nan], plot_type='contourf', show_mesh=False, cmap='magma_r', ax=axes, levels=9,
                         vmin=-80, vmax=0, label='Bed Elevation (ft)',
                         elements=dfs_list[m].element_ids[~bed_nan])
axes.set_xlim(left=427800, right=431000)
axes.set_ylim(bottom=4758000, top=4762500)
# Basemap tiles reprojected to the model CRS (UTM zone 16N)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
fig.suptitle('Bed Elevation', fontsize=18)
axes.set_xlabel('Easting [m]')
axes.set_ylabel('Northing [m]')
plt.show()
fig.savefig(
    '//srv-mad3/Projects/13632.101 South Shore Breakwater/10_Reports&Pres/13632.101.R3 Ph I BOD/Support files/' +
    'Bed_Elevation.png', bbox_inches='tight', dpi=300)
# %% Read time series
# For each plotted run, read the dfs0 point time series: breakwater toe points
# (BreakPts) and transect points (TransectPTS), each converted to a DataFrame.
MIKEds_list = [None] * (max(modelPlot) + 1)
MIKEdsT_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    dfsIN_read = dfsIN.read()
    MIKEds_list[m] = dfsIN_read.to_dataframe()
    dfsTIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                          runLog['Run Name'][m] + '.sw - Result Files', 'TransectPTS.dfs0').as_posix())
    dfsTIN_read = dfsTIN.read()
    MIKEdsT_list[m] = dfsTIN_read.to_dataframe()
    # Cleanup unnecessary variables
    del dfsIN_read
    del dfsIN
    del dfsTIN_read
    del dfsTIN
# %% Read in Toe and Crest Shapefiles
# Toe-extent lines, pre-densified to 10 m vertices with vertex coordinates in UTM.
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
# Crest points come as a headerless x/y/z CSV in EPSG:32154 (per crs below);
# reproject to UTM zone 16N to match the toe lines.
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
# Attach the nearest crest point (its z becomes the 'Crest' column) to each toe vertex
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Merge with data
# Attach each dfs0 column to the matching toe/crest point (columns are labelled
# with the quoted FID and vertex index followed by the parameter name) and
# stack one snapshot of the point table per time step.
breakPointsOut = [None] * (max(modelPlot) + 1)
breakTimesOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    frames = []
    for t in range(0, MIKEds_list[m].shape[0]):
        # Bug fix: the original aliased breakwaterPTS_Crest (no copy), mutating
        # the shared GeoDataFrame on every time step; work on a copy instead.
        breakwaterPTS_merge = breakwaterPTS_Crest.copy()
        for i in range(0, MIKEds_list[m].shape[1]):
            colName = MIKEds_list[m].columns.values[i]
            # Parameter name is the text after the second quoted field
            paramName = colName[colName.find('"', 5, -1) + 3:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            # FID and vertex index are embedded in the quoted parts of the label
            tmpFID = int(colName[1:colName.find('"', 1)])
            tmpIND = int(colName[5:colName.find('"', 5)])
            breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                     (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                    paramName] = MIKEds_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEds_list[m].index[t]
        frames.append(breakwaterPTS_merge)
    # Concatenate once instead of repeatedly inside the loop (avoids quadratic
    # copying); keep None when there are no time steps, as the original did.
    breakTimesOut[m] = pd.concat(frames, ignore_index=True) if frames else None
# %% Read in Breakwater Transect Points Sh
# Tab-separated table of named transect points; wrap it in a GeoDataFrame
# with point geometry in UTM zone 16N.
breakwaterT = pd.read_csv("C:/Users/arey/files/Projects/SouthShore/Bathy/BreakTransectPTS_Names.csv", sep='\t')
_transect_geom = gp.points_from_xy(breakwaterT.X, breakwaterT.Y)
breakwaterT_PTS = gp.GeoDataFrame(breakwaterT, geometry=_transect_geom, crs='EPSG:32616')
# %% Merge with data
# Same merge as for the toe points, but transect dfs0 columns are labelled
# '<point name>: <parameter>' and matched on the Name column.
breakPoints_TOut = [None] * (max(modelPlot) + 1)
breakTimes_TOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEdsT_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): this aliases (does not copy) breakwaterT_PTS, so the
        # shared table is mutated every time step; the stacked output is still
        # one snapshot per t because pd.concat copies -- confirm intended.
        breakwaterPTS_merge = breakwaterT_PTS
        for i in range(0, MIKEdsT_list[m].shape[1]):
            # Parameter name is the text after ': ' in the column label
            paramName = MIKEdsT_list[m].columns.values[i][MIKEdsT_list[m].columns.values[i].find(':', 5, -1) + 2:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            # Point name is the text before the first ':'
            tmpName = MIKEdsT_list[m].columns.values[i][0:MIKEdsT_list[m].columns.values[i].find(':', 1)]
            breakwaterPTS_merge.loc[(breakwaterPTS_merge.Name == tmpName),
                                    paramName] = MIKEdsT_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEdsT_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimes_TOut[m] = breakwaterPTS_times
#%% Format and save
# Export the merged toe and transect tables, transposed so points become
# columns and parameter/time rows become the index.
for m in modelPlot:
    saveTmp = breakTimesOut[m].copy()
    # Pull coordinates out of the geometry before dropping it
    saveTmp['X'] = saveTmp.geometry.x
    saveTmp['Y'] = saveTmp.geometry.y
    saveTmp.drop(['vertex_par', 'vertex_p_1', 'angle', 'geometry', 'index_right',
                  'breakCrest_x', 'breakCrest_y', 'Length'], axis=1, inplace=True)
    saveTmp.sort_values(by=['FID', 'vertex_ind'], inplace=True, ignore_index=True)
    # Reorder columns: X and Y first, then everything except the two just added
    colNames = saveTmp.columns.values
    saveTmp = saveTmp[['X', 'Y', *colNames[0:-2]]]
    saveTmpT = saveTmp.transpose(copy=True)
    # Transect points
    saveTmp2 = breakTimes_TOut[m].copy()
    saveTmp2.drop(['geometry'], axis=1, inplace=True)
    saveTmp2T = saveTmp2.transpose(copy=True)
    saveTmpT.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                    'Toe' + runLog['Short Description'][m] + '.csv')
    saveTmp2T.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                     'Transect' + runLog['Short Description'][m] + '.csv')

View File

@ -0,0 +1,585 @@
## Plotting Mike21 SW results for SouthShore
# Author: AJMR
# December 22, 2021
# %% Setup Project
from mikeio import Dfsu, Mesh, Dfs2, Dfs0, Dfs1
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import datetime as datetime
import matplotlib.pyplot as plt
import contextily as ctx
import matplotlib.font_manager as fm
# %% Read Model Log
# Mustique project model log, "ModelLog" sheet; one row per run.
pth = pl.Path("//srv-ott3.baird.com/", "Projects", "13539.101 L'Ansecoy Bay, Mustique", "06_Models",
              "Model Log Mustique.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read Model Results
# Runs to extract; result lists are sized to the largest run index so they can
# be indexed directly by run number (unused slots stay None).
modelPlot = range(10, 11)
curSpeed_list = [None] * (max(modelPlot) + 1)
curElm_list = [None] * (max(modelPlot) + 1)
curU_list = [None] * (max(modelPlot) + 1)
curV_list = [None] * (max(modelPlot) + 1)
curW_list = [None] * (max(modelPlot) + 1)
dfs_list = [None] * (max(modelPlot) + 1)
dfs2d_list = [None] * (max(modelPlot) + 1)
ds_list = [None] * (max(modelPlot) + 1)
wl_list = [None] * (max(modelPlot) + 1)
wd_list = [None] * (max(modelPlot) + 1)
bed_list = [None] * (max(modelPlot) + 1)
z_list = [None] * (max(modelPlot) + 1)
z_profile_list = [None] * (max(modelPlot) + 1)
elm_list = [None] * (max(modelPlot) + 1)
elm_df_list = [None] * (max(modelPlot) + 1)
elm2d_list = [None] * (max(modelPlot) + 1)
elm2d_df_list = [None] * (max(modelPlot) + 1)
# Instrument metadata (RevB) and extraction coordinates (RevE); the iloc
# slice keeps only the coordinate columns -- presumably easting/northing,
# confirm against the CSV layout.
readPointsName = pd.read_csv("//srv-ott3.baird.com/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/Dataset locations_RevB.csv",
                             delimiter=",")
readPoints = pd.read_csv("//srv-ott3.baird.com/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/Dataset locations_RevE.csv",
                         delimiter=",").iloc[0:, 2:-2].values
# Superseded hard-coded extraction points kept for reference:
# readPoints = np.zeros((14, 2))
# readPoints[0, :] = [693000, 1425500]
# readPoints[1, :] = [697000, 1429500]
# readPoints[2, :] = [697000, 1421500]
# readPoints[3, :] = [701000, 1425500]
#
# # readPoints[4, :] = [697576.96, 1425767.10] # RBR NEAR
# # readPoints[5, :] = [697760.89, 1426109.73] # RBR FAR
#
# # From Mathew
# readPoints[4, :] = [697858.00, 1426095.00] # RBR NEAR
# readPoints[5, :] = [697613.00, 1426509.00] # RBR FAR
#
# # Location Uncertainty
# # 500 -x
# readPoints[6, :] = [697858.00-100, 1426095.00-0] # RBR NEAR
# readPoints[7, :] = [697613.00-150, 1426509.00+150] # RBR FAR
# # 500 +x
# readPoints[8, :] = [697858.00+100, 1426095.00-0] # RBR NEAR
# readPoints[9, :] = [697613.00-50, 1426509.00+50] # RBR FAR
# # 500 -x
# readPoints[10, :] = [697858.00+150, 1426095.00+150] # RBR NEAR
# readPoints[11, :] = [697613.00+150, 1426509.00+150] # RBR FAR
# # 500 -x
# readPoints[12, :] = [697858.00-150, 1426095.00+150] # RBR NEAR
# readPoints[13, :] = [697613.00-150, 1426509.00+150] # RBR FAR
#125 +125
for m in modelPlot:
    # Open the 2D full-domain result file for run m
    dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain2D.dfsu').as_posix())
    dfs2d_list[m] = dfsIN
    # Read Map
    # ds_list[m] = dfs_list[m].read()
    # curU_list[m] = ds_list[m]["Depth averaged U velocity"]
    # curV_list[m] = ds_list[m]["Depth averaged V velocity"]
    # curElm_list[m] = dfs_list[m].element_coordinates
    # wl_list[m] = ds_list[m]["Surface elevation"]
    # z_list[m] = ds_list[m]["Z coordinate"]
    ## Read specific points in 2D
    # Find nearest elements
    elem_ids = dfs2d_list[m].find_nearest_elements(readPoints[:, 0], readPoints[:, 1])
    # Read in data from nearest elements
    elm2d_list[m] = dfs2d_list[m].read(elements=elem_ids)
    # Convert to Pandas DataFrame, one per extraction point
    elm2d_df_list[m] = [None] * readPoints.shape[0]
    for p in range(0, readPoints.shape[0]):
        elm2d_df_list[m][p] = elm2d_list[m].isel(idx=p).to_dataframe()
    ## Read specific points 3D
    # Setup MIKE object
    dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'fullDomain3D.dfsu').as_posix())
    dfs_list[m] = dfsIN
    # Find nearest elements
    elem_ids = dfs_list[m].find_nearest_profile_elements(readPoints[:, 0], readPoints[:, 1])
    # Read from elements- flatten 2d element array (xy and z) to 1d for reading
    elm_list[m] = dfs_list[m].read(elements=elem_ids.flatten().astype(int))
    elm_df_list[m] = [None] * readPoints.shape[0]
    z_profile_list[m] = [None] * readPoints.shape[0]
    # Convert to Pandas DataFrame
    # Loop through points, selecting corresponding depths
    for p in range(0, readPoints.shape[0]):
        # Z coordinate of every layer element in point p's profile
        z_profile_list[m][p] = dfs_list[m].element_coordinates[elem_ids[p, :].astype(int), 2]
        # Convert to Pandas and add zidx
        # NOTE(review): idx=p * 5 + z assumes 5 vertical layers per profile,
        # while the water-level plot below indexes with *6 -- confirm layer count.
        layer_frames = []
        for z in range(0, z_profile_list[m][p].shape[0]):
            df_tmp = elm_list[m].isel(idx=p * 5 + z).to_dataframe()
            df_tmp['Z'] = z_profile_list[m][p][z]
            df_tmp['Z_IDX'] = z
            layer_frames.append(df_tmp)
        # Bug fix: DataFrame.append was deprecated and removed in pandas 2.0,
        # and repeated appends copy quadratically; concatenate the layers once.
        elm_df_list[m][p] = pd.concat(layer_frames) if layer_frames else None
        del layer_frames
# %% Import RBR data
# RBR pressure logger, 'Wave' sheet. WL = depth anomaly about the record mean
# minus a 0.2 m offset -- TODO confirm the offset's datum meaning.
rbr_path = "C:/Users/arey/files/Projects/Mustique/041279_20211203_1541Ruskin.xlsx"
rbr_pd = pd.read_excel(rbr_path, sheet_name='Wave', parse_dates=True, header=1, index_col=0)
rbr_pd['WL'] = rbr_pd['Depth'] - rbr_pd['Depth'].mean() - 0.2
# Correct time zone
# +4 h shift -- presumably local (UTC-4) to UTC; confirm instrument clock setting
rbr_pd.index = rbr_pd.index + datetime.timedelta(hours=4)
# %% Import Nortek Eco data
# Two Nortek Eco current profilers (CSV export; data header is on row 29).
eco_path_1 = "//srv-ott3/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/02_Nortek ECO/Eco61_20210910184606.csv"
eco_path_2 = "//srv-ott3/Projects/13539.101 L'Ansecoy Bay, Mustique/03_Data/03_Field/01_September 2021 trip/02_Nortek ECO/Eco214_20210902120703.csv"
eco1_pd = pd.read_csv(eco_path_1, sep=',', header=[28], index_col=0,
                      parse_dates=False)
eco2_pd = pd.read_csv(eco_path_2, sep=',', header=[28], index_col=0,
                      parse_dates=False)
# Drop first row (presumably a units row, not data -- confirm against the export)
eco1_pd.drop(eco1_pd.index[0], inplace=True)
eco2_pd.drop(eco2_pd.index[0], inplace=True)
# Timestamps are US-style 12-hour strings
eco1_pd.index = pd.to_datetime(eco1_pd.index, format='%m/%d/%Y %I:%M:%S %p')
eco2_pd.index = pd.to_datetime(eco2_pd.index, format='%m/%d/%Y %I:%M:%S %p')
# Set water level to zero
# WL = depth anomaly about the record mean plus a 0.2 m offset -- TODO confirm
eco1_pd['WL'] = eco1_pd['Depth'].astype(float) - eco1_pd['Depth'].astype(float).mean(skipna=True) + 0.2
eco2_pd['WL'] = eco2_pd['Depth'].astype(float) - eco2_pd['Depth'].astype(float).mean(skipna=True) + 0.2
# Correct time zone
# +4 h shift -- presumably local (UTC-4) to UTC; confirm
eco1_pd.index = eco1_pd.index + datetime.timedelta(hours=4)
eco2_pd.index = eco2_pd.index + datetime.timedelta(hours=4)
# %% Read in boundary conditions
# Load the four NCOM water-level boundary lines into ds_wl
# (order: East, North, South, West).
_bound_dir = "//oak-spillway.baird.com/D/13539.101 L'Ansecoy Bay, Mustique/MIKE3/02_Bounds/"
ds_wl = []
for _side in ('East', 'North', 'South', 'West'):
    dfs1 = Dfs1(_bound_dir + 'NCOM2_' + _side + 'Bound.dfs1')
    ds_wl.append(dfs1.read())
# %% Plot Water Level at a point
# Compare modelled surface elevation against observed WL at three instruments
# (points 5 and 6: Nortek Ecos; point 9: RBR). Uses the leftover loop
# variable m from the extraction loop above.
fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(8, 8), sharex=True)
# plotstart = '2021-10-01 00:00:00'
# plotend = '2021-10-21 00:00:00'
plotstart = '2021-10-05 00:00:00'
plotend = '2021-10-07 00:00:00'
for sub, p in enumerate([5, 6, 9]):
    # Bug fix: legend label typo 'Obervations' -> 'Observations'
    if p == 9:
        rbr_pd['WL'].plot(ax=axes[sub], color='k', label='Observations')
    elif p == 5:
        eco1_pd['WL'].plot(ax=axes[sub], color='k', label='Observations')
    elif p == 6:
        eco2_pd['WL'].plot(ax=axes[sub], color='k', label='Observations')
    elm2d_df_list[m][p].plot(y='Surface elevation', ax=axes[sub], label=readPointsName.Dataset[p])
    axes[sub].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
    axes[sub].set_ylim(-1.0, 1.0)
plt.show()
# %% Plot Water Level at a 3D point
# Use the Z coordinate of the top-layer element as a surface-elevation proxy,
# compared with the Eco WL records.
plotstart = '2021-09-14 00:00:00'
# plotend = '2021-09-16 00:00:00'
plotend = '2021-10-03 00:00:00'
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(8, 8), sharex=True)
# elm_df_list[m][6].loc[elm_df_list[m][6]['Z_IDX'] == 4, :].plot(
#     y='Z', ax=axes[0], label='Model')
# NOTE(review): hard-coded flat indices 30*6-1 and 14*6-1 imply 6 layers and
# specific point numbers, while the read loop used p * 5 + z (5 layers);
# also confirm the +0.1 offset.
axes[0].plot(elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == 4, :].index,
             elm_list[m]['Z coordinate'][:, 30*6-1]+0.1, label='Model')
eco1_pd['WL'].plot(ax=axes[0], color='k', label='Observations')
axes[0].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[0].set_ylabel('Water Level Offshore (m)')
axes[0].legend()
# elm_df_list[m][7].loc[elm_df_list[m][7]['Z_IDX'] == 4, :].plot(
#     y='Z', ax=axes[1], label='Model')
axes[1].plot(elm_df_list[m][7].loc[elm_df_list[m][7]['Z_IDX'] == 4, :].index,
             elm_list[m]['Z coordinate'][:, 14*6-1]+0.1, label='Model')
eco2_pd['WL'].plot(ax=axes[1], color='k', label='Observations')
axes[1].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[1].set_ylabel('Water Level Nearshore (m)')
axes[1].legend()
axes[1].set_xlabel('Date')
plt.show()
fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' +
            '/wl_TS.png', bbox_inches='tight')
# %% Velocity point
# Compare modelled current speed at the top layer (Z_IDX == 4) with the
# Nortek Eco 'Upper speed' observations.
fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(8, 8), sharex=True)
eco1_pd['Upper speed'].astype(float).plot(ax=axes[0], color='tab:red', label='Observations')
elm_df_list[m][5]['Current Speed'] = np.sqrt(
    elm_df_list[m][5]['U velocity'] ** 2 + elm_df_list[m][5]['V velocity'] ** 2)
# Bug fix: the row mask was built from point 4's frame but applied to point 5's
# frame (misaligned boolean index); build mask and selection from the same frame.
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == 4, :].plot(
    y='Current Speed', ax=axes[0], label='Model')
axes[0].set_xlim(pd.Timestamp('2021-09-15 00:00:00'), pd.Timestamp('2021-10-3 00:00:00'))
axes[0].set_ylabel('Nearshore Eco Current')
axes[0].legend()
eco2_pd['Upper speed'].astype(float).plot(ax=axes[1], color='tab:red', label='Observations')
# NOTE(review): this panel is labelled 'Offshore' and compares against eco2,
# but still plots point 5 -- possibly should be a different point index; confirm.
elm_df_list[m][5]['Current Speed'] = np.sqrt(
    elm_df_list[m][5]['U velocity'] ** 2 + elm_df_list[m][5]['V velocity'] ** 2)
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == 4, :].plot(
    y='Current Speed', ax=axes[1], label='Model')
axes[1].set_xlim(pd.Timestamp('2021-09-15 00:00:00'), pd.Timestamp('2021-10-3 00:00:00'))
axes[1].set_ylabel('Offshore Eco Current')
plt.show()
# %% Velocity point U and V
# Decompose the Eco speed/direction into U (east, sin of direction) and
# V (north, cos of direction) components and compare against the model at the
# chosen layer. Alternate layer configurations are kept commented out.
# Z_IDX = 4
# ecoVar1 = 'Upper speed U'
# ecoVar2 = 'Upper speed V'
# ecoVar3 = 'Upper speed'
# ecoVar4 = 'Upper direction'
# plotstart = '2021-09-14 00:00:00'
# # plotend = '2021-09-16 00:00:00'
# plotend = '2021-10-03 00:00:00'
plotstart = '2021-10-01 00:00:00'
# plotend = '2021-09-16 00:00:00'
plotend = '2021-10-21 00:00:00'
Z_IDX = 2
ecoVar1 = 'Middle speed U'
ecoVar2 = 'Middle speed V'
ecoVar3 = 'Middle speed'
ecoVar4 = 'Middle direction'
# Z_IDX = 1
# ecoVar1 = 'Lower speed U'
# ecoVar2 = 'Lower speed V'
# ecoVar3 = 'Lower speed'
# ecoVar4 = 'Lower direction'
fig, axes = plt.subplots(nrows=4, ncols=1, figsize=(8, 8), sharex=True)
# Nearshore panels: points 3 ('Model') and 5 ('Model Shift') vs eco1
eco1_pd[ecoVar1] = eco1_pd[ecoVar3].astype(float) * \
    np.sin(np.radians(eco1_pd[ecoVar4].astype(float).to_numpy()))
eco1_pd[ecoVar1].astype(float).plot(ax=axes[0], color='k', label='Observations')
elm_df_list[m][3].loc[elm_df_list[m][3]['Z_IDX'] == Z_IDX, :].plot(
    y='U velocity', ax=axes[0], label='Model')
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
    y='U velocity', ax=axes[0], label='Model Shift')
axes[0].set_ylim(-0.5, 0.5)
axes[0].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[0].set_ylabel('Nearshore U (m/s)')
axes[0].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
eco1_pd[ecoVar2] = eco1_pd[ecoVar3].astype(float) * \
    np.cos(np.radians(eco1_pd[ecoVar4].astype(float).to_numpy()))
eco1_pd[ecoVar2].astype(float).plot(ax=axes[1], color='k', label='Observations')
elm_df_list[m][3].loc[elm_df_list[m][3]['Z_IDX'] == Z_IDX, :].plot(
    y='V velocity', ax=axes[1], label='Model')
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
    y='V velocity', ax=axes[1], label='Model Shift')
axes[1].set_ylim(-0.5, 0.5)
axes[1].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[1].set_ylabel('Nearshore V (m/s)')
axes[1].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
# Offshore panels: points 4 ('Model') and 6 ('Model Shift') vs eco2
eco2_pd[ecoVar1] = eco2_pd[ecoVar3].astype(float) * \
    np.sin(np.radians(eco2_pd[ecoVar4].astype(float).to_numpy()))
eco2_pd[ecoVar1].astype(float).plot(ax=axes[2], color='k', label='Observations')
elm_df_list[m][4].loc[elm_df_list[m][4]['Z_IDX'] == Z_IDX, :].plot(
    y='U velocity', ax=axes[2], label='Model')
elm_df_list[m][6].loc[elm_df_list[m][6]['Z_IDX'] == Z_IDX, :].plot(
    y='U velocity', ax=axes[2], label='Model Shift')
axes[2].set_ylim(-1.5, 1.5)
axes[2].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[2].set_ylabel('Offshore U (m/s)')
axes[2].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
eco2_pd[ecoVar2] = eco2_pd[ecoVar3].astype(float) * \
    np.cos(np.radians(eco2_pd[ecoVar4].astype(float).to_numpy()))
eco2_pd[ecoVar2].astype(float).plot(ax=axes[3], color='k', label='Observations')
elm_df_list[m][4].loc[elm_df_list[m][4]['Z_IDX'] == Z_IDX, :].plot(
    y='V velocity', ax=axes[3], label='Model')
# NOTE(review): the V 'Model Shift' plots point 5, whereas the matching
# U panel above plots point 6 -- confirm which index is intended.
elm_df_list[m][5].loc[elm_df_list[m][5]['Z_IDX'] == Z_IDX, :].plot(
    y='V velocity', ax=axes[3], label='Model Shift')
axes[3].set_ylim(-1.00, 1.00)
axes[3].set_xlim(pd.Timestamp(plotstart), pd.Timestamp(plotend))
axes[3].set_ylabel('Offshore V (m/s)')
axes[3].legend(bbox_to_anchor=(0.90, 1), loc="upper left")
axes[3].set_xlabel('Date')
plt.show()
# fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06_Models/01_MIKE3/00_Figures/' +
#             '/uVel_TS.png', bbox_inches='tight')
# %% Plotting
# NOTE(review): this whole section appears copy-pasted from the SouthShore
# (13632.101) plotting script: the runs, UTM extents, titles and save paths
# all reference the SouthShore project, and in THIS script curSpeed_list /
# ds_list were never populated (the map read is commented out above), so
# curSpeed_list[m] is None and this section would fail -- confirm whether it
# should be removed or adapted for Mustique.
# Shaded Water
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
modelPlot = range(6, 7)
vmin = 0
vmax = 1
scale = 1  # 5
for m in modelPlot:
    # Per-run plot settings: colour range, arrow cutoff, quiver scale, min arrow length
    if m < 4 or m == 6:
        vmax = 5
        qmax = 4
        scale = 50
        amin = 1
    else:
        vmax = 1
        qmax = 0.25
        scale = 75
        amin = 3
    fig, axes = plt.subplots(figsize=(8, 8))
    # Convert to feet and ignore missing
    cur_plot = curSpeed_list[m] * 3.28084
    cur_nan = np.isnan(cur_plot)
    axDHI = dfs_list[m].plot(cur_plot[~cur_nan], plot_type='contourf', show_mesh=False, cmap='viridis', ax=axes,
                             levels=11,
                             vmin=vmin, vmax=vmax, label='Current Speed (ft/s)',
                             elements=dfs_list[m].element_ids[~cur_nan])
    # Plot arrows on overset grid
    overGrid = dfs_list[m].get_overset_grid(dx=50)
    interpolant = dfs_list[m].get_2d_interpolant(overGrid.xy, n_nearest=1)
    # Interpolate velocity at last time step by selecting time from dataset
    overGrid_Cur = dfs_list[m].interp2d(ds_list[m][ds_list[m].time[-1]:], *interpolant,
                                        shape=(overGrid.ny, overGrid.nx))
    overGrid_U = overGrid_Cur["U velocity"][-1]
    overGrid_V = overGrid_Cur["V velocity"][-1]
    # Normalize velocity above limit
    overGrid_U_Scale = overGrid_U * 3.28084
    overGrid_V_Scale = overGrid_V * 3.28084
    r = np.power(np.add(np.power(overGrid_U_Scale, 2), np.power(overGrid_V_Scale, 2)), 0.5)
    curPlotMask = np.sqrt((overGrid_U_Scale) ** 2 + (overGrid_V_Scale) ** 2) > qmax
    overGrid_U_Plot = np.zeros(overGrid_U_Scale.shape)
    overGrid_V_Plot = np.zeros(overGrid_V_Scale.shape)
    # Normalize arrows to 1
    overGrid_U_Plot = overGrid_U_Scale / r
    overGrid_V_Plot = overGrid_V_Scale / r
    # Scale arrows below qmax
    overGrid_U_Plot[~curPlotMask] = overGrid_U_Plot[~curPlotMask] * (r[~curPlotMask] / qmax)
    overGrid_V_Plot[~curPlotMask] = overGrid_V_Plot[~curPlotMask] * (r[~curPlotMask] / qmax)
    qv = axDHI.quiver(overGrid.x, overGrid.y, overGrid_U_Plot, overGrid_V_Plot, scale=scale,
                      color='k', headlength=3.5, headwidth=3.5, headaxislength=3.5, width=0.0015, minlength=amin)  # 200
    axes.set_xlim(left=427800, right=431000)
    axes.set_ylim(bottom=4758000, top=4762000)
    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
    # axes.title.set_text(runLog['Short Description'][m])
    # axes.titlesize = 'x-large'
    fig.suptitle(runLog['Short Description'][m], fontsize=18)
    axes.set_xlabel('Easting [m]')
    axes.set_ylabel('Northing [m]')
    plt.show()
    fig.savefig(
        '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/04_MIKE21_HD/Images/Production/' +
        runLog['Short Description'][m] + '_RevC_Current.png', bbox_inches='tight', dpi=300)
# %% Plot Bathymetry
# NOTE(review): also copy-pasted from the SouthShore script (bed_list is never
# populated here, extents/paths are SouthShore) -- confirm removal/adaptation.
fig, axes = plt.subplots(figsize=(8, 8))
# Convert to feet and ignore missing (578.007 offset presumably a local datum shift)
bed_plot = (bed_list[m] * 3.28084) - 578.007
bed_nan = np.isnan(bed_plot)
axDHI = dfs_list[m].plot(bed_plot[~bed_nan], plot_type='contourf', show_mesh=False, cmap='magma_r', ax=axes, levels=9,
                         vmin=-80, vmax=0, label='Bed Elevation (ft)',
                         elements=dfs_list[m].element_ids[~bed_nan])
axes.set_xlim(left=427800, right=431000)
axes.set_ylim(bottom=4758000, top=4762500)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
fig.suptitle('Bed Elevation', fontsize=18)
axes.set_xlabel('Easting [m]')
axes.set_ylabel('Northing [m]')
plt.show()
fig.savefig(
    '//srv-mad3/Projects/13632.101 South Shore Breakwater/10_Reports&Pres/13632.101.R3 Ph I BOD/Support files/' +
    'Bed_Elevation.png', bbox_inches='tight', dpi=300)
# %% Read time series
# NOTE(review): reads SouthShore '.sw' dfs0 outputs -- duplicated from the
# SouthShore script; confirm it belongs in this Mustique script.
MIKEds_list = [None] * (max(modelPlot) + 1)
MIKEdsT_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    dfsIN_read = dfsIN.read()
    MIKEds_list[m] = dfsIN_read.to_dataframe()
    dfsTIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                          runLog['Run Name'][m] + '.sw - Result Files', 'TransectPTS.dfs0').as_posix())
    dfsTIN_read = dfsTIN.read()
    MIKEdsT_list[m] = dfsTIN_read.to_dataframe()
    # Cleanup unnecessary variables
    del dfsIN_read
    del dfsIN
    del dfsTIN_read
    del dfsTIN
# %% Read in Toe and Crest Shapefiles
# Toe-extent lines (10 m vertex spacing, vertex coordinates in UTM).
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
# Headerless x/y/z crest CSV in EPSG:32154, reprojected to UTM zone 16N.
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
# Attach the nearest crest elevation to each toe vertex
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Merge with data
# Attach each dfs0 column ('"FID" "vertex" <param>' labels) to the matching
# toe/crest point and stack one snapshot of the table per time step.
breakPointsOut = [None] * (max(modelPlot) + 1)
breakTimesOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEds_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): this aliases (does not copy) breakwaterPTS_Crest, so
        # the shared table is mutated every time step -- confirm intended.
        breakwaterPTS_merge = breakwaterPTS_Crest
        for i in range(0, MIKEds_list[m].shape[1]):
            # Parameter name is the text after the second quoted field
            paramName = MIKEds_list[m].columns.values[i][MIKEds_list[m].columns.values[i].find('"', 5, -1) + 3:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            # FID and vertex index parsed from the quoted parts of the label
            tmpFID = int(MIKEds_list[m].columns.values[i][1:MIKEds_list[m].columns.values[i].find('"', 1)])
            tmpIND = int(MIKEds_list[m].columns.values[i][5:MIKEds_list[m].columns.values[i].find('"', 5)])
            breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                     (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                    paramName] = MIKEds_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEds_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimesOut[m] = breakwaterPTS_times
# %% Read in Breakwater Transect Points Sh
# Tab-separated table of named transect points, wrapped as a UTM 16N GeoDataFrame.
breakwaterT = pd.read_csv("C:/Users/arey/files/Projects/SouthShore/Bathy/BreakTransectPTS_Names.csv", sep='\t')
breakwaterT_PTS = gp.GeoDataFrame(breakwaterT, geometry=gp.points_from_xy(breakwaterT.X, breakwaterT.Y),
                                  crs='EPSG:32616')
# %% Merge with data
# Same merge as above, but transect dfs0 columns are labelled
# '<point name>: <parameter>' and matched on the Name column.
breakPoints_TOut = [None] * (max(modelPlot) + 1)
breakTimes_TOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEdsT_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): aliases (does not copy) breakwaterT_PTS -- the shared
        # table is mutated every time step; confirm intended.
        breakwaterPTS_merge = breakwaterT_PTS
        for i in range(0, MIKEdsT_list[m].shape[1]):
            # Parameter name is the text after ': ' in the column label
            paramName = MIKEdsT_list[m].columns.values[i][MIKEdsT_list[m].columns.values[i].find(':', 5, -1) + 2:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            # Point name is the text before the first ':'
            tmpName = MIKEdsT_list[m].columns.values[i][0:MIKEdsT_list[m].columns.values[i].find(':', 1)]
            breakwaterPTS_merge.loc[(breakwaterPTS_merge.Name == tmpName),
                                    paramName] = MIKEdsT_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEdsT_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimes_TOut[m] = breakwaterPTS_times
# %% Format and save
# Export the merged toe and transect tables, transposed so points become
# columns. NOTE(review): save paths target the SouthShore project share.
for m in modelPlot:
    saveTmp = breakTimesOut[m].copy()
    # Pull coordinates out of the geometry before dropping it
    saveTmp['X'] = saveTmp.geometry.x
    saveTmp['Y'] = saveTmp.geometry.y
    saveTmp.drop(['vertex_par', 'vertex_p_1', 'angle', 'geometry', 'index_right',
                  'breakCrest_x', 'breakCrest_y', 'Length'], axis=1, inplace=True)
    saveTmp.sort_values(by=['FID', 'vertex_ind'], inplace=True, ignore_index=True)
    # Reorder columns: X and Y first, then everything except the two just added
    colNames = saveTmp.columns.values
    saveTmp = saveTmp[['X', 'Y', *colNames[0:-2]]]
    saveTmpT = saveTmp.transpose(copy=True)
    # Transect points
    saveTmp2 = breakTimes_TOut[m].copy()
    saveTmp2.drop(['geometry'], axis=1, inplace=True)
    saveTmp2T = saveTmp2.transpose(copy=True)
    saveTmpT.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                    'Toe' + runLog['Short Description'][m] + '.csv')
    saveTmp2T.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                     'Transect' + runLog['Short Description'][m] + '.csv')

View File

@ -0,0 +1,765 @@
#%% Project Setup
import os
import pandas as pd
import geopandas as gp
from scipy.signal import argrelextrema
import numpy as np
import math
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
import matplotlib as mpl
import cartopy.crs as ccrs
import contextily as ctx
import cmocean.cm as cmo
from shapely.geometry import Point, LineString
from xarray.backends import NetCDF4DataStore
import xarray as xr
from datetime import datetime, timedelta
from netCDF4 import num2date
from metpy.units import units
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from metpy.plots import ctables
from siphon.catalog import TDSCatalog
#%% Helper function for finding proper time variable
def find_time_var(var, time_basename='time'):
    """Return the first coordinate of ``var`` whose name starts with
    ``time_basename``; raise ``ValueError`` when none exists."""
    matching = [name for name in var.coords if name.startswith(time_basename)]
    if matching:
        return var.coords[matching[0]]
    raise ValueError('No time variable found for ' + var.name)
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Deployment folders: 12 entries (indices 0-11), in groups of three sites
# (Great House, Greensleeves, Old Queens Fort) for each campaign:
# pre-construction, construction, post-construction, November monitoring.
importPaths = ['C:/Users/arey/files/Projects/West Coast/Pre_Construction/Great House/',
               'C:/Users/arey/files/Projects/West Coast/Pre_Construction/Greensleeves/',
               'C:/Users/arey/files/Projects/West Coast/Pre_Construction/Old Queens Fort/',
               'C:/Users/arey/files/Projects/West Coast/Construction/Great House/',
               'C:/Users/arey/files/Projects/West Coast/Construction/Greensleeves/',
               'C:/Users/arey/files/Projects/West Coast/Construction/Old Queens Fort/',
               'C:/Users/arey/files/Projects/West Coast/Post_Construction/Great House/',
               'C:/Users/arey/files/Projects/West Coast/Post_Construction/Greensleeves/',
               'C:/Users/arey/files/Projects/West Coast/Post_Construction/Old Queens Fort/',
               'C:/Users/arey/files/Projects/West Coast/Monitoring_Nov/Great House/',
               'C:/Users/arey/files/Projects/West Coast/Monitoring_Nov/Greensleeves/',
               'C:/Users/arey/files/Projects/West Coast/Monitoring_Nov/Old Queens Fort/']
# Site name for each importPaths entry (parallel list, same length).
siteNames = ['Great House',
             'Greensleeves',
             'Old Queens Fort',
             'Great House',
             'Greensleeves',
             'Old Queens Fort',
             'Great House',
             'Greensleeves',
             'Old Queens Fort',
             'Great House',
             'Greensleeves',
             'Old Queens Fort']
# Campaign label for each importPaths entry (parallel list, same length).
timeLabels = ['Before Construction',
              'Before Construction',
              'Before Construction',
              'During Construction',
              'During Construction',
              'During Construction',
              'After Construction',
              'After Construction',
              'After Construction',
              'Monitoring',
              'Monitoring',
              'Monitoring']
# Offshore wave nowcast time series matched to each deployment; None where
# no nowcast is available (the November monitoring runs).
# NOTE(review): the 'holetown_mid_15m' entries have no '.bts' extension,
# unlike the spawnee files -- confirm the on-disk file name.
wave_bts_file = [
    'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
    'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
    'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
    'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
    'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
    'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
    'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
    'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
    'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/holetown_mid_15m',
    None,
    None,
    None]
# Process the November monitoring deployments (indices 9-11).
# BUG FIX: was range(9, 13); the parallel lists above have 12 entries
# (indices 0-11), so s == 12 raised IndexError on importPaths[s].
for s in range(9, 12):
    ## Define master import path
    importPath = importPaths[s]
    siteName = siteNames[s]
    timeLabel = timeLabels[s]
    importFiles = os.listdir(importPath)
    # Initialize import variables
    RBR_File = None
    JFE_File = None
    GPS_File = None
    # Classify the files in the deployment folder. Order matters: the
    # '_A.csv' (JFE) test must run before the generic '.csv' (GPS)
    # catch-all, and previously generated 'Summary' outputs are skipped.
    for fname in importFiles:
        if '.xlsx' in fname and 'Summary' not in fname:
            RBR_File = fname
        elif '_A.csv' in fname and 'Summary' not in fname:
            JFE_File = fname
        elif '.csv' in fname and 'Summary' not in fname:
            GPS_File = fname
    #%% RBR Import Data
    # RBR multi-parameter sonde export: 'Data' sheet, units row skipped.
    if RBR_File is not None:
        RBR_Obs = pd.read_excel(importPath + RBR_File,
                                sheet_name='Data', skiprows=0, header=1)
    #%% JFE Import Data
    # JFE turbidity/chlorophyll logger CSV; first 30 rows are instrument header.
    if JFE_File is not None:
        JFE_Obs = pd.read_csv(importPath + JFE_File, skiprows=30)
    #%% GPS Import Data
    if GPS_File is not None:
        GPS = pd.read_csv(importPath + GPS_File,
                          header=None, names=['Index', 'Date1', 'Time1', 'Date2', 'Time2', 'Northing', 'North', 'Easting', 'East', 'Var1', 'Var2'])
        #convert GPS data to geodataframe
        # Easting is negated: longitudes appear to be logged as positive-west
        # -- TODO confirm against the raw GPS export.
        GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))
        GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))
        GPS_gdf.set_index('DateTime', inplace=True)
        # Convert to UTM (zone 21N)
        GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32621")
    else:
        # Synthesize GPS data for great house: spread the digitized path
        # evenly over the RBR record's time span.
        GPS_gdf = gp.read_file('C:/Users/arey/files/Projects/West Coast/GreatHousePath_R3.shp')
        GPS_gdf['DateTime'] = pd.date_range(pd.to_datetime(RBR_Obs['Time'].iloc[0]),
                                            pd.to_datetime(RBR_Obs['Time'].iloc[-1]),
                                            periods=len(GPS_gdf)).values
        GPS_gdf.set_index('DateTime', inplace=True)
    #%% Read in site shapefile
    siteShp = gp.read_file('C:/Users/arey/files/Projects/West Coast/SitePolygons.shp')
    siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")
    #%% Merge GPS to RBR
    # Process RBR into datetime
    if RBR_File is not None:
        RBR_Obs['DateTime'] = pd.to_datetime(RBR_Obs['Time'])
        RBR_Obs.set_index('DateTime', inplace=True)
        # Merge with GPS as dataframe: nearest GPS fix within 300 s of each
        # RBR sample (unmatched samples get a null geometry).
        RBR_Obs_geo = pd.merge_asof(RBR_Obs, GPS_gdf,
                                    left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('300s'))
        RBR_Obs_geo = gp.GeoDataFrame(RBR_Obs_geo, geometry=RBR_Obs_geo.geometry, crs="EPSG:32621")
    #%% Merge GPS to JFE
    if JFE_File is not None:
        # Process JFE into datetime
        JFE_Obs['DateTime'] = pd.to_datetime(JFE_Obs['Date'])
        JFE_Obs.set_index('DateTime', inplace=True)
        # Merge with GPS as dataframe (tighter 60 s tolerance than the RBR merge)
        JFE_Obs_geo = pd.merge_asof(JFE_Obs, GPS_gdf, left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('60s'))
        JFE_Obs_geo = gp.GeoDataFrame(JFE_Obs_geo, geometry=JFE_Obs_geo.geometry, crs="EPSG:32621")
    #%% Find and setup key plotting details
    # Shaded Water basemap style (Mapbox tile URL including access token)
    mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
    # Sat water
    # mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckekcw3pn08am19qmqbhtq8sb/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
    # Per-site map extents (UTM 21N), nearest GFS grid point, and the site
    # polygon used to flag samples collected inside the survey area.
    # NOTE(review): siteShp.iloc[row, 1] assumes column 1 is the geometry
    # column and a fixed feature order (2 = Great House, 1 = Greensleeves,
    # 0 = Old Queens Fort) -- confirm against the shapefile.
    if siteName == 'Great House':
        axXlim = (213210.7529575412, 213562.64172686986)
        axYlim = (1464769.2243017585, 1465135.2219089477)
        GFS_Lon = -59.6441
        GFS_Lat = 13.2372
        RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[2, 1])
    elif siteName == 'Greensleeves':
        axXlim = (213269.99233348924, 213648.1643157148)
        # axYlim = (1463378.1020314451, 1463843.5442048472)
        axYlim = (1463378.1020314451, 1463950.5442048472)
        GFS_Lon = -59.6428
        GFS_Lat = 13.2289
        RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[1, 1])
    elif siteName == 'Old Queens Fort':
        axXlim = (213368.59866770002, 213745.6997016811)
        axYlim = (1460192.707288096, 1460672.371780407)
        GFS_Lon = -59.6419
        GFS_Lat = 13.1960
        RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[0, 1])
    # Set min and max times using conductivity
    # if JFE_File is None:
    # Observation window: first/last sample inside the site polygon when GPS
    # fixes exist; otherwise trim 20 samples off each end of the record.
    # iloc[..., 0] takes column 0 of the matched rows -- presumably the time
    # column; verify against the RBR export layout.
    if RBR_Obs_geo['inArea'].any():
        # First and last times from area in shapefile
        minTime = RBR_Obs_geo[RBR_Obs_geo['inArea']==True].iloc[0, 0]
        maxTime = RBR_Obs_geo[RBR_Obs_geo['inArea']==True].iloc[-1, 0]
    else:
        # First and last times if no GPS data
        minTime = RBR_Obs_geo.iloc[20, 0]
        maxTime = RBR_Obs_geo.iloc[-20, 0]
    # Earlier conductivity-based window selection, retained for reference:
    # else:
    #     minTime = (RBR_Obs['Conductivity ']>5).idxmax()
    #     minTime = minTime + timedelta(seconds=30)
    #     maxTime = ((RBR_Obs['Conductivity ']<5) & (RBR_Obs['Time']>minTime)).idxmax()
    #     maxTime = maxTime - timedelta(seconds=30)
    #     obsPeriod = maxTime-minTime
    #
    #     if (obsPeriod.seconds<180) | (maxTime<minTime):
    #         minTime = ((RBR_Obs['Conductivity ']>5) & (RBR_Obs['Time']>(minTime+timedelta(seconds=180)))).idxmax()
    #         minTime = minTime + timedelta(seconds=30)
    #         maxTime = ((RBR_Obs['Conductivity ']<5) & (RBR_Obs['Time']>minTime)).idxmax()
    #         maxTime = maxTime - timedelta(seconds=30)
    # Round minTime down to the previous 6-hourly GFS analysis time.
    metDate = minTime - timedelta(
        hours = minTime.hour % 6,
        minutes=minTime.minute,
        seconds=minTime.second,
        microseconds=minTime.microsecond)
    #%% GFS Met Import
    # Download surface met from the NCEI GFS 0.5-degree analysis archive at
    # the grid point nearest the site, for the five 6-hourly analyses
    # spanning metDate-18h .. metDate+6h.
    var = ['Temperature_surface', 'Wind_speed_gust_surface',
           'u-component_of_wind_height_above_ground', 'v-component_of_wind_height_above_ground']
    var_precp = ['Total_precipitation_surface_6_Hour_Accumulation']
    temp_1d = []
    gust_1d = []
    wndu_1d = []
    wndv_1d = []
    prep_1d = []
    time_1d = []
    # Set times to download
    startdate = metDate - timedelta(hours=18)
    enddate = metDate + timedelta(hours=6)
    date_list = pd.date_range(startdate, enddate, freq='6H')
    # Loop through dates
    for date in date_list:
        # Base URL for 0.5 degree GFS data
        best_gfs = TDSCatalog('https://www.ncei.noaa.gov/thredds/catalog/model-gfs-g4-anl-files/' +
                              date.strftime('%Y%m') + '/' + date.strftime('%Y%m%d') + '/' + 'catalog.xml')
        # Generate URLs for specific grib file: the _000 analysis, plus the
        # _006 forecast that carries the 6 h precipitation accumulation.
        best_ds = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_000.grb2']
        best_ds_precp = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_006.grb2']
        # Format the query parameters
        ncss = best_ds.subset()
        query = ncss.query()
        ncss_precp = best_ds_precp.subset()
        query_precp = ncss_precp.query()
        # Extract data from specific point
        query.lonlat_point(GFS_Lon, GFS_Lat).time(date)
        query.accept('netcdf')
        query.variables(var[0], var[1], var[2], var[3])
        query.vertical_level(10)  # presumably 10 m above ground -- TODO confirm level units
        data = ncss.get_data(query)
        data = xr.open_dataset(NetCDF4DataStore(data), drop_variables='height_above_ground4')
        query_precp.lonlat_point(GFS_Lon, GFS_Lat).time(date + timedelta(hours=6))
        query_precp.accept('netcdf')
        query_precp.variables(var_precp[0])
        data_precp = ncss_precp.get_data(query_precp)
        data_precp = xr.open_dataset(NetCDF4DataStore(data_precp))
        temp_3d = data[var[0]]
        gust_3d = data[var[1]]
        wndu_3d = data[var[2]]
        wndv_3d = data[var[3]]
        prep_3d = data_precp[var_precp[0]]
        # Read the individual point (with units) and append to the list
        temp_1d.append(temp_3d.metpy.unit_array.squeeze())
        gust_1d.append(gust_3d.metpy.unit_array.squeeze())
        wndu_1d.append(wndu_3d.metpy.unit_array.squeeze())
        wndv_1d.append(wndv_3d.metpy.unit_array.squeeze())
        prep_1d.append(prep_3d.metpy.unit_array.squeeze())
        time_1d.append(find_time_var(temp_3d))
#%% Process Met Data
# 24h Precipitation Total
# Time weighted average of last two points for everything else
met_prep = prep_1d[0] + prep_1d[1] + prep_1d[2] + prep_1d[3]
timeWeight1 = minTime-metDate
timeWeight2 = (metDate+timedelta(hours=6))-minTime
timeWeight1 = timeWeight1.seconds/21600
timeWeight2 = timeWeight2.seconds/21600
met_gust = gust_1d[3] * timeWeight1 + gust_1d[4] * timeWeight2
met_temp = temp_1d[3] * timeWeight1 + temp_1d[4] * timeWeight2
met_wind = math.sqrt((wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0)* timeWeight2) ** 2 +
(wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0)* timeWeight2) **2 )
met_wdir = math.degrees(math.atan2(
wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0)* timeWeight2,
wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0)* timeWeight2)) % 360
    #%% Read in wave conditions from BTS file
    if wave_bts_file[s] is not None:
        # NOTE(review): only Great House actually reads the nowcast even
        # though files are listed for the other sites; for any other site
        # with a non-None file, met_hmo/tp/mwd would be left unset from a
        # previous iteration -- confirm this is intentional.
        if siteName == 'Great House':
            # Whitespace-delimited MIKE .bts text export; 22 header rows.
            wave_bts = pd.read_csv(wave_bts_file[s],
                                   names=['date', 'time', 'HM0', 'TP', 'TM', 'MWD', 'DPK', 'HSWL', 'TSWL', 'DPSWL', 'HSEA', 'TSEA', 'DPSEA'],
                                   header=0, skiprows=22, delim_whitespace=True)
            wave_bts['datetime'] = pd.to_datetime(wave_bts['date'] + ' ' + wave_bts['time'])
            wave_bts.set_index('datetime', inplace=True)
            # Offshore wave stats at the record nearest the observation start.
            # NOTE: Index.get_loc(..., method='nearest') is deprecated in
            # newer pandas (use Index.get_indexer instead).
            met_hmo = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].HM0
            met_tp = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].TP
            met_mwd = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].MWD
    else:
        # -999 sentinel for "no nowcast available" (replaced by NaN when the
        # summary sheets are merged).
        met_hmo = -999
        met_tp = -999
        met_mwd = -999
    #%% Plot time series for Geo data
    fontprops = fm.FontProperties(size=25)
    if JFE_File is None:
        # Turbidity-only deployment: single time-series axis.
        fig, axesTMP = plt.subplots(nrows=1, ncols=1, figsize=(19, 5), constrained_layout=True)
        RBRparam = ['Turbidity ']  # trailing space matches the RBR column names
        RBRparamName = ['Turbidity [NTU]']
        RBRparmCmap = [cmo.turbid]
        RBRparamMin = [0.0]
        RBRparamMax = [60.0]
        # Stats table: 7 met/wave rows + 1 RBR row + observation-date row.
        dataTable = np.zeros([9, 4])
        roundIDX = [1, 1, 0, 1, 1, 1, 1, 1]
        # Wrap the single axis in a list so the loops below can index it.
        axes = []
        axes.append(axesTMP)
    else:
        # Full deployment: 7 stacked axes (4 RBR + 3 JFE parameters).
        fig, axes = plt.subplots(nrows=7, ncols=1, figsize=(19, 25), constrained_layout=True)
        # Stats table: 7 met/wave rows + 4 RBR + 3 JFE + observation-date row.
        dataTable = np.zeros([15, 4])
        roundIDX = [1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1]
        RBRparam = ['Depth ', 'Salinity ', 'Dissolved O₂ saturation ', 'Temperature ']
        RBRparamName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ saturation [%]', 'Temperature [degC]']
        RBRparmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal]
        # Fixed y-axis ranges per RBR parameter.
        RBRparamMin = [0.0, 34.0, 32.5, 29.0]
        RBRparamMax = [1.0, 36.0, 34.0, 31.0]
        JFEparam = ['Turb. -M[FTU]', 'Chl-Flu.[ppb]', 'Chl-a[ug/l]']
        JFEparamName = ['Turbidity [FTU]', 'Chl-Flu. [ppb]', 'Chl-a [ug/l]']
        JFEparamMin = [0.0, 0.0, 0.0]
        JFEparamMax = [20.0, 1.0, 1.0]
fig.patch.set_facecolor('white')
# fig.tight_layout(pad=1.05)
fontprops = fm.FontProperties(size=25)
dataTable[0, 0] = met_temp.m.item(0)-272.15
dataTable[1, 0] = met_wind
dataTable[2, 0] = met_wdir
dataTable[3, 0] = met_prep.m.item(0)
dataTable[4, 0] = met_hmo
dataTable[5, 0] = met_tp
dataTable[6, 0] = met_mwd
ilocs_max = []
ilocs_max_pts = []
RBR_mask = []
JFE_mask = []
    # One time-series panel per RBR parameter: raw trace, smoothed trace, and
    # (for turbidity) numbered local maxima.
    for paramIDX, param in enumerate(RBRparam):
        RBR_Obs_geo.loc[minTime:maxTime, param].plot(
            ax=axes[paramIDX], label='1 Second Observations', color='lightgrey') # Note the space in the col name
        # Create mask for RBR data based on time and parameter minimum
        RBR_mask.append(((RBR_Obs_geo.index>minTime) &
                         (RBR_Obs_geo.index<maxTime) &
                         (RBR_Obs_geo[param]>RBRparamMin[paramIDX])))
        # 60-sample Nuttall-windowed rolling mean (~1 minute at 1 Hz).
        RBR_smoothed = RBR_Obs_geo.loc[RBR_mask[paramIDX], param].rolling(
            60, win_type='nuttall',center=True).mean()
        RBR_smoothed.plot(
            ax=axes[paramIDX], label='1 Minute Average', color='black',
            linewidth=3)
        # Find the local maximums for Turbidity
        if param == 'Turbidity ':
            ilocs_max.append(argrelextrema(RBR_smoothed.values,
                                           np.greater_equal, order=40, mode='wrap')[0])
            # Add start and end points?
            # ilocs_max = np.insert(ilocs_max, 0, 10)
            # ilocs_max[-1] = len(RBR_smoothed.values)-10
            ilocs_max_pts.append(RBR_smoothed.iloc[ilocs_max[paramIDX]].index.values)
            # Add labels if GPS data is available
            if GPS_File is not None:
                # Markers at a fixed y (30) with 1-based numbering that the
                # map plot below repeats at the matching positions.
                axes[paramIDX].scatter(RBR_smoothed.iloc[
                    ilocs_max[paramIDX]].index, np.ones(len(ilocs_max[paramIDX])) * 30, 75,
                    color='blue')
                for a in range(0, len(ilocs_max[paramIDX])):
                    axes[paramIDX].annotate(str(a+1), (ilocs_max_pts[paramIDX][a], 30), fontsize=30)
        else:
            # Keep the shared lists aligned with paramIDX.
            ilocs_max.append(None)
            ilocs_max_pts.append(None)
        # Superseded raw-series stats, retained for reference:
        # dataTable[paramIDX+7, 0] = RBR_Obs_geo.loc[minTime:maxTime, param].mean(skipna=True)
        # dataTable[paramIDX+7, 1] = RBR_Obs_geo.loc[minTime:maxTime, param].std(skipna=True)
        # dataTable[paramIDX+7, 2] = max(RBR_Obs_geo.loc[minTime:maxTime, param].min(skipna=True), 0)
        # dataTable[paramIDX+7, 3] = RBR_Obs_geo.loc[minTime:maxTime, param].max(skipna=True)
        # Stats from the smoothed series (min clipped at 0).
        dataTable[paramIDX+7, 0] = RBR_smoothed.mean(skipna=True)
        dataTable[paramIDX+7, 1] = RBR_smoothed.std(skipna=True)
        dataTable[paramIDX+7, 2] = max(RBR_smoothed.min(skipna=True), 0)
        dataTable[paramIDX+7, 3] = RBR_smoothed.max(skipna=True)
        axes[paramIDX].set_ylabel(RBRparamName[paramIDX])
        axes[paramIDX].set_title('RBR: ' + RBRparamName[paramIDX])
        axes[paramIDX].set_xlabel('')
        axes[paramIDX].set_ylim(RBRparamMin[paramIDX], RBRparamMax[paramIDX])
        axes[paramIDX].legend(loc='upper right')
    if JFE_File is not None:
        # JFE time series on axes 4-6; same pattern as the RBR loop above.
        for paramIDX, param in enumerate(JFEparam):
            JFE_Obs_geo.loc[minTime:maxTime, param].plot(
                ax=axes[paramIDX+4], label='15 Second Observations', color='lightgrey')
            # Mask to the observation window and above-threshold values.
            JFE_mask.append(((JFE_Obs_geo.index > minTime) &
                             (JFE_Obs_geo.index < maxTime) &
                             (JFE_Obs_geo[param] > JFEparamMin[paramIDX])))
            # 20-sample Nuttall rolling mean.
            # NOTE(review): 20 samples at 15 s is 5 minutes, but the legend
            # says '1 Minute Average' -- confirm the sampling interval.
            JFE_smoothed = JFE_Obs_geo.loc[JFE_mask[paramIDX], param].rolling(
                20, win_type='nuttall', center=True).mean()
            JFE_smoothed.plot(
                ax=axes[paramIDX+4], label='1 Minute Average', color='black',
                linewidth=3)
            # Find the local maximums for Turbidity
            if param == 'Turb. -M[FTU]':
                ilocs_max.append(argrelextrema(JFE_smoothed.values,
                                               np.greater_equal, order=60, mode='wrap')[0])
                # Add start and end points?
                # ilocs_max = np.insert(ilocs_max, 0, 10)
                # ilocs_max[-1] = len(RBR_smoothed.values)-10
                ilocs_max_pts.append(JFE_smoothed.iloc[ilocs_max[paramIDX+4]].index.values)
                # Add labels if GPS data is available
                if GPS_File is not None:
                    axes[paramIDX+4].scatter(JFE_smoothed.iloc[
                        ilocs_max[paramIDX+4]].index, np.ones(len(ilocs_max[paramIDX+4])) * 10, 75,
                        color='blue')
                    for a in range(0, len(ilocs_max[paramIDX+4])):
                        axes[paramIDX+4].annotate(str(a + 1), (ilocs_max_pts[paramIDX+4][a], 10), fontsize=30)
            else:
                # Keep the shared lists aligned: JFE entries live at offset +4.
                ilocs_max.append(None)
                ilocs_max_pts.append(None)
            # Superseded raw-series stats, retained for reference:
            # dataTable[paramIDX+4+7, 0] = JFE_Obs_geo.loc[minTime:maxTime, param].mean(skipna=True)
            # dataTable[paramIDX+4+7, 1] = JFE_Obs_geo.loc[minTime:maxTime, param].std(skipna=True)
            # dataTable[paramIDX+4+7, 2] = max(JFE_Obs_geo.loc[minTime:maxTime, param].min(skipna=True), 0)
            # dataTable[paramIDX+4+7, 3] = JFE_Obs_geo.loc[minTime:maxTime, param].max(skipna=True)
            dataTable[paramIDX+4+7, 0] = JFE_smoothed.mean(skipna=True)
            dataTable[paramIDX+4+7, 1] = JFE_smoothed.std(skipna=True)
            dataTable[paramIDX+4+7, 2] = max(JFE_smoothed.min(skipna=True), 0)
            dataTable[paramIDX+4+7, 3] = JFE_smoothed.max(skipna=True)
            axes[paramIDX+4].set_ylabel(JFEparamName[paramIDX])
            axes[paramIDX+4].set_title('JFE: ' + JFEparamName[paramIDX])
            axes[paramIDX+4].set_xlabel('')
            axes[paramIDX+4].set_ylim(JFEparamMin[paramIDX], JFEparamMax[paramIDX])
            axes[paramIDX+4].legend(loc='upper right')
            # Observation date as the x label ('%#d' is Windows-only strftime).
            axes[paramIDX+4].set_xlabel(minTime.strftime('%B %#d, %Y'))
    else:
        # Single-axis layout: date label on the lone turbidity axis.
        axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))
# Formate Data Table
dataTableFormat_mean = []
dataTableFormat_maxmin = []
for d in range(0, len(roundIDX)):
dataTableFormat_mean.append(round(dataTable[d, 0], roundIDX[d]))
if dataTable[d, 3] == 0:
dataTableFormat_maxmin.append('--')
else:
dataTableFormat_maxmin.append(str(round(dataTable[d, 2], roundIDX[d])) + ' / ' + str(round(dataTable[d, 3], roundIDX[d])))
dfOut = pd.DataFrame(dataTable[:, :])
dfOutFormat = pd.DataFrame([dataTableFormat_mean, dataTableFormat_maxmin]).transpose()
# Change the column names
dfOut.columns =['Mean', 'Standard Deviation', 'Min', 'Max']
dfOutFormat.columns = [np.array([minTime.strftime('%B %#d, %Y'), minTime.strftime('%B %#d, %Y')]),
np.array(['Mean', 'Min / Max'])]
# Change the row indexes
if JFE_File is not None:
dfOut.iloc[14, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
'Mean Wave Direction Offshore [deg]', 'Depth [m]', 'Salinity [PSU]',
'Dissolved O₂ saturation [%]', 'Temperature [degC]',
'Turbidity [FTU]', 'Chl-Flu. [ppb]', 'Chl-a [ug/l]', 'Observation Date']
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
else:
dfOut.iloc[8, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
'Mean Wave Direction Offshore [deg]',
'Turbidity [NTU]', 'Observation Date']
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
fig.suptitle(siteName + ', ' +minTime.strftime('%B %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
plt.show()
dfOut.to_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
dfOutFormat.to_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx')
dfOut.to_csv(importPath + '/Summary_Stats_' + siteName + '.csv')
fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.pdf',
bbox_inches='tight')
fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.png',
bbox_inches='tight', dpi=500)
    #%% Plot Maps
    if JFE_File is None:
        # Only Turbidity Data
        fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(9, 9), constrained_layout=True)
        # Wrap the single axis in a list so the loops below can index it.
        ax = []
        ax.append(axes)
    else:
        # 3x2 grid: 4 RBR panels + 2 JFE panels.
        fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(19, 25), constrained_layout=True)
        ax = axes.flat
    fig.patch.set_facecolor('white')
    # fig.tight_layout(pad=1.05)
    fontprops = fm.FontProperties(size=25)
    # North-arrow anchor (axes fraction) and arrow length.
    x, y, arrow_length = 0.95, 0.93, 0.20
    plt.rcParams.update({'font.size': 22})
    # Data-driven extents (currently unused; the fixed per-site extents
    # computed earlier are applied instead).
    axXlimTT = (RBR_Obs_geo.loc[minTime:maxTime].geometry.x.min()-100,
                RBR_Obs_geo.loc[minTime:maxTime].geometry.x.max()+100)
    axYlimTT = (RBR_Obs_geo.loc[minTime:maxTime].geometry.y.min()-100,
                RBR_Obs_geo.loc[minTime:maxTime].geometry.y.max()+100)
    plt.setp(axes, xlim=axXlim, ylim=axYlim)
    # plt.setp(axes, xlim=axXlimTT, ylim=axYlimTT)
    # Plot the RBR observations
    # One basemap panel per RBR parameter, colored by the parameter value.
    for paramIDX, param in enumerate(RBRparam):
        if RBR_File is not None:
            # Draw thick black line to show approx path
            if GPS_File is None:
                ax[paramIDX].scatter(RBR_Obs_geo.loc[minTime:maxTime].geometry.x,
                                     RBR_Obs_geo.loc[minTime:maxTime].geometry.y,
                                     150, marker='o', color='black', label='Approximate Path')
                plt.legend(loc='upper left')
            RBR_Obs_geo.loc[minTime:maxTime].plot(
                column=param, ax=ax[paramIDX], vmin=RBRparamMin[paramIDX], vmax=RBRparamMax[paramIDX],
                legend=True, legend_kwds={'label': RBRparamName[paramIDX]},
                cmap=RBRparmCmap[paramIDX], markersize=20)
            ctx.add_basemap(ax[paramIDX], source=mapbox, crs='EPSG:32621')
            # Add time labels
            # plt.scatter(RBR_Obs_geo.loc[RBR_mask].iloc[
            #             ilocs_max, :].geometry.x,
            #             RBR_Obs_geo.loc[RBR_mask].iloc[
            #             ilocs_max, :].geometry.y, 75, marker='o', color='black')
            # Number the turbidity peaks found earlier at their map positions
            # (same 1-based numbering as the time-series annotation).
            if (not RBR_Obs_geo.geometry.isnull().all()) & (GPS_File is not None) & (param == 'Turbidity '):
                for a in range(0, len(ilocs_max[paramIDX])):
                    ax[paramIDX].annotate(str(a + 1), (RBR_Obs_geo.loc[RBR_mask[paramIDX]].iloc[
                        ilocs_max[paramIDX][a], :].geometry.x,
                        RBR_Obs_geo.loc[RBR_mask[paramIDX]].iloc[
                        ilocs_max[paramIDX][a], :].geometry.y), fontsize=30)
            ax[paramIDX].set_title(RBRparamName[paramIDX])
            # ax[paramIDX].set_ylabel('UTM 21N [m]')
            # ax[paramIDX].set_xlabel('UTM 21N [m]')
            ax[paramIDX].locator_params(axis='y', nbins=3)
            ax[paramIDX].ticklabel_format(useOffset=False, style='plain', axis='both')
            ax[paramIDX].get_xaxis().set_ticks([])
            ax[paramIDX].get_yaxis().set_ticks([])
            #Add scale-bar
            scalebar = AnchoredSizeBar(ax[paramIDX].transData,
                                       100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
            ax[paramIDX].add_artist(scalebar)
            # North arrow in the top-right corner (axes coordinates).
            ax[paramIDX].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
                                  arrowprops=dict(facecolor='black', width=6, headwidth=30),
                                  ha='center', va='center', fontsize=35,
                                  xycoords=ax[paramIDX].transAxes)
    # Plot Plot JFE Points
    # Map panels for the JFE sensor (axes 4-5). Note this re-defines the JFE
    # parameter lists to the two mapped parameters only.
    JFEparam = ['Turb. -M[FTU]', 'Chl-Flu.[ppb]']
    JFEparamName = ['Turbidity [FTU]', 'Chl-Flu. [ppb]']
    JFEparamCmp = [cmo.turbid, cmo.algae]
    JFEparamMin = [0.0, 0.0]
    JFEparamMax = [10.0, 1.0]
    if JFE_File is not None:
        for paramIDX, param in enumerate(JFEparam):
            if JFE_File is not None:  # redundant inner guard, retained as-is
                JFE_Obs_geo.loc[minTime:maxTime].plot(
                    column=param, ax=ax[paramIDX+4], vmin=JFEparamMin[paramIDX], vmax=JFEparamMax[paramIDX],
                    legend=True, legend_kwds={'label': JFEparamName[paramIDX]},
                    cmap=JFEparamCmp[paramIDX], markersize=20) # Note the space in the col name
                ctx.add_basemap(ax[paramIDX+4], source=mapbox, crs='EPSG:32621')
                # Add time labels (numbered turbidity peaks at map positions).
                if (not JFE_Obs_geo.geometry.isnull().all()) & (GPS_File is not None) & (param == 'Turb. -M[FTU]'):
                    for a in range(0, len(ilocs_max[paramIDX+4])):
                        ax[paramIDX+4].annotate(str(a + 1), (JFE_Obs_geo.loc[JFE_mask[paramIDX]].iloc[
                            ilocs_max[paramIDX+4][a], :].geometry.x,
                            JFE_Obs_geo.loc[JFE_mask[paramIDX]].iloc[
                            ilocs_max[paramIDX+4][a], :].geometry.y), fontsize=30)
                ax[paramIDX+4].set_title(JFEparamName[paramIDX])
                # ax[paramIDX+4].set_ylabel('UTM 21N [m]')
                # ax[paramIDX+4].set_xlabel('UTM 21N [m]')
                ax[paramIDX+4].locator_params(axis='y', nbins=3)
                ax[paramIDX+4].ticklabel_format(useOffset=False, style='plain', axis='both')
                ax[paramIDX+4].get_xaxis().set_ticks([])
                ax[paramIDX+4].get_yaxis().set_ticks([])
                #Add scale-bar
                scalebar = AnchoredSizeBar(ax[paramIDX+4].transData,
                                           100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
                ax[paramIDX+4].add_artist(scalebar)
                # North arrow in the top-right corner (axes coordinates).
                ax[paramIDX+4].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
                                        arrowprops=dict(facecolor='black', width=6, headwidth=30),
                                        ha='center', va='center', fontsize=25,
                                        xycoords=ax[paramIDX+4].transAxes)
    fig.suptitle(siteName + ', ' + minTime.strftime('%b %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
    plt.show()
    # Ensure the output folder exists before saving the map figures.
    if not os.path.exists(importPath + '/Figures'):
        os.mkdir(importPath + '/Figures')
    fig.savefig(importPath + '/Figures/SummaryMap_' + siteName + '.pdf',
                bbox_inches='tight')
    fig.savefig(importPath + '/Figures/SummaryMap_' + siteName + '.png',
                bbox_inches='tight', dpi=500)
#%% Summary Sheet
# Join the formatted per-deployment stats tables into one sheet per site.
# Each group holds one site's four deployments in chronological order
# (pre / during / post construction, then monitoring).
plotIDXsLoop = []
plotIDXsLoop.append([0, 3, 6, 9])
plotIDXsLoop.append([1, 4, 7, 10])
plotIDXsLoop.append([2, 5, 8, 11])
for i in range(0, 3):
    summTable = None
    plotIDXs = plotIDXsLoop[i]
    for s, plotIDX in enumerate(plotIDXs):
        ## Define master import path
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]
        # Two-level header (date over Mean / 'Min / Max'), rows indexed by
        # parameter name.
        obsStatsIN = pd.read_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx', header=[0,1], index_col=0)
        if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
            # Monitoring runs label turbidity in NTU; align the row label so
            # the join lines up with the construction-era tables.
            obsStatsIN.rename({'Turbidity [NTU]': 'Turbidity [FTU]'}, inplace=True)
        if s == 0:
            summTable = obsStatsIN
        else:
            summTable = summTable.join(obsStatsIN)
    # Remove -999 with nan (the "no wave nowcast" sentinel)
    summTable.replace(-999, np.nan, inplace=True)
    # siteName is constant within a group, so it names the merged file.
    summTable.to_excel('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/Summary_StatsMerge_' + siteName + '.xlsx')
#%% Summary Plot
# One figure per site: scatter each summary statistic against deployment date.
plotvars = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
            'Significant Wave Height [m]', 'Salinity [PSU]',
            'Dissolved O₂ [%]', 'Temperature [degC]',
            'Turbidity [FTU]', 'Chl-Flu. [ppb]']
# Same site groupings as the summary-sheet step above.
plotIDXsLoop = []
plotIDXsLoop.append([0, 3, 6, 9])
plotIDXsLoop.append([1, 4, 7, 10])
plotIDXsLoop.append([2, 5, 8, 11])
for i in range(0, 3):
    summTable = None
    plotIDXs = plotIDXsLoop[i]
    plotDates = []
    # BUG FIX: was np.empty, which leaves uninitialized garbage in the rows
    # the monitoring branch never fills (3-7 and 9); start from NaN so
    # unfilled cells are simply not plotted.
    plotTable = np.full([10, len(plotIDXs)], np.nan)
    for s, plotIDX in enumerate(plotIDXs):
        ## Define master import path
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]
        obsStatsIN = pd.read_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
        if any((plotIDX == 9, plotIDX == 10, plotIDX == 11)):
            # Monitoring deployments only report met stats and turbidity;
            # the date row sits at index 8 of the shorter table.
            plotTable[0:3, s] = obsStatsIN.iloc[0:3, 1]
            plotTable[8, s] = obsStatsIN.iloc[7, 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[8, 1], '%B %d, %Y'))
        else:
            plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 12], 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[14, 1], '%B %d, %Y'))
    fig, axes = plt.subplots(nrows=5, ncols=2, figsize=(19, 25), sharex=True)
    fig.patch.set_facecolor('white')
    fig.tight_layout(pad=3)
    ax = axes.flat
    # Replace zero/negative sentinels with NaN so they are not plotted
    plotTable[plotTable < 0.00001] = np.nan
    for v in range(0, 10):
        ax[v].scatter(plotDates, plotTable[v, :], 250)
        ax[v].set_ylabel(plotvars[v])
    fig.suptitle(siteName, fontsize=35)
    plt.gcf().autofmt_xdate()
    plt.gcf().align_ylabels()
    plt.show()
    fig.savefig('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/' + siteName + '.pdf',
                bbox_inches='tight')
    fig.savefig('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/' + siteName + '.png',
                bbox_inches='tight', dpi=500)

View File

View File

@ -0,0 +1,225 @@
## Wave transmission through a Breakwater
# Author: AJMR
# December 14, 2021
# %% Setup Project
from mikeio import Dfs0, Dfs1
from mikeio.eum import ItemInfo, EUMUnit, EUMType
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import math
import matplotlib.pyplot as plt
import contextily as ctx
# %% Read Model Log
# Master run-log workbook; the "ModelLog" sheet maps run numbers to run
# names and the network locations of their result files.
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models",
              "13632.101.W.AJMR.Rev0_ModelLog.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read in Point Shapefile of breakwater toe vertices (UTM zone 16N)
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
# %% Read in Breakwater crest points (x, y, crest elevation CSV)
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
# Crest CSV coordinates are declared as EPSG:32154; reproject to the model
# CRS (EPSG:32616, UTM 16N) before the nearest-neighbour join.
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
# Attach the nearest crest point (and its elevation) to every toe vertex.
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Read Model Results
# Read the BreakPts.dfs0 point output for each run listed in modelPlot.
# NOTE(review): the loop body only rebinds locals, so after the loop only the
# LAST run's data survives (and `m` keeps its final value, which the
# output-file naming below relies on).
modelPlot = range(12, 20)
for m in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    dfsIN_read = dfsIN.read()
    MIKEds = dfsIN_read.to_dataframe()
# %% Merge with dataframe
# Join each model time step's dfs0 point output onto the breakwater toe
# points. Column labels look like '"FID" "IND" Parameter name'; parse the
# feature id, vertex index, and parameter name out of each label.
# NOTE(review): the slice positions assume single-character FID/IND tokens --
# confirm against the dfs0 item naming if runs grow past 9 features.
breakwaterPTS_times = None
for t in range(0, MIKEds.shape[0]):
    # Work on a copy so per-timestep values are not written back into the
    # shared breakwaterPTS_Crest frame (the original aliased it, silently
    # mutating the source GeoDataFrame on every iteration).
    breakwaterPTS_merge = breakwaterPTS_Crest.copy()
    for i, colName in enumerate(MIKEds.columns.values):
        paramName = colName[colName.find('"', 5, -1) + 3:]
        if paramName not in breakwaterPTS_merge.columns:
            breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
        tmpFID = int(colName[1:colName.find('"', 1)])
        tmpIND = int(colName[5:colName.find('"', 5)])
        breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                 (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                paramName] = MIKEds.iloc[t, i]
    breakwaterPTS_merge['Time'] = MIKEds.index[t]
    breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
# sjoin_nearest left an 'index_right' column; rename it so the later sjoins
# (which create their own 'index_right') do not collide.
breakwaterPTS_times.rename(columns={'index_right': 'index_right_old'}, inplace=True)
# %% Conversion
# Transmitted wave height behind the breakwater from the relative crest
# freeboard, scaled by gamma (formulation per project design basis).
breakwaterPTS_times['transmitted'] = np.full([breakwaterPTS_times.shape[0], 1], np.nan)
gamma = 1.2
# Relative freeboard Rc/Hs, hoisted once instead of recomputed in every mask.
relFreeboard = (breakwaterPTS_times['Crest'] - breakwaterPTS_times['Surface elevation']) / \
    breakwaterPTS_times['Sign. Wave Height']
transmit_paramA = 0.81 * gamma
transmit_paramB = 0.45 * gamma - 0.3 * gamma * relFreeboard
# Identify different overtopping regimes
breakMask_A = relFreeboard >= 1.2                                         # high freeboard: minimal transmission
breakMask_B = (relFreeboard < 1.2) & (transmit_paramA < transmit_paramB)  # capped coefficient
breakMask_C = (relFreeboard < 1.2) & (transmit_paramA > transmit_paramB)  # freeboard-dependent coefficient
# Assign with .loc (the original chained indexing df['col'][mask] = ... raises
# SettingWithCopyWarning and can silently fail to write through) -- this also
# matches the corrected companion script.
breakwaterPTS_times.loc[breakMask_A, 'transmitted'] = \
    breakwaterPTS_times['Sign. Wave Height'][breakMask_A] * 0.09 * gamma
breakwaterPTS_times.loc[breakMask_B, 'transmitted'] = \
    breakwaterPTS_times['Sign. Wave Height'][breakMask_B] * transmit_paramA
breakwaterPTS_times.loc[breakMask_C, 'transmitted'] = \
    breakwaterPTS_times['Sign. Wave Height'][breakMask_C] * transmit_paramB[breakMask_C]
# %% Merge with breakwater transformed waves
# Boolean mask selecting only the final time step for the boundary joins below.
breakwaterPTS_end = breakwaterPTS_times['Time'] == breakwaterPTS_times['Time'].max()
# %% Read Inner Boundary Points North
# Whitespace-delimited x/y point list for the northern inner model boundary.
innerBoundNorth = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_North.txt',
    delimiter=r'\s+', header=None, names=['x', 'y'])  # raw string: '\s' is an invalid escape in a plain literal
innerBoundNorth.sort_values(by=['x'], inplace=True)
innerBoundNorth_gdf = gp.GeoDataFrame(innerBoundNorth, crs='EPSG:32616',
                                      geometry=gp.points_from_xy(innerBoundNorth.x, innerBoundNorth.y))
# Attach the nearest transmitted-wave result (final time step) to each boundary point.
innerBoundNorth_gdf = innerBoundNorth_gdf.sjoin_nearest(breakwaterPTS_times.loc[breakwaterPTS_end, :])
# %% Read Inner Boundary Points Mid
# Whitespace-delimited x/y point list for the middle inner model boundary.
innerBoundMid = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_Mid.txt',
    delimiter=r'\s+', header=None, names=['x', 'y'])  # raw string: '\s' is an invalid escape in a plain literal
innerBoundMid.sort_values(by=['x'], inplace=True)
innerBoundMid_gdf = gp.GeoDataFrame(innerBoundMid, crs='EPSG:32616',
                                    geometry=gp.points_from_xy(innerBoundMid.x, innerBoundMid.y))
# Attach the nearest transmitted-wave result (final time step) to each boundary point.
innerBoundMid_gdf = innerBoundMid_gdf.sjoin_nearest(breakwaterPTS_times.loc[breakwaterPTS_end, :])
# %% Read Inner Boundary Points South  (heading corrected: this is the South boundary)
innerBoundSouth = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_South.txt',
    delimiter=r'\s+', header=None, names=['x', 'y'])  # raw string: '\s' is an invalid escape in a plain literal
innerBoundSouth.sort_values(by=['x'], inplace=True)
innerBoundSouth_gdf = gp.GeoDataFrame(innerBoundSouth, crs='EPSG:32616',
                                      geometry=gp.points_from_xy(innerBoundSouth.x, innerBoundSouth.y))
# Attach the nearest transmitted-wave result (final time step) to each boundary point.
innerBoundSouth_gdf = innerBoundSouth_gdf.sjoin_nearest(breakwaterPTS_times.loc[breakwaterPTS_end, :])
# %% Save transmitted-wave boundary conditions as DFS1 files
def _write_inner_bound_dfs1(bound_gdf, dfsfilename, start_time):
    """Write one inner-boundary GeoDataFrame to a MIKE dfs1 boundary file.

    Transmitted wave height, peak period, and mean direction are written as
    two identical time steps (dt = 86400 s) so MIKE can interpolate over the
    run window; directional spreading is fixed at 23.28 deg for all nodes.
    """
    boundData = pd.DataFrame(
        bound_gdf.loc[:, ['Time', 'transmitted', 'Peak Wave Period', 'Mean Wave Direction']])
    boundData.set_index('Time', inplace=True)
    boundData.rename(columns={'transmitted': 'Significant wave height',
                              'Peak Wave Period': 'Peak wave period',
                              'Mean Wave Direction': 'Mean wave direction'},
                     inplace=True)
    items = [ItemInfo("Significant wave height", EUMType.Significant_wave_height, EUMUnit.meter),
             ItemInfo("Peak wave period", EUMType.Wave_period, EUMUnit.second),
             ItemInfo("Mean wave direction", EUMType.Mean_Wave_Direction, EUMUnit.degree),
             ItemInfo("Directional Standard Deviation, n", EUMType.Directional_Standard_Deviation, EUMUnit.degree)]
    # Each data entry is (2 timesteps, n nodes): the spatial profile duplicated.
    Dfs1().write(filename=dfsfilename,
                 data=[np.array((boundData['Significant wave height'], boundData['Significant wave height'])),
                       np.array((boundData['Peak wave period'], boundData['Peak wave period'])),
                       np.array((boundData['Mean wave direction'], boundData['Mean wave direction'])),
                       np.ones((2, boundData.shape[0])) * 23.28],
                 start_time=start_time,
                 dt=86400,
                 items=items)


# NOTE(review): `m` is the leftover index from the model-read loop above, so
# the filenames are tagged with the LAST run in modelPlot.
_outDir = '//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/InnerBounds/'
_startTime = breakwaterPTS_times['Time'].min()
_write_inner_bound_dfs1(innerBoundNorth_gdf, _outDir + runLog['Run Name'][m] + 'ProdInnerNorth1.dfs1', _startTime)
_write_inner_bound_dfs1(innerBoundMid_gdf, _outDir + runLog['Run Name'][m] + 'ProdInnerMid.dfs1', _startTime)
_write_inner_bound_dfs1(innerBoundSouth_gdf, _outDir + runLog['Run Name'][m] + 'ProdInnerSouth.dfs1', _startTime)
# %% Test Plot
# NOTE(security): the Mapbox access token is hard-coded below; move it to an
# environment variable before sharing this script outside the project.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(9, 9), constrained_layout=True)
breakwaterPTS_merge.plot(ax=axes, column='Sign. Wave Height', cmap='viridis')
# breakwaterPTS_merge.plot(ax=axes, column='Crest', cmap='viridis', legend=True)
# The original referenced an undefined `innerBound_gdf` (NameError at runtime);
# plot the transmitted wave height on the three boundary frames that exist.
for _bound_gdf in (innerBoundNorth_gdf, innerBoundMid_gdf, innerBoundSouth_gdf):
    _bound_gdf.plot(ax=axes, column='transmitted', cmap='viridis')
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
plt.show()

View File

@ -0,0 +1,257 @@
## Wave transmission through a Breakwater
# Author: AJMR
# December 14, 2021
# %% Setup Project
from mikeio import Dfs0, Dfs1
from mikeio.eum import ItemInfo, EUMUnit, EUMType
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import math
import matplotlib.pyplot as plt
import contextily as ctx
# %% Read Model Log
# Master run-log workbook; the "ModelLog" sheet maps run numbers to run
# names and the network locations of their result files.
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models",
              "13632.101.W.AJMR.Rev0_ModelLog.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read in Point Shapefile of breakwater toe vertices (UTM zone 16N)
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
# %% Read in Breakwater crest points (x, y, crest elevation CSV)
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
# Crest CSV coordinates are declared as EPSG:32154; reproject to the model
# CRS (EPSG:32616, UTM 16N) before the nearest-neighbour join.
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
# Attach the nearest crest point (and its elevation) to every toe vertex.
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Modify Breakwater Crest to new elevation
# Design crest elevations by station, from email. Columns: x, y (survey
# coordinates), crest elevation; station labels in the trailing comments.
breakSegments = np.zeros((4, 3))
breakSegments[0, :] = [2534702.7417, 372246.1099, 0]      # 6+85
breakSegments[1, :] = [2535059.6902, 371628.1622, 180]    # 14+00
breakSegments[2, :] = [2535910.4820, 370655.9834, 181.1]  # 27+00
breakSegments[3, :] = [2536821.5682, 369722.4850, 180.5]  # 40+05
breakSegments_gdf = gp.GeoDataFrame(breakSegments,
                                    geometry=gp.points_from_xy(breakSegments[:, 0], breakSegments[:, 1]),
                                    crs='EPSG:8796')
breakSegments_gdf.to_crs('EPSG:32616', inplace=True)
# Assign each toe point the crest elevation of the x-range segment it falls
# in. Vectorized np.select replaces the original per-row loop, which also
# relied on the frame's index being a clean RangeIndex for label lookups;
# np.select keeps the first-match semantics of the original if/elif chain,
# and points outside all segments keep their sjoin-derived Crest value.
_ptX = breakwaterPTS_Crest.geometry.x.to_numpy()
_segX = breakSegments_gdf.geometry.x.to_numpy()
breakwaterPTS_Crest['Crest'] = np.select(
    [(_segX[0] <= _ptX) & (_ptX <= _segX[1]),
     (_segX[1] <= _ptX) & (_ptX <= _segX[2]),
     (_segX[2] <= _ptX) & (_ptX <= _segX[3])],
    [breakSegments[1, 2], breakSegments[2, 2], breakSegments[3, 2]],
    default=breakwaterPTS_Crest['Crest'])
# %% Read Model Results
# Read the BreakPts.dfs0 point output for each run listed in modelPlot
# (a single run here). `m` and MIKEds persist after the loop and are used
# by the sections below.
modelPlot = range(31, 32)
for m in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    dfsIN_read = dfsIN.read()
    MIKEds = dfsIN_read.to_dataframe()
# %% Merge with dataframe
# Join each model time step's dfs0 point output onto the breakwater toe
# points. Column labels look like '"FID" "IND" Parameter name'; parse the
# feature id, vertex index, and parameter name out of each label.
# NOTE(review): the slice positions assume single-character FID/IND tokens --
# confirm against the dfs0 item naming if runs grow past 9 features.
breakwaterPTS_times = None
for t in range(0, MIKEds.shape[0]):
    # Work on a copy so per-timestep values are not written back into the
    # shared breakwaterPTS_Crest frame (the original aliased it, silently
    # mutating the source GeoDataFrame on every iteration).
    breakwaterPTS_merge = breakwaterPTS_Crest.copy()
    for i, colName in enumerate(MIKEds.columns.values):
        paramName = colName[colName.find('"', 5, -1) + 3:]
        if paramName not in breakwaterPTS_merge.columns:
            breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
        tmpFID = int(colName[1:colName.find('"', 1)])
        tmpIND = int(colName[5:colName.find('"', 5)])
        breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                 (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                paramName] = MIKEds.iloc[t, i]
    breakwaterPTS_merge['Time'] = MIKEds.index[t]
    breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
# sjoin_nearest left an 'index_right' column; rename it so the later sjoins
# (which create their own 'index_right') do not collide.
breakwaterPTS_times.rename(columns={'index_right': 'index_right_old'}, inplace=True)
# %% Conversion
# Transmitted wave height behind the breakwater from the relative crest
# freeboard, scaled by gamma (formulation per project design basis).
breakwaterPTS_times['transmitted'] = np.full([breakwaterPTS_times.shape[0], 1], np.nan)
gamma = 1.2
# Relative freeboard Rc/Hs, hoisted once instead of recomputed in every mask.
relFreeboard = (breakwaterPTS_times['Crest'] - breakwaterPTS_times['Surface elevation']) / \
    breakwaterPTS_times['Sign. Wave Height']
transmit_paramA = 0.81 * gamma
transmit_paramB = 0.45 * gamma - 0.3 * gamma * relFreeboard
# Identify different overtopping regimes
breakMask_A = relFreeboard >= 1.2                                         # high freeboard: minimal transmission
breakMask_B = (relFreeboard < 1.2) & (transmit_paramA < transmit_paramB)  # capped coefficient
breakMask_C = (relFreeboard < 1.2) & (transmit_paramA > transmit_paramB)  # freeboard-dependent coefficient
# Updated approach (.loc assignment, per-regime transmission coefficient)
breakwaterPTS_times.loc[breakMask_A, 'transmitted'] = \
    breakwaterPTS_times['Sign. Wave Height'][breakMask_A] * 0.09 * gamma
breakwaterPTS_times.loc[breakMask_B, 'transmitted'] = \
    breakwaterPTS_times['Sign. Wave Height'][breakMask_B] * transmit_paramA
breakwaterPTS_times.loc[breakMask_C, 'transmitted'] = \
    breakwaterPTS_times['Sign. Wave Height'][breakMask_C] * transmit_paramB[breakMask_C]
# %% Merge with breakwater transformed waves
# Boolean mask selecting only the final time step for the boundary joins below.
breakwaterPTS_end = breakwaterPTS_times['Time'] == breakwaterPTS_times['Time'].max()
# %% Read Inner Boundary Points North
# Mesh2 boundary node list: node id, value, x, y (whitespace delimited).
# (Superseded read of the old 2-column InnerBound_North.txt removed.)
innerBoundNorth = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_Mesh2_North.txt',
    delimiter=r'\s+', header=None, names=['node', 'val', 'x', 'y'])  # raw string: '\s' is an invalid escape otherwise
innerBoundNorth.sort_values(by=['x'], inplace=True)
innerBoundNorth_gdf = gp.GeoDataFrame(innerBoundNorth, crs='EPSG:32616',
                                      geometry=gp.points_from_xy(innerBoundNorth.x, innerBoundNorth.y))
# Attach the nearest transmitted-wave result (final time step) to each boundary point.
innerBoundNorth_gdf = innerBoundNorth_gdf.sjoin_nearest(breakwaterPTS_times.loc[breakwaterPTS_end, :])
# %% Read Inner Boundary Points Mid
# Mesh2 boundary node list: node id, value, x, y (whitespace delimited).
# (Superseded read of the old 2-column InnerBound_Mid.txt removed.)
innerBoundMid = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_Mesh2_Mid.txt',
    delimiter=r'\s+', header=None, names=['node', 'val', 'x', 'y'])  # raw string: '\s' is an invalid escape otherwise
innerBoundMid.sort_values(by=['x'], inplace=True)
innerBoundMid_gdf = gp.GeoDataFrame(innerBoundMid, crs='EPSG:32616',
                                    geometry=gp.points_from_xy(innerBoundMid.x, innerBoundMid.y))
# Attach the nearest transmitted-wave result (final time step) to each boundary point.
innerBoundMid_gdf = innerBoundMid_gdf.sjoin_nearest(breakwaterPTS_times.loc[breakwaterPTS_end, :])
# %% Read Inner Boundary Points South
# NOTE(review): North/Mid above were switched to the 4-column Mesh2 files,
# but South still reads the old 2-column InnerBound_South.txt -- confirm a
# Mesh2 South file is not expected here.
innerBoundSouth = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_South.txt',
    delimiter=r'\s+', header=None, names=['x', 'y'])  # raw string: '\s' is an invalid escape otherwise
innerBoundSouth.sort_values(by=['x'], inplace=True)
innerBoundSouth_gdf = gp.GeoDataFrame(innerBoundSouth, crs='EPSG:32616',
                                      geometry=gp.points_from_xy(innerBoundSouth.x, innerBoundSouth.y))
# Attach the nearest transmitted-wave result (final time step) to each boundary point.
innerBoundSouth_gdf = innerBoundSouth_gdf.sjoin_nearest(breakwaterPTS_times.loc[breakwaterPTS_end, :])
# %% Save transmitted-wave boundary conditions as DFS1 files
def _write_inner_bound_dfs1(bound_gdf, dfsfilename, start_time):
    """Write one inner-boundary GeoDataFrame to a MIKE dfs1 boundary file.

    Transmitted wave height, peak period, and mean direction are written as
    two identical time steps (dt = 86400 s) so MIKE can interpolate over the
    run window; directional spreading is fixed at 23.28 deg for all nodes.
    """
    boundData = pd.DataFrame(
        bound_gdf.loc[:, ['Time', 'transmitted', 'Peak Wave Period', 'Mean Wave Direction']])
    boundData.set_index('Time', inplace=True)
    boundData.rename(columns={'transmitted': 'Significant wave height',
                              'Peak Wave Period': 'Peak wave period',
                              'Mean Wave Direction': 'Mean wave direction'},
                     inplace=True)
    items = [ItemInfo("Significant wave height", EUMType.Significant_wave_height, EUMUnit.meter),
             ItemInfo("Peak wave period", EUMType.Wave_period, EUMUnit.second),
             ItemInfo("Mean wave direction", EUMType.Mean_Wave_Direction, EUMUnit.degree),
             ItemInfo("Directional Standard Deviation, n", EUMType.Directional_Standard_Deviation, EUMUnit.degree)]
    # Each data entry is (2 timesteps, n nodes): the spatial profile duplicated.
    Dfs1().write(filename=dfsfilename,
                 data=[np.array((boundData['Significant wave height'], boundData['Significant wave height'])),
                       np.array((boundData['Peak wave period'], boundData['Peak wave period'])),
                       np.array((boundData['Mean wave direction'], boundData['Mean wave direction'])),
                       np.ones((2, boundData.shape[0])) * 23.28],
                 start_time=start_time,
                 dt=86400,
                 items=items)


# `m` is the run index from the model-read loop above (single run here).
_outDir = '//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/InnerBounds/'
_startTime = breakwaterPTS_times['Time'].min()
_write_inner_bound_dfs1(innerBoundNorth_gdf, _outDir + runLog['Run Name'][m] + 'ProdInnerNorth1.dfs1', _startTime)
_write_inner_bound_dfs1(innerBoundMid_gdf, _outDir + runLog['Run Name'][m] + 'ProdInnerMid.dfs1', _startTime)
_write_inner_bound_dfs1(innerBoundSouth_gdf, _outDir + runLog['Run Name'][m] + 'ProdInnerSouth.dfs1', _startTime)
# %% Test Plot
# NOTE(security): the Mapbox access token is hard-coded below; move it to an
# environment variable before sharing this script outside the project.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(9, 9), constrained_layout=True)
breakwaterPTS_merge.plot(ax=axes, column='Sign. Wave Height', cmap='viridis')
# breakwaterPTS_merge.plot(ax=axes, column='Crest', cmap='viridis', legend=True)
# The original referenced an undefined `innerBound_gdf` (NameError at runtime);
# plot the transmitted wave height on the three boundary frames that exist.
for _bound_gdf in (innerBoundNorth_gdf, innerBoundMid_gdf, innerBoundSouth_gdf):
    _bound_gdf.plot(ax=axes, column='transmitted', cmap='viridis')
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
plt.show()