Jan 2022 Baird Python Update

This commit is contained in:
Alexander Rey 2022-01-10 10:31:09 -05:00
parent 531a5fa026
commit 0dea526cd3
38 changed files with 6657 additions and 180 deletions

8
EWR_D3D/.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml
# Editor-based HTTP Client requests
/httpRequests/

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@ -0,0 +1,13 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="bool.*" />
<option value="geopandas.io.file" />
</list>
</option>
</inspection_tool>
</profile>
</component>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

4
EWR_D3D/.idea/misc.xml Normal file
View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (D3DFM)" project-jdk-type="Python SDK" />
</project>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/EWR_D3D.iml" filepath="$PROJECT_DIR$/.idea/EWR_D3D.iml" />
</modules>
</component>
</project>

6
EWR_D3D/.idea/vcs.xml Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

338
EWR_D3D/DFM_Sens.ipynb Normal file

File diff suppressed because one or more lines are too long

7
EWR_Data/.idea/other.xml Normal file
View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PySciProjectComponent">
<option name="PY_SCI_VIEW" value="true" />
<option name="PY_SCI_VIEW_SUGGESTED" value="true" />
</component>
</project>

File diff suppressed because one or more lines are too long

View File

@ -4,7 +4,7 @@
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/data" />
</content>
<orderEntry type="jdk" jdkName="Python 3.8 (BairdBase)" jdkType="Python SDK" />
<orderEntry type="jdk" jdkName="Python 3.8 (geopandas_forge_mustique)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (BairdBase)" project-jdk-type="Python SDK" />
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (geopandas_forge_mustique)" project-jdk-type="Python SDK" />
</project>

BIN
Mustique/20211001_00_000.nc Normal file

Binary file not shown.

108
Mustique/GFS_Import.py Normal file
View File

@ -0,0 +1,108 @@
#%% Script to read in GFS data at a specific point
# Author: Alexander Rey
# November 18, 2021
#%% Project Setup
import os
import sys
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from xarray.backends import NetCDF4DataStore
import xarray as xr
from datetime import datetime, timedelta
from netCDF4 import num2date
from metpy.units import units
from siphon.catalog import TDSCatalog
#%% Helper function for finding proper time variable
def find_time_var(var, time_basename='time'):
    """Return the first coordinate of *var* whose name begins with *time_basename*.

    Raises ValueError when the variable carries no matching coordinate.
    """
    match = next((c for c in var.coords if c.startswith(time_basename)), None)
    if match is None:
        raise ValueError('No time variable found for ' + var.name)
    return var.coords[match]
#%% GFS Import
# Download GFS 0.5-degree analysis fields from the NCEI THREDDS server at a
# fixed point every 6 hours over a date range, then plot surface temperature.
# List hours to download
# NOTE(review): 'hours' is unused -- the 6-hourly spacing comes from
# freq='6H' in the date_range below.
hours = [0, 6, 12, 18]
# Variables to request from the analysis (_000) files
var = ['Temperature_surface', 'Wind_speed_gust_surface',
       'u-component_of_wind_height_above_ground', 'v-component_of_wind_height_above_ground']
# Precipitation variable, only available from the 6-hour forecast (_006) files
var_precp = ['Total_precipitation_surface_6_Hour_Accumulation']
# Accumulators for the point time series
temp_1d = []
gust_1d = []
wndu_1d = []
wndv_1d = []
prep_1d = []
time_1d = []
# Set times to download
startdate = datetime(year=2021, month=10, day=1, hour=0, minute=0, second=0)
enddate = datetime(year=2021, month=10, day=7, hour=0, minute=0, second=0)
date_list = pd.date_range(startdate, enddate, freq='6H')
# Loop through dates
for date in date_list:
    # Daily catalog for the 0.5 degree GFS analysis files
    best_gfs = TDSCatalog('https://www.ncei.noaa.gov/thredds/catalog/model-gfs-g4-anl-files/' +
                          date.strftime('%Y%m') + '/' + date.strftime('%Y%m%d') + '/' + 'catalog.xml')
    # Generate URLs for specific grib file: analysis (+0 h) and +6 h forecast
    best_ds = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_000.grb2']
    best_ds_precp = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_006.grb2']
    # Format the query parameters
    ncss = best_ds.subset()
    query = ncss.query()
    ncss_precp = best_ds_precp.subset()
    query_precp = ncss_precp.query()
    # Extract data from specific point
    query.lonlat_point(-75.6972, 45.4215).time(date)
    query.accept('netcdf')
    query.variables(var[0], var[1], var[2], var[3])
    # BUG FIX: was vertical_level(0) with a '#10 m' comment; the
    # height_above_ground wind components live at the 10 m level, matching
    # the comment and the companion WestCoast monitoring script.
    query.vertical_level(10)
    data = ncss.get_data(query)
    data = xr.open_dataset(NetCDF4DataStore(data), drop_variables='height_above_ground4')
    # Precipitation is a 6-hour surface accumulation; request it 6 h after the cycle
    query_precp.lonlat_point(-75.6972, 45.4215).time(date + timedelta(hours=6))
    query_precp.accept('netcdf')
    query_precp.variables(var_precp[0])
    query_precp.vertical_level(0)  # surface field -- level value likely ignored; TODO confirm
    data_precp = ncss_precp.get_data(query_precp)
    data_precp = xr.open_dataset(NetCDF4DataStore(data_precp))
    temp_3d = data[var[0]]
    gust_3d = data[var[1]]
    wndu_3d = data[var[2]]
    wndv_3d = data[var[3]]
    prep_3d = data_precp[var_precp[0]]
    # Read the individual point (with units) and append to the list
    temp_1d.append(temp_3d.metpy.unit_array.squeeze())
    gust_1d.append(gust_3d.metpy.unit_array.squeeze())
    wndu_1d.append(wndu_3d.metpy.unit_array.squeeze())
    wndv_1d.append(wndv_3d.metpy.unit_array.squeeze())
    prep_1d.append(prep_3d.metpy.unit_array.squeeze())
    time_1d.append(find_time_var(temp_3d))
#%% Plot the surface temperature time series
fig = plt.figure(figsize=(15, 12))
fig.patch.set_facecolor('white')
plt.plot([i.values[0] for i in time_1d], [i.m.item(0) for i in temp_1d])
plt.show()

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,765 @@
#%% Project Setup
import os
import pandas as pd
import geopandas as gp
from scipy.signal import argrelextrema
import numpy as np
import math
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
import matplotlib.pyplot as plt
import matplotlib.font_manager as fm
import matplotlib as mpl
import cartopy.crs as ccrs
import contextily as ctx
import cmocean.cm as cmo
from shapely.geometry import Point, LineString
from xarray.backends import NetCDF4DataStore
import xarray as xr
from datetime import datetime, timedelta
from netCDF4 import num2date
from metpy.units import units
import matplotlib.pyplot as plt
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from metpy.plots import ctables
from siphon.catalog import TDSCatalog
#%% Helper function for finding proper time variable
def find_time_var(var, time_basename='time'):
    """Pick out the time coordinate attached to *var*.

    Scans the variable's coordinate names and hands back the first one that
    starts with *time_basename*; raises ValueError when none matches.
    """
    for candidate in var.coords:
        if not candidate.startswith(time_basename):
            continue
        return var.coords[candidate]
    raise ValueError('No time variable found for ' + var.name)
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Survey metadata: twelve deployments = 4 campaign phases x 3 sites, listed
# phase-major so index s walks Great House, Greensleeves, Old Queens Fort
# within each phase.
_BASE = 'C:/Users/arey/files/Projects/West Coast/'
_PHASES = ['Pre_Construction', 'Construction', 'Post_Construction', 'Monitoring_Nov']
_SITES = ['Great House', 'Greensleeves', 'Old Queens Fort']
_PHASE_LABELS = ['Before Construction', 'During Construction', 'After Construction', 'Monitoring']
importPaths = [_BASE + phase + '/' + site + '/' for phase in _PHASES for site in _SITES]
siteNames = _SITES * len(_PHASES)
timeLabels = [label for label in _PHASE_LABELS for _ in _SITES]
# Offshore wave nowcast file per deployment; the monitoring trips have no
# matching nowcast, hence the trailing None entries.
_WAVE_DIR = 'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/'
wave_bts_file = [_WAVE_DIR + 'spawnee_mid_27m.bts',
                 _WAVE_DIR + 'spawnee_mid_27m.bts',
                 _WAVE_DIR + 'holetown_mid_15m'] * 3 + [None, None, None]
# Loop over the Monitoring_Nov deployments (metadata indices 9-11).
# NOTE(review): range(9,13) also yields s == 12, which is out of bounds for the
# 12-element importPaths/siteNames/timeLabels lists -- the final iteration
# will raise IndexError; presumably this was meant to be range(9, 12).
for s in range(9,13):
## Define master import path
importPath = importPaths[s]
siteName = siteNames[s]
timeLabel = timeLabels[s]
importFiles = os.listdir(importPath)
# Initialize import variables
RBR_File = None
JFE_File = None
GPS_File = None
# Classify deployment files by suffix, skipping 'Summary' outputs from a
# previous run. Order matters: '_A.csv' (JFE) is tested before the generic
# '.csv' (GPS) so JFE exports are not misclassified as GPS tracks.
for i in range(0, len(importFiles)):
if '.xlsx' in importFiles[i] and 'Summary' not in importFiles[i]:
RBR_File = importFiles[i]
elif '_A.csv' in importFiles[i] and 'Summary' not in importFiles[i]:
JFE_File = importFiles[i]
elif '.csv' in importFiles[i] and 'Summary' not in importFiles[i]:
GPS_File = importFiles[i]
#%% RBR Import Data
# RBR logger: Excel export, 'Data' sheet, two header rows.
if RBR_File is not None:
RBR_Obs = pd.read_excel(importPath + RBR_File,
sheet_name='Data', skiprows=0, header=1)
#%% JFE Import Data
# JFE instrument: CSV export with a 30-line preamble.
if JFE_File is not None:
JFE_Obs = pd.read_csv(importPath + JFE_File, skiprows=30)
#%% GPS Import Data
if GPS_File is not None:
GPS = pd.read_csv(importPath + GPS_File,
header=None, names=['Index', 'Date1', 'Time1', 'Date2', 'Time2', 'Northing', 'North', 'Easting', 'East', 'Var1', 'Var2'])
#convert GPS data to geodataframe
# Easting is negated -- assumes the logger records west longitude as a
# positive magnitude; TODO confirm against a raw GPS file.
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))
GPS_gdf.set_index('DateTime', inplace=True)
# Convert to UTM
GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32621")
else:
# Synthesize GPS data for great house
# No GPS track: distribute a pre-digitised path evenly across the RBR
# record's time span so the nearest-time joins below still work.
GPS_gdf = gp.read_file('C:/Users/arey/files/Projects/West Coast/GreatHousePath_R3.shp')
GPS_gdf['DateTime'] = pd.date_range(pd.to_datetime(RBR_Obs['Time'].iloc[0]),
pd.to_datetime(RBR_Obs['Time'].iloc[-1]),
periods=len(GPS_gdf)).values
GPS_gdf.set_index('DateTime', inplace=True)
#%% Read in site shapefile
siteShp = gp.read_file('C:/Users/arey/files/Projects/West Coast/SitePolygons.shp')
siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")
#%% Merge GPS to RBR
# Process RBR into datetime
if RBR_File is not None:
RBR_Obs['DateTime'] = pd.to_datetime(RBR_Obs['Time'])
RBR_Obs.set_index('DateTime', inplace=True)
# Merge with GPS as dataframe
# Nearest-time join (5-minute tolerance) attaches a position to each sample.
RBR_Obs_geo = pd.merge_asof(RBR_Obs, GPS_gdf,
left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('300s'))
RBR_Obs_geo = gp.GeoDataFrame(RBR_Obs_geo, geometry=RBR_Obs_geo.geometry, crs="EPSG:32621")
#%% Merge GPS to JFE
if JFE_File is not None:
# Process JFE into datetime
JFE_Obs['DateTime'] = pd.to_datetime(JFE_Obs['Date'])
JFE_Obs.set_index('DateTime', inplace=True)
# Merge with GPS as dataframe (tighter 60 s tolerance for the JFE sampling rate)
JFE_Obs_geo = pd.merge_asof(JFE_Obs, GPS_gdf, left_index=True, right_index=True, direction='nearest', tolerance=pd.Timedelta('60s'))
JFE_Obs_geo = gp.GeoDataFrame(JFE_Obs_geo, geometry=JFE_Obs_geo.geometry, crs="EPSG:32621")
#%% Find and setup key plotting details
# Shaded Water
# NOTE(review): hard-coded Mapbox access token embedded in the URL; consider
# moving it to a config file or environment variable.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Sat water
# mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckekcw3pn08am19qmqbhtq8sb/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Per-site plot extents (UTM 21N), nearest GFS grid point, and an 'inArea'
# flag marking samples inside the site polygon.
# NOTE(review): siteShp.iloc[row, 1] assumes column 1 of SitePolygons.shp is
# the geometry column and a fixed row order -- confirm against the shapefile.
if siteName == 'Great House':
axXlim = (213210.7529575412, 213562.64172686986)
axYlim = (1464769.2243017585, 1465135.2219089477)
GFS_Lon = -59.6441
GFS_Lat = 13.2372
RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[2, 1])
elif siteName == 'Greensleeves':
axXlim = (213269.99233348924, 213648.1643157148)
# axYlim = (1463378.1020314451, 1463843.5442048472)
axYlim = (1463378.1020314451, 1463950.5442048472)
GFS_Lon = -59.6428
GFS_Lat = 13.2289
RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[1, 1])
elif siteName == 'Old Queens Fort':
axXlim = (213368.59866770002, 213745.6997016811)
axYlim = (1460192.707288096, 1460672.371780407)
GFS_Lon = -59.6419
GFS_Lat = 13.1960
RBR_Obs_geo['inArea'] = RBR_Obs_geo.within(siteShp.iloc[0, 1])
# Set min and max times using conductivity
# if JFE_File is None:
# Observation window: prefer the span of samples located inside the site
# polygon; otherwise trim 20 samples off each end of the record.
# NOTE(review): iloc[..., 0] assumes column 0 of RBR_Obs_geo holds the
# timestamp ('Time') -- confirm against the Excel export layout.
if RBR_Obs_geo['inArea'].any():
# First and last times from area in shapefile
minTime = RBR_Obs_geo[RBR_Obs_geo['inArea']==True].iloc[0, 0]
maxTime = RBR_Obs_geo[RBR_Obs_geo['inArea']==True].iloc[-1, 0]
else:
# First and last times if no GPS data
minTime = RBR_Obs_geo.iloc[20, 0]
maxTime = RBR_Obs_geo.iloc[-20, 0]
# else:
# minTime = (RBR_Obs['Conductivity ']>5).idxmax()
# minTime = minTime + timedelta(seconds=30)
# maxTime = ((RBR_Obs['Conductivity ']<5) & (RBR_Obs['Time']>minTime)).idxmax()
# maxTime = maxTime - timedelta(seconds=30)
# obsPeriod = maxTime-minTime
#
# if (obsPeriod.seconds<180) | (maxTime<minTime):
# minTime = ((RBR_Obs['Conductivity ']>5) & (RBR_Obs['Time']>(minTime+timedelta(seconds=180)))).idxmax()
# minTime = minTime + timedelta(seconds=30)
# maxTime = ((RBR_Obs['Conductivity ']<5) & (RBR_Obs['Time']>minTime)).idxmax()
# maxTime = maxTime - timedelta(seconds=30)
# Round minTime down to the previous synoptic (6-hourly) GFS cycle.
metDate = minTime - timedelta(
hours = minTime.hour % 6,
minutes=minTime.minute,
seconds=minTime.second,
microseconds=minTime.microsecond)
#%% GFS Met Import
# Download five 6-hourly GFS analyses bracketing the survey (metDate-18h to
# metDate+6h) at the site's grid point; precipitation comes from the +6 h
# forecast files as 6-hour accumulations.
var = ['Temperature_surface', 'Wind_speed_gust_surface',
'u-component_of_wind_height_above_ground', 'v-component_of_wind_height_above_ground']
var_precp = ['Total_precipitation_surface_6_Hour_Accumulation']
temp_1d = []
gust_1d = []
wndu_1d = []
wndv_1d = []
prep_1d = []
time_1d = []
# Set times to download
startdate = metDate - timedelta(hours=18)
enddate = metDate + timedelta(hours=6)
date_list = pd.date_range(startdate, enddate, freq='6H')
# Loop through dates
for date in date_list:
# Base URL for 0.5 degree GFS data
best_gfs = TDSCatalog('https://www.ncei.noaa.gov/thredds/catalog/model-gfs-g4-anl-files/' +
date.strftime('%Y%m') + '/' + date.strftime('%Y%m%d') + '/' + 'catalog.xml')
# Generate URLs for specific grib file
best_ds = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_000.grb2']
best_ds_precp = best_gfs.datasets['gfs_4_'+date.strftime('%Y%m%d_%H%M')+'_006.grb2']
# Format the query parameters
ncss = best_ds.subset()
query = ncss.query()
ncss_precp = best_ds_precp.subset()
query_precp = ncss_precp.query()
# Extract data from specific point
query.lonlat_point(GFS_Lon, GFS_Lat).time(date)
query.accept('netcdf')
query.variables(var[0], var[1], var[2], var[3])
# 10 m level for the height_above_ground wind components
query.vertical_level(10)
data = ncss.get_data(query)
data = xr.open_dataset(NetCDF4DataStore(data), drop_variables='height_above_ground4')
query_precp.lonlat_point(GFS_Lon, GFS_Lat).time(date + timedelta(hours=6))
query_precp.accept('netcdf')
query_precp.variables(var_precp[0])
data_precp = ncss_precp.get_data(query_precp)
data_precp = xr.open_dataset(NetCDF4DataStore(data_precp))
temp_3d = data[var[0]]
gust_3d = data[var[1]]
wndu_3d = data[var[2]]
wndv_3d = data[var[3]]
prep_3d = data_precp[var_precp[0]]
# Read the individual point (with units) and append to the list
temp_1d.append(temp_3d.metpy.unit_array.squeeze())
gust_1d.append(gust_3d.metpy.unit_array.squeeze())
wndu_1d.append(wndu_3d.metpy.unit_array.squeeze())
wndv_1d.append(wndv_3d.metpy.unit_array.squeeze())
prep_1d.append(prep_3d.metpy.unit_array.squeeze())
time_1d.append(find_time_var(temp_3d))
#%% Process Met Data
# 24h precipitation: sum of the four 6-hour accumulations preceding the survey.
met_prep = prep_1d[0] + prep_1d[1] + prep_1d[2] + prep_1d[3]
# Linear interpolation between the two GFS cycles bracketing minTime:
# index 3 is the cycle at metDate, index 4 the cycle at metDate + 6 h.
# BUG FIX: the weights were swapped -- the sample at metDate was multiplied
# by the fraction of the 6 h window already elapsed, so when minTime ==
# metDate the bracketing analysis received zero weight. Each sample is now
# weighted by its proximity to minTime (weights still sum to 1).
timeWeight1 = (metDate + timedelta(hours=6)) - minTime
timeWeight2 = minTime - metDate
timeWeight1 = timeWeight1.seconds / 21600
timeWeight2 = timeWeight2.seconds / 21600
met_gust = gust_1d[3] * timeWeight1 + gust_1d[4] * timeWeight2
met_temp = temp_1d[3] * timeWeight1 + temp_1d[4] * timeWeight2
met_wind = math.sqrt((wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0) * timeWeight2) ** 2 +
                     (wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0) * timeWeight2) ** 2)
# NOTE(review): atan2(v, u) yields the vector (math-convention) heading, not
# the meteorological 'direction from' -- confirm the downstream expectation.
met_wdir = math.degrees(math.atan2(
    wndv_1d[3].m.item(0) * timeWeight1 + wndv_1d[4].m.item(0) * timeWeight2,
    wndu_1d[3].m.item(0) * timeWeight1 + wndu_1d[4].m.item(0) * timeWeight2)) % 360
#%% Read in wave conditions from BTS file
# Offshore nowcast conditions nearest to the survey start time.
# NOTE(review): in this flattened rendering the binding of the 'else' below is
# ambiguous -- if it attaches to the inner 'Great House' test, met_hmo/tp/mwd
# are never assigned when wave_bts_file[s] is None (e.g. the monitoring
# deployments), which would raise NameError where they are used later.
if wave_bts_file[s] is not None:
if siteName == 'Great House':
wave_bts = pd.read_csv(wave_bts_file[s],
names=['date', 'time', 'HM0', 'TP', 'TM', 'MWD', 'DPK', 'HSWL', 'TSWL', 'DPSWL', 'HSEA', 'TSEA', 'DPSEA'],
header=0, skiprows=22, delim_whitespace=True)
wave_bts['datetime'] = pd.to_datetime(wave_bts['date'] + ' ' + wave_bts['time'])
wave_bts.set_index('datetime', inplace=True)
# Nearest-in-time record to the survey start
met_hmo = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].HM0
met_tp = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].TP
met_mwd = wave_bts.iloc[wave_bts.index.get_loc(minTime, method='nearest'), :].MWD
else:
# -999 sentinel = no wave data (replaced with NaN in the summary sheet)
met_hmo = -999
met_tp = -999
met_mwd = -999
#%% Plot time series for Geo data
fontprops = fm.FontProperties(size=25)
# Monitoring deployments carry only the RBR turbidity channel (1 panel);
# full deployments get 4 RBR + 3 JFE panels and a larger summary table.
if JFE_File is None:
fig, axesTMP = plt.subplots(nrows=1, ncols=1, figsize=(19, 5), constrained_layout=True)
RBRparam = ['Turbidity ']
RBRparamName = ['Turbidity [NTU]']
RBRparmCmap = [cmo.turbid]
RBRparamMin = [0.0]
RBRparamMax = [60.0]
dataTable = np.zeros([9, 4])
roundIDX = [1, 1, 0, 1, 1, 1, 1, 1]
axes = []
axes.append(axesTMP)
else:
fig, axes = plt.subplots(nrows=7, ncols=1, figsize=(19, 25), constrained_layout=True)
dataTable = np.zeros([15, 4])
roundIDX = [1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1]
# Trailing spaces in the RBR column names are present in the Excel export.
RBRparam = ['Depth ', 'Salinity ', 'Dissolved O₂ saturation ', 'Temperature ']
RBRparamName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ saturation [%]', 'Temperature [degC]']
RBRparmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal]
RBRparamMin = [0.0, 34.0, 32.5, 29.0]
RBRparamMax = [1.0, 36.0, 34.0, 31.0]
JFEparam = ['Turb. -M[FTU]', 'Chl-Flu.[ppb]', 'Chl-a[ug/l]']
JFEparamName = ['Turbidity [FTU]', 'Chl-Flu. [ppb]', 'Chl-a [ug/l]']
JFEparamMin = [0.0, 0.0, 0.0]
JFEparamMax = [20.0, 1.0, 1.0]
fig.patch.set_facecolor('white')
# fig.tight_layout(pad=1.05)
# NOTE(review): fontprops is assigned identically above -- redundant.
fontprops = fm.FontProperties(size=25)
# Populate the met/wave rows of the summary table (column 0 = value).
# BUG FIX: the Kelvin-to-Celsius conversion subtracted 272.15; the correct
# offset is 273.15 K (0 degC == 273.15 K), so every reported air temperature
# was 1 degC too warm.
dataTable[0, 0] = met_temp.m.item(0)-273.15
dataTable[1, 0] = met_wind
dataTable[2, 0] = met_wdir
dataTable[3, 0] = met_prep.m.item(0)
dataTable[4, 0] = met_hmo
dataTable[5, 0] = met_tp
dataTable[6, 0] = met_mwd
# Per-parameter local-maximum indices/timestamps and time/threshold masks,
# shared between the time-series panels here and the map panels below.
ilocs_max = []
ilocs_max_pts = []
RBR_mask = []
JFE_mask = []
for paramIDX, param in enumerate(RBRparam):
RBR_Obs_geo.loc[minTime:maxTime, param].plot(
ax=axes[paramIDX], label='1 Second Observations', color='lightgrey') # Note the space in the col name
# Create mask for RBR data based on time and parameter minimum
RBR_mask.append(((RBR_Obs_geo.index>minTime) &
(RBR_Obs_geo.index<maxTime) &
(RBR_Obs_geo[param]>RBRparamMin[paramIDX])))
# 60-sample (~1 minute at 1 Hz) Nuttall-window rolling mean
RBR_smoothed = RBR_Obs_geo.loc[RBR_mask[paramIDX], param].rolling(
60, win_type='nuttall',center=True).mean()
RBR_smoothed.plot(
ax=axes[paramIDX], label='1 Minute Average', color='black',
linewidth=3)
# Find the local maximums for Turbidity
if param == 'Turbidity ':
ilocs_max.append(argrelextrema(RBR_smoothed.values,
np.greater_equal, order=40, mode='wrap')[0])
# Add start and end points?
# ilocs_max = np.insert(ilocs_max, 0, 10)
# ilocs_max[-1] = len(RBR_smoothed.values)-10
ilocs_max_pts.append(RBR_smoothed.iloc[ilocs_max[paramIDX]].index.values)
# Add labels if GPS data is available
if GPS_File is not None:
axes[paramIDX].scatter(RBR_smoothed.iloc[
ilocs_max[paramIDX]].index, np.ones(len(ilocs_max[paramIDX])) * 30, 75,
color='blue')
for a in range(0, len(ilocs_max[paramIDX])):
axes[paramIDX].annotate(str(a+1), (ilocs_max_pts[paramIDX][a], 30), fontsize=30)
else:
# Keep list indices aligned with paramIDX for non-turbidity parameters
ilocs_max.append(None)
ilocs_max_pts.append(None)
# dataTable[paramIDX+7, 0] = RBR_Obs_geo.loc[minTime:maxTime, param].mean(skipna=True)
# dataTable[paramIDX+7, 1] = RBR_Obs_geo.loc[minTime:maxTime, param].std(skipna=True)
# dataTable[paramIDX+7, 2] = max(RBR_Obs_geo.loc[minTime:maxTime, param].min(skipna=True), 0)
# dataTable[paramIDX+7, 3] = RBR_Obs_geo.loc[minTime:maxTime, param].max(skipna=True)
# Summary statistics computed from the smoothed series (rows 7+).
dataTable[paramIDX+7, 0] = RBR_smoothed.mean(skipna=True)
dataTable[paramIDX+7, 1] = RBR_smoothed.std(skipna=True)
dataTable[paramIDX+7, 2] = max(RBR_smoothed.min(skipna=True), 0)
dataTable[paramIDX+7, 3] = RBR_smoothed.max(skipna=True)
axes[paramIDX].set_ylabel(RBRparamName[paramIDX])
axes[paramIDX].set_title('RBR: ' + RBRparamName[paramIDX])
axes[paramIDX].set_xlabel('')
axes[paramIDX].set_ylim(RBRparamMin[paramIDX], RBRparamMax[paramIDX])
axes[paramIDX].legend(loc='upper right')
# Same treatment for the JFE channels (panels 5-7, dataTable rows 11+).
if JFE_File is not None:
for paramIDX, param in enumerate(JFEparam):
JFE_Obs_geo.loc[minTime:maxTime, param].plot(
ax=axes[paramIDX+4], label='15 Second Observations', color='lightgrey')
JFE_mask.append(((JFE_Obs_geo.index > minTime) &
(JFE_Obs_geo.index < maxTime) &
(JFE_Obs_geo[param] > JFEparamMin[paramIDX])))
# 20-sample (~5 minutes at 15 s) Nuttall-window rolling mean
JFE_smoothed = JFE_Obs_geo.loc[JFE_mask[paramIDX], param].rolling(
20, win_type='nuttall', center=True).mean()
JFE_smoothed.plot(
ax=axes[paramIDX+4], label='1 Minute Average', color='black',
linewidth=3)
# Find the local maximums for Turbidity
if param == 'Turb. -M[FTU]':
# JFE entries continue the shared ilocs_max list at offset +4.
ilocs_max.append(argrelextrema(JFE_smoothed.values,
np.greater_equal, order=60, mode='wrap')[0])
# Add start and end points?
# ilocs_max = np.insert(ilocs_max, 0, 10)
# ilocs_max[-1] = len(RBR_smoothed.values)-10
ilocs_max_pts.append(JFE_smoothed.iloc[ilocs_max[paramIDX+4]].index.values)
# Add labels if GPS data is available
if GPS_File is not None:
axes[paramIDX+4].scatter(JFE_smoothed.iloc[
ilocs_max[paramIDX+4]].index, np.ones(len(ilocs_max[paramIDX+4])) * 10, 75,
color='blue')
for a in range(0, len(ilocs_max[paramIDX+4])):
axes[paramIDX+4].annotate(str(a + 1), (ilocs_max_pts[paramIDX+4][a], 10), fontsize=30)
else:
ilocs_max.append(None)
ilocs_max_pts.append(None)
# dataTable[paramIDX+4+7, 0] = JFE_Obs_geo.loc[minTime:maxTime, param].mean(skipna=True)
# dataTable[paramIDX+4+7, 1] = JFE_Obs_geo.loc[minTime:maxTime, param].std(skipna=True)
# dataTable[paramIDX+4+7, 2] = max(JFE_Obs_geo.loc[minTime:maxTime, param].min(skipna=True), 0)
# dataTable[paramIDX+4+7, 3] = JFE_Obs_geo.loc[minTime:maxTime, param].max(skipna=True)
dataTable[paramIDX+4+7, 0] = JFE_smoothed.mean(skipna=True)
dataTable[paramIDX+4+7, 1] = JFE_smoothed.std(skipna=True)
dataTable[paramIDX+4+7, 2] = max(JFE_smoothed.min(skipna=True), 0)
dataTable[paramIDX+4+7, 3] = JFE_smoothed.max(skipna=True)
axes[paramIDX+4].set_ylabel(JFEparamName[paramIDX])
axes[paramIDX+4].set_title('JFE: ' + JFEparamName[paramIDX])
axes[paramIDX+4].set_xlabel('')
axes[paramIDX+4].set_ylim(JFEparamMin[paramIDX], JFEparamMax[paramIDX])
axes[paramIDX+4].legend(loc='upper right')
axes[paramIDX+4].set_xlabel(minTime.strftime('%B %#d, %Y'))
else:
axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))
# Format data table: rounded means plus 'min / max' strings; a zero max is
# treated as 'no data' ('--').
dataTableFormat_mean = []
dataTableFormat_maxmin = []
for d in range(0, len(roundIDX)):
dataTableFormat_mean.append(round(dataTable[d, 0], roundIDX[d]))
if dataTable[d, 3] == 0:
dataTableFormat_maxmin.append('--')
else:
dataTableFormat_maxmin.append(str(round(dataTable[d, 2], roundIDX[d])) + ' / ' + str(round(dataTable[d, 3], roundIDX[d])))
dfOut = pd.DataFrame(dataTable[:, :])
dfOutFormat = pd.DataFrame([dataTableFormat_mean, dataTableFormat_maxmin]).transpose()
# Change the column names (two-level header: survey date / statistic)
dfOut.columns =['Mean', 'Standard Deviation', 'Min', 'Max']
dfOutFormat.columns = [np.array([minTime.strftime('%B %#d, %Y'), minTime.strftime('%B %#d, %Y')]),
np.array(['Mean', 'Min / Max'])]
# Change the row indexes
if JFE_File is not None:
dfOut.iloc[14, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
'Mean Wave Direction Offshore [deg]', 'Depth [m]', 'Salinity [PSU]',
'Dissolved O₂ saturation [%]', 'Temperature [degC]',
'Turbidity [FTU]', 'Chl-Flu. [ppb]', 'Chl-a [ug/l]', 'Observation Date']
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
else:
dfOut.iloc[8, 0] = minTime.strftime('%B %#d, %Y')
rowNames = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
'Significant Wave Height Offshore [m]' ,'Peak Wave Period Offshore [s]',
'Mean Wave Direction Offshore [deg]',
'Turbidity [NTU]', 'Observation Date']
dfOut.index = rowNames
dfOutFormat.index = rowNames[0:-1]
fig.suptitle(siteName + ', ' +minTime.strftime('%B %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
plt.show()
dfOut.to_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
dfOutFormat.to_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx')
dfOut.to_csv(importPath + '/Summary_Stats_' + siteName + '.csv')
# NOTE(review): the Figures/ directory is only created further below (map
# section); on a fresh deployment folder these savefig calls run first and
# will fail with FileNotFoundError -- confirm and move the mkdir up.
fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.pdf',
bbox_inches='tight')
fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.png',
bbox_inches='tight', dpi=500)
#%% Plot Maps
# Spatial maps of each parameter along the GPS track over a Mapbox basemap.
if JFE_File is None:
# Only Turbidity Data
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(9, 9), constrained_layout=True)
ax = []
ax.append(axes)
else:
fig, axes = plt.subplots(nrows=3, ncols=2, figsize=(19, 25), constrained_layout=True)
ax = axes.flat
fig.patch.set_facecolor('white')
# fig.tight_layout(pad=1.05)
fontprops = fm.FontProperties(size=25)
# North-arrow placement in axes coordinates
x, y, arrow_length = 0.95, 0.93, 0.20
plt.rcParams.update({'font.size': 22})
# Alternative data-driven extents (unused; fixed per-site extents are applied)
axXlimTT = (RBR_Obs_geo.loc[minTime:maxTime].geometry.x.min()-100,
RBR_Obs_geo.loc[minTime:maxTime].geometry.x.max()+100)
axYlimTT = (RBR_Obs_geo.loc[minTime:maxTime].geometry.y.min()-100,
RBR_Obs_geo.loc[minTime:maxTime].geometry.y.max()+100)
plt.setp(axes, xlim=axXlim, ylim=axYlim)
# plt.setp(axes, xlim=axXlimTT, ylim=axYlimTT)
# Plot the RBR observations
# Salinity
for paramIDX, param in enumerate(RBRparam):
if RBR_File is not None:
# Draw thick black line to show approx path
if GPS_File is None:
ax[paramIDX].scatter(RBR_Obs_geo.loc[minTime:maxTime].geometry.x,
RBR_Obs_geo.loc[minTime:maxTime].geometry.y,
150, marker='o', color='black', label='Approximate Path')
plt.legend(loc='upper left')
RBR_Obs_geo.loc[minTime:maxTime].plot(
column=param, ax=ax[paramIDX], vmin=RBRparamMin[paramIDX], vmax=RBRparamMax[paramIDX],
legend=True, legend_kwds={'label': RBRparamName[paramIDX]},
cmap=RBRparmCmap[paramIDX], markersize=20)
ctx.add_basemap(ax[paramIDX], source=mapbox, crs='EPSG:32621')
# Add time labels
# plt.scatter(RBR_Obs_geo.loc[RBR_mask].iloc[
# ilocs_max, :].geometry.x,
# RBR_Obs_geo.loc[RBR_mask].iloc[
# ilocs_max, :].geometry.y, 75, marker='o', color='black')
# Numbered labels at the turbidity local maxima (matches time-series panels)
if (not RBR_Obs_geo.geometry.isnull().all()) & (GPS_File is not None) & (param == 'Turbidity '):
for a in range(0, len(ilocs_max[paramIDX])):
ax[paramIDX].annotate(str(a + 1), (RBR_Obs_geo.loc[RBR_mask[paramIDX]].iloc[
ilocs_max[paramIDX][a], :].geometry.x,
RBR_Obs_geo.loc[RBR_mask[paramIDX]].iloc[
ilocs_max[paramIDX][a], :].geometry.y), fontsize=30)
ax[paramIDX].set_title(RBRparamName[paramIDX])
# ax[paramIDX].set_ylabel('UTM 21N [m]')
# ax[paramIDX].set_xlabel('UTM 21N [m]')
ax[paramIDX].locator_params(axis='y', nbins=3)
ax[paramIDX].ticklabel_format(useOffset=False, style='plain', axis='both')
ax[paramIDX].get_xaxis().set_ticks([])
ax[paramIDX].get_yaxis().set_ticks([])
#Add scale-bar
scalebar = AnchoredSizeBar(ax[paramIDX].transData,
100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
ax[paramIDX].add_artist(scalebar)
# North arrow
ax[paramIDX].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
arrowprops=dict(facecolor='black', width=6, headwidth=30),
ha='center', va='center', fontsize=35,
xycoords=ax[paramIDX].transAxes)
# Plot JFE points (map panels 5-6; only turbidity and chlorophyll are mapped)
JFEparam = ['Turb. -M[FTU]', 'Chl-Flu.[ppb]']
JFEparamName = ['Turbidity [FTU]', 'Chl-Flu. [ppb]']
JFEparamCmp = [cmo.turbid, cmo.algae]
JFEparamMin = [0.0, 0.0]
JFEparamMax = [10.0, 1.0]
if JFE_File is not None:
for paramIDX, param in enumerate(JFEparam):
# NOTE(review): this inner JFE_File test is redundant with the one above.
if JFE_File is not None:
JFE_Obs_geo.loc[minTime:maxTime].plot(
column=param, ax=ax[paramIDX+4], vmin=JFEparamMin[paramIDX], vmax=JFEparamMax[paramIDX],
legend=True, legend_kwds={'label': JFEparamName[paramIDX]},
cmap=JFEparamCmp[paramIDX], markersize=20) # Note the space in the col name
ctx.add_basemap(ax[paramIDX+4], source=mapbox, crs='EPSG:32621')
# Add time labels (JFE turbidity maxima live at ilocs_max[paramIDX+4])
if (not JFE_Obs_geo.geometry.isnull().all()) & (GPS_File is not None) & (param == 'Turb. -M[FTU]'):
for a in range(0, len(ilocs_max[paramIDX+4])):
ax[paramIDX+4].annotate(str(a + 1), (JFE_Obs_geo.loc[JFE_mask[paramIDX]].iloc[
ilocs_max[paramIDX+4][a], :].geometry.x,
JFE_Obs_geo.loc[JFE_mask[paramIDX]].iloc[
ilocs_max[paramIDX+4][a], :].geometry.y), fontsize=30)
ax[paramIDX+4].set_title(JFEparamName[paramIDX])
# ax[paramIDX+4].set_ylabel('UTM 21N [m]')
# ax[paramIDX+4].set_xlabel('UTM 21N [m]')
ax[paramIDX+4].locator_params(axis='y', nbins=3)
ax[paramIDX+4].ticklabel_format(useOffset=False, style='plain', axis='both')
ax[paramIDX+4].get_xaxis().set_ticks([])
ax[paramIDX+4].get_yaxis().set_ticks([])
#Add scale-bar
scalebar = AnchoredSizeBar(ax[paramIDX+4].transData,
100, '100 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
ax[paramIDX+4].add_artist(scalebar)
ax[paramIDX+4].annotate('N', xy=(x, y), xytext=(x, y-arrow_length),
arrowprops=dict(facecolor='black', width=6, headwidth=30),
ha='center', va='center', fontsize=25,
xycoords=ax[paramIDX+4].transAxes)
fig.suptitle(siteName + ', ' + minTime.strftime('%b %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
plt.show()
# Create the Figures output directory on first use
if not os.path.exists(importPath + '/Figures'):
os.mkdir(importPath + '/Figures')
fig.savefig(importPath + '/Figures/SummaryMap_' + siteName + '.pdf',
bbox_inches='tight')
fig.savefig(importPath + '/Figures/SummaryMap_' + siteName + '.png',
bbox_inches='tight', dpi=500)
# %% Summary Sheet: join the per-site stats workbooks into one merged sheet per
# site group (each group is a set of four run indices sharing a deployment).
plotIDXsLoop = [[0, 3, 6, 9],
                [1, 4, 7, 10],
                [2, 5, 8, 11]]
for plotIDXs in plotIDXsLoop:
    summTable = None
    for s, plotIDX in enumerate(plotIDXs):
        # Define master import path for this site
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]
        obsStatsIN = pd.read_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx',
                                   header=[0, 1], index_col=0)
        # Sites 9-11 label turbidity in NTU; relabel the row to match the others
        if plotIDX in (9, 10, 11):
            obsStatsIN.rename({'Turbidity [NTU]': 'Turbidity [FTU]'}, inplace=True)
        summTable = obsStatsIN if s == 0 else summTable.join(obsStatsIN)
    # -999 is the missing-data sentinel in the source workbooks
    summTable.replace(-999, np.nan, inplace=True)
    summTable.to_excel('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/Summary_StatsMerge_' + siteName + '.xlsx')
# %% Summary Plot: scatter each summary statistic against deployment date for
# every site group, and save the figure as PDF + high-res PNG.
plotvars = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]', '24h Precipitation [mm]',
            'Significant Wave Height [m]', 'Salinity [PSU]',
            'Dissolved O₂ [%]', 'Temperature [degC]',
            'Turbidity [FTU]', 'Chl-Flu. [ppb]']
plotIDXsLoop = [[0, 3, 6, 9],
                [1, 4, 7, 10],
                [2, 5, 8, 11]]
for plotIDXs in plotIDXsLoop:
    summTable = None  # (leftover binding kept from the Summary Sheet section)
    plotDates = []
    plotTable = np.empty([10, len(plotIDXs)])
    for s, plotIDX in enumerate(plotIDXs):
        # Define master import path for this site
        importPath = importPaths[plotIDX]
        siteName = siteNames[plotIDX]
        obsStatsIN = pd.read_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx')
        if plotIDX in (9, 10, 11):
            # Reduced stats sheet layout: rows 0-2 (air/wind), row 7 (turbidity), row 8 (date)
            plotTable[0:3, s] = obsStatsIN.iloc[0:3, 1]
            plotTable[8, s] = obsStatsIN.iloc[7, 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[8, 1], '%B %d, %Y'))
        else:
            plotTable[:, s] = obsStatsIN.iloc[[0, 1, 2, 3, 4, 8, 9, 10, 11, 12], 1]
            plotDates.append(datetime.strptime(obsStatsIN.iloc[14, 1], '%B %d, %Y'))
    fig, axes = plt.subplots(nrows=5, ncols=2, figsize=(19, 25), sharex=True)
    fig.patch.set_facecolor('white')
    fig.tight_layout(pad=3)
    ax = axes.flat
    # Replace zeros (and any tiny values left over from np.empty) with NaN so
    # they are dropped from the scatter plots
    plotTable[plotTable < 0.00001] = np.nan
    for v, varName in enumerate(plotvars):
        ax[v].scatter(plotDates, plotTable[v, :], 250)
        ax[v].set_ylabel(varName)
    fig.suptitle(siteName, fontsize=35)  # NOTE: titles with the LAST site of the group
    plt.gcf().autofmt_xdate()
    plt.gcf().align_ylabels()
    plt.show()
    fig.savefig('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/' + siteName + '.pdf',
                bbox_inches='tight')
    fig.savefig('//srv-ott3.baird.com/Projects/13033.201 Great House - Coastal Structures/05_Analyses/01_WQ Monitoring/CombinedStats/' + siteName + '.png',
                bbox_inches='tight', dpi=500)

View File

@ -22,7 +22,7 @@ import os
from shapely.geometry import Point
import pickle
# %% read in data
# %% Read in data
dataPath = '//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/05_Analyses/07 ADCP/NC_CurrentMeter_All_Phase1_all_data_2012_05_20/'
df = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_mobile_2013_05_02.xlsx',

File diff suppressed because one or more lines are too long

8
SouthShore/.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

View File

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.8 (geopandas_forge)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">
<option name="renderExternalDocumentation" value="true" />
</component>
</module>

View File

@ -0,0 +1,13 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="bool.*" />
<option value="geopandas.io.file" />
</list>
</option>
</inspection_tool>
</profile>
</component>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (geopandas_forge)" project-jdk-type="Python SDK" />
</project>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/SouthShore.iml" filepath="$PROJECT_DIR$/.idea/SouthShore.iml" />
</modules>
</component>
</project>

View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PySciProjectComponent">
<option name="PY_SCI_VIEW" value="true" />
<option name="PY_SCI_VIEW_SUGGESTED" value="true" />
</component>
</project>

6
SouthShore/.idea/vcs.xml Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

View File

@ -0,0 +1,43 @@
## Wave transmission through a Breakwater
# Author: AJMR
# December 14, 2021
#
# Loads MIKE21 SW dfs0 output extracted at breakwater toe points and repacks
# it into a pandas DataFrame for downstream transmission calculations.
#%% Setup Project
from mikeio import Dfs0
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
#%% Read Model Log (run locations and names, indexed by run-log row)
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models", "13632.101.W.zzAJMR.Rev0_ModelLog.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
#%% Read in point shapefile of breakwater toe vertices
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
#%% Read Model Results
modelPlot = [8]  # run-log row(s) to process
# MIKEds = [None] * (max(modelPlot)+1)
for i in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][i],
                         runLog['Run Name'][i] + '.sw - Result Files', 'BreakPts.dfs0'))
    dfsIN_read = dfsIN.read()
    A = dfsIN_read.to_dataframe()  # NOTE(review): unused — superseded by MIKEds below? confirm and remove
    # Pack every item except the last into a (timestep, point, item) array
    MIKEnp = np.empty((dfsIN_read.n_timesteps, dfsIN_read.shape[1], dfsIN_read.n_items-1))
    MIKEname = []
    for j in range(0, dfsIN_read.n_items-1):
        MIKEname.append(dfsIN_read.items[j].name)
        MIKEnp[:, :, j] = dfsIN_read[j]
    # Drop the first timestep (presumably model spin-up — TODO confirm); squeeze
    # assumes a single extraction point so the result is 2-D (time x item)
    MIKEds = pd.DataFrame(np.squeeze(MIKEnp[1:, :, :]), columns=MIKEname)

View File

@ -0,0 +1,242 @@
## Wave transmission through a Breakwater
# Author: AJMR
# December 14, 2021
#
# Reads MIKE21 SW output at breakwater toe points, estimates the wave height
# transmitted over the crest from the local freeboard, and writes the result
# out as dfs1 boundary conditions for the three inner-model boundaries.
# %% Setup Project
from mikeio import Dfs0, Dfs1
from mikeio.eum import ItemInfo, EUMUnit, EUMType
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import math
import matplotlib.pyplot as plt
import contextily as ctx
# %% Read Model Log
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models",
              "13632.101.W.zzAJMR.Rev0_ModelLog.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read in point shapefile of breakwater toe vertices
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
# %% Read in breakwater crest points and attach a crest elevation to each toe vertex
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Read Model Results
modelPlot = [8]  # run-log row(s) to process
for i in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][i],
                         runLog['Run Name'][i] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    dfsIN_read = dfsIN.read()
    MIKEds = dfsIN_read.to_dataframe()
# %% Merge model results into the toe-point GeoDataFrame, one point table per timestep
breakwaterPTS_times = None
for t in range(0, MIKEds.shape[0]):
    # FIX: .copy() — the original aliased breakwaterPTS_Crest, so every timestep
    # mutated the shared crest table and carried stale values forward.
    breakwaterPTS_merge = breakwaterPTS_Crest.copy()
    for c in range(0, MIKEds.shape[1]):
        col = MIKEds.columns.values[c]
        # dfs0 headers look like '"FID" "IND" Param Name' — parse the three pieces.
        # NOTE(review): the slicing assumes single-digit FID — confirm for FID >= 10.
        paramName = col[col.find('"', 5, -1) + 3:]
        if paramName not in breakwaterPTS_merge.columns:
            breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
        tmpFID = int(col[1:col.find('"', 1)])
        tmpIND = int(col[5:col.find('"', 5)])
        breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                 (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                paramName] = MIKEds.iloc[t, c]
    breakwaterPTS_merge['Time'] = MIKEds.index[t]
    breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
breakwaterPTS_times.rename(columns={'index_right': 'index_right_old'}, inplace=True)
# %% Conversion: transmitted wave height from the crest freeboard
breakwaterPTS_times['transmitted'] = np.nan
gamma = 1.2  # empirical scaling factor on the transmission coefficients
freeboardRatio = (breakwaterPTS_times['Crest'] - breakwaterPTS_times['Surface elevation']) / \
    breakwaterPTS_times['Sign. Wave Height']
transmit_paramA = 0.81 * gamma                          # cap on the transmission coefficient
transmit_paramB = 0.45 * gamma - 0.3 * gamma * freeboardRatio  # freeboard-dependent coefficient
# Identify the overtopping regimes
breakMask_A = freeboardRatio >= 1.2                                          # high freeboard
breakMask_B = (freeboardRatio < 1.2) & (transmit_paramA < transmit_paramB)   # coefficient capped at A
breakMask_C = (freeboardRatio < 1.2) & (transmit_paramA > transmit_paramB)   # coefficient B governs
hs = breakwaterPTS_times['Sign. Wave Height']
# FIX: .loc replaces the original chained assignment (df[col][mask] = ...),
# which pandas does not guarantee will write back to the frame.
breakwaterPTS_times.loc[breakMask_A, 'transmitted'] = hs[breakMask_A] * 0.09 * gamma
breakwaterPTS_times.loc[breakMask_B, 'transmitted'] = hs[breakMask_B] * transmit_paramA
breakwaterPTS_times.loc[breakMask_C, 'transmitted'] = hs[breakMask_C] * transmit_paramB[breakMask_C]
# %% Keep only the final timestep for the boundary transfer
breakwaterPTS_end = breakwaterPTS_times['Time'] == breakwaterPTS_times['Time'].max()
endPTS = breakwaterPTS_times.loc[breakwaterPTS_end, :]


def _read_inner_boundary(name, end_pts):
    """Read one inner-boundary vertex file (whitespace-delimited x y), sort the
    vertices by easting, and attach the nearest final-timestep result row."""
    bound = pd.read_csv(
        '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_'
        + name + '.txt',
        delimiter=r'\s+', header=None, names=['x', 'y'])  # FIX: raw string — '\s' is an invalid str escape
    bound.sort_values(by=['x'], inplace=True)
    gdf = gp.GeoDataFrame(bound, crs='EPSG:32616',
                          geometry=gp.points_from_xy(bound.x, bound.y))
    return gdf.sjoin_nearest(end_pts)


def _write_inner_boundary_dfs1(gdf, filename, start_time):
    """Write transmitted Hs, Tp, MWD plus a constant directional spread for one
    inner boundary as a dfs1 (two identical timesteps over the boundary profile)."""
    bound = pd.DataFrame(gdf.loc[:, ['Time', 'transmitted', 'Peak Wave Period', 'Mean Wave Direction']])
    bound.set_index('Time', inplace=True)
    bound.rename(columns={'transmitted': 'Significant wave height',
                          'Peak Wave Period': 'Peak wave period',
                          'Mean Wave Direction': 'Mean wave direction'}, inplace=True)
    items = [ItemInfo("Significant wave height", EUMType.Significant_wave_height, EUMUnit.meter),
             ItemInfo("Peak wave period", EUMType.Wave_period, EUMUnit.second),
             ItemInfo("Mean wave direction", EUMType.Mean_Wave_Direction, EUMUnit.degree),
             ItemInfo("Directional Standard Deviation, n", EUMType.Directional_Standard_Deviation, EUMUnit.degree)]
    Dfs1().write(filename=filename,
                 data=[np.array((bound['Significant wave height'], bound['Significant wave height'])),
                       np.array((bound['Peak wave period'], bound['Peak wave period'])),
                       np.array((bound['Mean wave direction'], bound['Mean wave direction'])),
                       np.ones((2, bound.shape[0])) * 23.28],  # 23.28 deg spread — TODO confirm source
                 start_time=start_time,
                 dt=86400,
                 items=items)


# %% Read the three inner boundaries and save each as DFS1
# (helpers replace three near-identical copy-pasted read/rename/write sections)
runName = runLog['Run Name'][modelPlot[0]]
startTime = breakwaterPTS_times['Time'].min()
innerBoundNorth_gdf = _read_inner_boundary('North', endPTS)
_write_inner_boundary_dfs1(innerBoundNorth_gdf,
                           '//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/InnerBounds/'
                           + runName + 'ProdInnerNorth1.dfs1', startTime)
innerBoundMid_gdf = _read_inner_boundary('Mid', endPTS)
_write_inner_boundary_dfs1(innerBoundMid_gdf,
                           '//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/InnerBounds/'
                           + runName + 'ProdInnerMid.dfs1', startTime)
innerBoundSouth_gdf = _read_inner_boundary('South', endPTS)
_write_inner_boundary_dfs1(innerBoundSouth_gdf,
                           '//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/InnerBounds/'
                           + runName + 'ProdInnerSouth.dfs1', startTime)
# %% Test Plot
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(9, 9), constrained_layout=True)
breakwaterPTS_merge.plot(ax=axes, column='Sign. Wave Height', cmap='viridis')
# FIX: the original plotted an undefined `innerBound_gdf` here (NameError);
# plot the transmitted heights on all three defined boundaries instead.
for bnd in (innerBoundNorth_gdf, innerBoundMid_gdf, innerBoundSouth_gdf):
    bnd.plot(ax=axes, column='transmitted', cmap='viridis')
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
plt.show()

View File

@ -0,0 +1,225 @@
## Wave transmission through a Breakwater — production runs
# Author: AJMR
# December 14, 2021
#
# Same workflow as the single-run script, looped over production runs 12-19.
# FIX: the original performed all post-processing AFTER the `for m` loop, so
# despite reading every run's dfs0, only the last run (m = 19) was ever
# converted and written out even though the output filenames are keyed by
# runLog['Run Name'][m]. Processing is now done per run inside the loop.
# %% Setup Project
from mikeio import Dfs0, Dfs1
from mikeio.eum import ItemInfo, EUMUnit, EUMType
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import math
import matplotlib.pyplot as plt
import contextily as ctx
# %% Read Model Log
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models",
              "13632.101.W.AJMR.Rev0_ModelLog.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read in point shapefile of breakwater toe vertices
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
# %% Read in breakwater crest points and attach a crest elevation to each toe vertex
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)

_DFS1_OUT = '//oak-spillway.baird.com/F/13632.101 South Shore Breakwater/SetupFiles/InnerBounds/'


def _merge_timesteps(MIKEds, base_pts):
    """Spread dfs0 columns ('"FID" "IND" Param') onto the toe points — one copy
    of the point table per timestep, stacked into a single GeoDataFrame."""
    merged_all = None
    for t in range(0, MIKEds.shape[0]):
        # FIX: .copy() — the original aliased base_pts and mutated it every timestep
        merged = base_pts.copy()
        for c in range(0, MIKEds.shape[1]):
            col = MIKEds.columns.values[c]
            # NOTE(review): the slicing assumes single-digit FID — confirm for FID >= 10
            paramName = col[col.find('"', 5, -1) + 3:]
            if paramName not in merged.columns:
                merged[paramName] = np.full([merged.shape[0], 1], np.nan)
            tmpFID = int(col[1:col.find('"', 1)])
            tmpIND = int(col[5:col.find('"', 5)])
            merged.loc[((merged.FID == tmpFID) & (merged.vertex_ind == tmpIND)),
                       paramName] = MIKEds.iloc[t, c]
        merged['Time'] = MIKEds.index[t]
        merged_all = pd.concat([merged_all, merged], ignore_index=True)
    merged_all.rename(columns={'index_right': 'index_right_old'}, inplace=True)
    return merged_all


def _apply_transmission(df):
    """Add a 'transmitted' wave-height column derived from the crest freeboard."""
    df['transmitted'] = np.nan
    gamma = 1.2  # empirical scaling factor
    ratio = (df['Crest'] - df['Surface elevation']) / df['Sign. Wave Height']
    coefA = 0.81 * gamma                      # cap on the transmission coefficient
    coefB = 0.45 * gamma - 0.3 * gamma * ratio  # freeboard-dependent coefficient
    hs = df['Sign. Wave Height']
    maskA = ratio >= 1.2                      # high freeboard
    maskB = (ratio < 1.2) & (coefA < coefB)   # coefficient capped at A
    maskC = (ratio < 1.2) & (coefA > coefB)   # coefficient B governs
    # FIX: .loc replaces chained assignment (df[col][mask] = ...) so the writes
    # reliably land in `df`
    df.loc[maskA, 'transmitted'] = hs[maskA] * 0.09 * gamma
    df.loc[maskB, 'transmitted'] = hs[maskB] * coefA
    df.loc[maskC, 'transmitted'] = hs[maskC] * coefB[maskC]


def _read_inner_boundary(name, end_pts):
    """Read one inner-boundary vertex file, sort by easting, and attach the
    nearest final-timestep result row to each vertex."""
    bound = pd.read_csv(
        '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/InnerBound_'
        + name + '.txt',
        delimiter=r'\s+', header=None, names=['x', 'y'])  # FIX: raw string for the regex delimiter
    bound.sort_values(by=['x'], inplace=True)
    gdf = gp.GeoDataFrame(bound, crs='EPSG:32616',
                          geometry=gp.points_from_xy(bound.x, bound.y))
    return gdf.sjoin_nearest(end_pts)


def _write_inner_boundary_dfs1(gdf, filename, start_time):
    """Write transmitted Hs, Tp, MWD plus a constant directional spread for one
    inner boundary as a dfs1 (two identical timesteps over the boundary profile)."""
    bound = pd.DataFrame(gdf.loc[:, ['Time', 'transmitted', 'Peak Wave Period', 'Mean Wave Direction']])
    bound.set_index('Time', inplace=True)
    bound.rename(columns={'transmitted': 'Significant wave height',
                          'Peak Wave Period': 'Peak wave period',
                          'Mean Wave Direction': 'Mean wave direction'}, inplace=True)
    items = [ItemInfo("Significant wave height", EUMType.Significant_wave_height, EUMUnit.meter),
             ItemInfo("Peak wave period", EUMType.Wave_period, EUMUnit.second),
             ItemInfo("Mean wave direction", EUMType.Mean_Wave_Direction, EUMUnit.degree),
             ItemInfo("Directional Standard Deviation, n", EUMType.Directional_Standard_Deviation, EUMUnit.degree)]
    Dfs1().write(filename=filename,
                 data=[np.array((bound['Significant wave height'], bound['Significant wave height'])),
                       np.array((bound['Peak wave period'], bound['Peak wave period'])),
                       np.array((bound['Mean wave direction'], bound['Mean wave direction'])),
                       np.ones((2, bound.shape[0])) * 23.28],  # 23.28 deg spread — TODO confirm source
                 start_time=start_time,
                 dt=86400,
                 items=items)


# %% Process every production run end-to-end
modelPlot = range(12, 20)
for m in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    MIKEds = dfsIN.read().to_dataframe()
    breakwaterPTS_times = _merge_timesteps(MIKEds, breakwaterPTS_Crest)
    _apply_transmission(breakwaterPTS_times)
    # Keep only the final timestep for the boundary transfer
    endPTS = breakwaterPTS_times.loc[breakwaterPTS_times['Time'] == breakwaterPTS_times['Time'].max(), :]
    startTime = breakwaterPTS_times['Time'].min()
    for bndName, suffix in (('North', 'ProdInnerNorth1.dfs1'),
                            ('Mid', 'ProdInnerMid.dfs1'),
                            ('South', 'ProdInnerSouth.dfs1')):
        innerBound_gdf = _read_inner_boundary(bndName, endPTS)
        _write_inner_boundary_dfs1(innerBound_gdf, _DFS1_OUT + runLog['Run Name'][m] + suffix, startTime)
# %% Test Plot (last processed run, last boundary)
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(9, 9), constrained_layout=True)
endPTS.plot(ax=axes, column='Sign. Wave Height', cmap='viridis')
# FIX: the original plotted `innerBound_gdf` without ever defining it (NameError)
innerBound_gdf.plot(ax=axes, column='transmitted', cmap='viridis')
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
plt.show()

View File

@ -0,0 +1,221 @@
## Plotting Mike21 SW results for SouthShore
# Author: AJMR
# December 22, 2021
#
# Interactive, cell-based ("# %%") post-processing script for MIKE 21 SW
# production runs listed in the project model log ("ModelLog" sheet):
#   1. reads each run's ResultsMap.dfsu and plots significant wave height;
#   2. reads BreakPts.dfs0 / TransectPTS.dfs0 point time series;
#   3. joins the point series onto breakwater toe/crest and transect
#      geometries read from shapefile/CSV;
#   4. writes transposed per-run CSV tables to the project share.
# NOTE(review): this script is written to be run cell-by-cell in an IDE;
# several cells depend on state left behind (or deleted) by earlier cells
# and will not run top-to-bottom as a plain module (see the hm0 note below).
# %% Setup Project
from mikeio import Dfsu, Mesh, Dfs2, Dfs0
from mikeio.eum import ItemInfo, EUMUnit, EUMType
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import math
import matplotlib.pyplot as plt
import contextily as ctx
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
import matplotlib.font_manager as fm
# %% Read Model Log
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models",
              "13632.101.W.AJMR.Rev0_ModelLog.xlsx")
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read Model Results
# modelPlot holds the model-log row indices to process (here a single run, 28).
modelPlot = range(28, 29)
# Lists are sized to the largest run index so they can be indexed by m directly.
hmo_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'ResultsMap.dfsu').as_posix())
    # Read Mesh for first model
    mikeMeshIN = dfsIN.to_shapely()
    # Convert to list of polygons
    mikeMeshIN_list = [p for p in mikeMeshIN]
    ds = dfsIN.read()
    # Element values at time index 1 — presumably the second (final) output
    # step of a stationary run; TODO confirm against the .sw setup.
    hm0 = ds["Sign. Wave Height"][1]
    # Add to list
    hmo_list[m] = gp.GeoDataFrame(hm0, geometry=mikeMeshIN_list)
# Cleanup unnecessary variables
del mikeMeshIN
del mikeMeshIN_list
del hm0
# %% Plotting
# Shaded Water
# Mapbox basemap tile URL (token embedded) used by contextily below.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Axes-fraction position and length of the north arrow annotation.
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
for m in modelPlot:
    fig, axes = plt.subplots(figsize=(6, 6))
    # centroids = hmo_list[m].representative_point()
    # hmo_plot = gp.GeoDataFrame.set_geometry(hmo_list[m], centroids)
    # hmo_plot = hmo_plot.set_crs(crs='EPSG:32616')
    # hmo_plot = hmo_plot.to_crs(epsg=4326)
    # hmo_list[m].plot(ax=axes, column=0, antialiased=False, scheme='equal_interval', k=12, cmap='turbo',
    #                  legend=True, legend_kwds={'fmt': "{:.2f}"})
    # NOTE(review): hm0 was deleted by the cleanup above and dfsIN still
    # refers to the LAST run opened in the read loop — this cell only works
    # when re-run interactively while hm0/dfsIN are alive; it raises
    # NameError if the file is executed top-to-bottom.
    dfsIN.plot(hm0, plot_type='contourf', show_mesh=False, cmap='turbo', ax=axes, levels=12, label='HM0 [m]')
    # axes2 = gplt.polyplot(hmo_list[m], projection=gcrs.AlbersEqualArea)
    # gplt.kdeplot(hmo_plot, cmap='turbo', shade=True, ax=axes)
    # hmo_plot.plot(ax=axes, column=0, antialiased=False)
    # # Add in scale bar and North Arrow
    # scalebar = AnchoredSizeBar(axes.transData,
    #                            500, '500 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
    # axes.add_artist(scalebar)
    axes.annotate('N', xy=(x, y), xytext=(x, y - arrow_length),
                  arrowprops=dict(facecolor='black', width=6, headwidth=20),
                  ha='center', va='center', fontsize=15,
                  xycoords=axes.transAxes)
    # Fixed UTM 16N (EPSG:32616) window around the breakwater.
    axes.set_xlim(left=427800, right=431000)
    axes.set_ylim(bottom=4758000, top=4762000)
    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
    plt.suptitle(runLog['Short Description'][m])
    axes.title.set_text('Significant Wave Height (m)')
    axes.titlesize = 'small'
    axes.set_xlabel('Easting [m]')
    axes.set_ylabel('Northing [m]')
    plt.show()
    fig.savefig(
        '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Images/Production/' +
        runLog['Short Description'][m] + '_HM0.png', bbox_inches='tight')
# %% Read time series
# Per-run point time series as DataFrames: MIKEds_list = breakwater points,
# MIKEdsT_list = transect points (one column per point/parameter).
MIKEds_list = [None] * (max(modelPlot) + 1)
MIKEdsT_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    dfsIN_read = dfsIN.read()
    MIKEds_list[m] = dfsIN_read.to_dataframe()
    dfsTIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                          runLog['Run Name'][m] + '.sw - Result Files', 'TransectPTS.dfs0').as_posix())
    dfsTIN_read = dfsTIN.read()
    MIKEdsT_list[m] = dfsTIN_read.to_dataframe()
# Cleanup unnecessary variables
del dfsIN_read
del dfsIN
del dfsTIN_read
del dfsTIN
# %% Read in Toe and Crest Shapefiles
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
# Crest points come in Wisconsin State Plane South (EPSG:32154); reproject to
# UTM 16N to match the toe shapefile before the nearest join.
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
# Attach the nearest crest elevation to every toe vertex.
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Merge with data
# For every time step, spread the dfs0 columns back onto the toe-point
# GeoDataFrame (matched by FID + vertex index) and stack the per-step frames.
breakPointsOut = [None] * (max(modelPlot) + 1)
breakTimesOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEds_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): this is an alias, not a copy — the .loc writes below
        # mutate breakwaterPTS_Crest in place every iteration; pd.concat
        # snapshots the values, but the shared frame accumulates the added
        # parameter columns. TODO confirm a .copy() was not intended.
        breakwaterPTS_merge = breakwaterPTS_Crest
        for i in range(0, MIKEds_list[m].shape[1]):
            # Column names appear to look like '"FID" "IND" <parameter>':
            # paramName = text after the second quoted token; tmpFID/tmpIND
            # are the quoted integers — magic offsets below assume that
            # exact layout; verify against the dfs0 item naming.
            paramName = MIKEds_list[m].columns.values[i][MIKEds_list[m].columns.values[i].find('"', 5, -1) + 3:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            tmpFID = int(MIKEds_list[m].columns.values[i][1:MIKEds_list[m].columns.values[i].find('"', 1)])
            tmpIND = int(MIKEds_list[m].columns.values[i][5:MIKEds_list[m].columns.values[i].find('"', 5)])
            breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                     (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                    paramName] = MIKEds_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEds_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimesOut[m] = breakwaterPTS_times
# %% Read in Breakwater Transect Points Sh
breakwaterT = pd.read_csv("C:/Users/arey/files/Projects/SouthShore/Bathy/BreakTransectPTS_Names.csv", sep='\t')
breakwaterT_PTS = gp.GeoDataFrame(breakwaterT, geometry=gp.points_from_xy(breakwaterT.X, breakwaterT.Y),
                                  crs='EPSG:32616')
# %% Merge with data
# Same merge pattern as above, but transect columns are matched by the name
# before the ':' separator instead of FID/vertex indices.
breakPoints_TOut = [None] * (max(modelPlot) + 1)
breakTimes_TOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEdsT_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): alias of breakwaterT_PTS (mutated in place), as above.
        breakwaterPTS_merge = breakwaterT_PTS
        for i in range(0, MIKEdsT_list[m].shape[1]):
            paramName = MIKEdsT_list[m].columns.values[i][MIKEdsT_list[m].columns.values[i].find(':', 5, -1) + 2:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            tmpName = MIKEdsT_list[m].columns.values[i][0:MIKEdsT_list[m].columns.values[i].find(':', 1)]
            breakwaterPTS_merge.loc[(breakwaterPTS_merge.Name == tmpName),
                                    paramName] = MIKEdsT_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEdsT_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimes_TOut[m] = breakwaterPTS_times
#%% Format and save
# Flatten geometry to X/Y columns, drop join bookkeeping columns, move X/Y to
# the front, then save the TRANSPOSED tables (one column per point record).
for m in modelPlot:
    saveTmp = breakTimesOut[m].copy()
    saveTmp['X'] = saveTmp.geometry.x
    saveTmp['Y'] = saveTmp.geometry.y
    saveTmp.drop(['vertex_par', 'vertex_p_1', 'angle', 'geometry', 'index_right',
                  'breakCrest_x', 'breakCrest_y', 'Length'], axis=1, inplace=True)
    saveTmp.sort_values(by=['FID', 'vertex_ind'], inplace=True, ignore_index=True)
    # Reorder columns
    # colNames[0:-2] skips the trailing X/Y just appended, which are
    # re-inserted at the front instead.
    colNames = saveTmp.columns.values
    saveTmp = saveTmp[['X', 'Y', *colNames[0:-2]]]
    saveTmpT = saveTmp.transpose(copy=True)
    # Transect points
    saveTmp2 = breakTimes_TOut[m].copy()
    saveTmp2.drop(['geometry'], axis=1, inplace=True)
    saveTmp2T = saveTmp2.transpose(copy=True)
    saveTmpT.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                    'Toe' + runLog['Short Description'][m] + '.csv')
    saveTmp2T.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                     'Transect' + runLog['Short Description'][m] + '.csv')

View File

@ -0,0 +1,220 @@
## Plotting Mike21 SW results for SouthShore
# Author: AJMR
# December 22, 2021
#
# Variant of the production-run script for the "StormRuns" sheet of the model
# log: reads each storm run's ResultsMap.dfsu, maps significant wave height as
# a classed GeoDataFrame plot, then extracts breakwater/transect dfs0 time
# series and exports transposed CSVs.
# NOTE(review): cell-based interactive script; later cells rely on in-place
# mutation of shared frames (see notes below).
# %% Setup Project
from mikeio import Dfsu, Mesh, Dfs2, Dfs0
from mikeio.eum import ItemInfo, EUMUnit, EUMType
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import math
import matplotlib.pyplot as plt
import contextily as ctx
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
import matplotlib.font_manager as fm
# %% Read Model Log
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models",
              "13632.101.W.AJMR.Rev0_ModelLog.xlsx")
runLog = pd.read_excel(pth.as_posix(), "StormRuns")
# %% Read Model Results
# Storm runs 0..3 from the StormRuns sheet.
modelPlot = range(0, 4)
hmo_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'ResultsMap.dfsu').as_posix())
    # Read Mesh for first model
    mikeMeshIN = dfsIN.to_shapely()
    # Convert to list of polygons
    mikeMeshIN_list = [p for p in mikeMeshIN]
    ds = dfsIN.read()
    # All time steps here (no [1] index as in the production script) —
    # presumably time steps become GeoDataFrame columns; TODO confirm.
    hm0 = ds["Sign. Wave Height"]
    # Add to list
    hmo_list[m] = gp.GeoDataFrame(hm0, geometry=mikeMeshIN_list)
    print(m)
# Cleanup unnecessary variables
del mikeMeshIN
del mikeMeshIN_list
del hm0
# %% Plotting
# Shaded Water
# Mapbox basemap tile URL (token embedded) used by contextily below.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# Axes-fraction position and length of the north arrow annotation.
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
for m in modelPlot:
    fig, axes = plt.subplots(figsize=(6, 6))
    plt.xlim([427800, 431000])
    plt.ylim([4758000, 4762000])
    # Classed (equal-interval, 7-class) polygon plot of column 0 of the
    # per-element frame, clamped to 0–6 m.
    hmo_list[m].plot(ax=axes, column=0, antialiased=False, scheme='equal_interval', k=7, cmap='turbo',
                     legend=True, legend_kwds={'fmt': "{:.2f}"}, vmax=6, vmin=0)
    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
    plt.suptitle(runLog['Short Description'][m])
    axes.title.set_text('Significant Wave Height (m)')
    axes.titlesize = 'small'
    axes.set_xlabel('Easting [m]')
    axes.set_ylabel('Northing [m]')
    # # Add in scale bar and North Arrow
    # scalebar = AnchoredSizeBar(axes.transData,
    #                            500, '500 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
    # axes.add_artist(scalebar)
    axes.annotate('N', xy=(x, y), xytext=(x, y - arrow_length),
                  arrowprops=dict(facecolor='black', width=6, headwidth=20),
                  ha='center', va='center', fontsize=15,
                  xycoords=axes.transAxes)
    plt.show()
    fig.savefig(
        '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Images/Production/' +
        runLog['Short Description'][m] + '_HM0.png', bbox_inches='tight')
# %% Read time series
# Per-run point time series: MIKEds_list = breakwater points, MIKEdsT_list =
# transect points. Note storm result folders are NOT number-prefixed here
# (unlike the production script).
MIKEds_list = [None] * (max(modelPlot) + 1)
MIKEdsT_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][m],
                         runLog['Run Name'][m] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    dfsIN_read = dfsIN.read()
    MIKEds_list[m] = dfsIN_read.to_dataframe()
    dfsTIN = Dfs0(pl.Path(runLog['Run Location'][m],
                          runLog['Run Name'][m] + '.sw - Result Files', 'TransectPTS.dfs0').as_posix())
    dfsTIN_read = dfsTIN.read()
    MIKEdsT_list[m] = dfsTIN_read.to_dataframe()
    print(m)
# Cleanup unnecessary variables
del dfsIN_read
del dfsIN
del dfsTIN_read
del dfsTIN
# %% Read in Toe and Crest Shapefiles
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
# Crest points are in Wisconsin State Plane South (EPSG:32154); reproject to
# UTM 16N to match the toe shapefile before the nearest join.
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
# Attach the nearest crest elevation to every toe vertex.
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Merge with data
# For every time step, spread the dfs0 columns back onto the toe-point frame
# (matched by FID + vertex index) and stack per-step snapshots.
breakPointsOut = [None] * (max(modelPlot) + 1)
breakTimesOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEds_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): alias, not a copy — breakwaterPTS_Crest is mutated in
        # place each iteration and breakPointsOut[m] ends up pointing at the
        # shared frame (last time step's values). TODO confirm intended.
        breakwaterPTS_merge = breakwaterPTS_Crest
        for i in range(0, MIKEds_list[m].shape[1]):
            # Column names appear to look like '"FID" "IND" <parameter>';
            # the magic offsets below assume that exact layout — verify.
            paramName = MIKEds_list[m].columns.values[i][MIKEds_list[m].columns.values[i].find('"', 5, -1) + 3:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            tmpFID = int(MIKEds_list[m].columns.values[i][1:MIKEds_list[m].columns.values[i].find('"', 1)])
            tmpIND = int(MIKEds_list[m].columns.values[i][5:MIKEds_list[m].columns.values[i].find('"', 5)])
            breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                     (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                    paramName] = MIKEds_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEds_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
        # Progress output for the (slow) per-step merge.
        print(i)
        print(m)
    breakPointsOut[m] = breakwaterPTS_merge
    breakTimesOut[m] = breakwaterPTS_times
# %% Read in Breakwater Transect Points Sh
breakwaterT = pd.read_csv("C:/Users/arey/files/Projects/SouthShore/Bathy/BreakTransectPTS_Names.csv", sep='\t')
breakwaterT_PTS = gp.GeoDataFrame(breakwaterT, geometry=gp.points_from_xy(breakwaterT.X, breakwaterT.Y),
                                  crs='EPSG:32616')
# %% Merge with data
# Same merge pattern, transect columns matched by the name before ':'.
breakPoints_TOut = [None] * (max(modelPlot) + 1)
breakTimes_TOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEdsT_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): alias of breakwaterT_PTS (mutated in place), as above.
        breakwaterPTS_merge = breakwaterT_PTS
        for i in range(0, MIKEdsT_list[m].shape[1]):
            paramName = MIKEdsT_list[m].columns.values[i][MIKEdsT_list[m].columns.values[i].find(':', 5, -1) + 2:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            tmpName = MIKEdsT_list[m].columns.values[i][0:MIKEdsT_list[m].columns.values[i].find(':', 1)]
            breakwaterPTS_merge.loc[(breakwaterPTS_merge.Name == tmpName),
                                    paramName] = MIKEdsT_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEdsT_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakPoints_TOut[m] = breakwaterPTS_merge
    breakTimes_TOut[m] = breakwaterPTS_times
#%% Format and save
# Flatten geometry to X/Y, drop join bookkeeping columns, move X/Y to the
# front, then save TRANSPOSED tables. Note: unlike the production script,
# this saves breakPointsOut / breakPoints_TOut (last-step snapshots), not the
# stacked time tables — presumably intentional for storm peaks; confirm.
for m in modelPlot:
    saveTmp = breakPointsOut[m].copy()
    saveTmp['X'] = saveTmp.geometry.x
    saveTmp['Y'] = saveTmp.geometry.y
    saveTmp.drop(['vertex_par', 'vertex_p_1', 'angle', 'geometry', 'index_right',
                  'breakCrest_x', 'breakCrest_y', 'Length'], axis=1, inplace=True)
    saveTmp.sort_values(by=['FID', 'vertex_ind'], inplace=True, ignore_index=True)
    # Reorder columns
    # colNames[0:-2] skips the trailing X/Y just appended, which are
    # re-inserted at the front instead.
    colNames = saveTmp.columns.values
    saveTmp = saveTmp[['X', 'Y', *colNames[0:-2]]]
    saveTmpT = saveTmp.transpose(copy=True)
    # Transect points
    saveTmp2 = breakPoints_TOut[m].copy()
    saveTmp2.drop(['geometry'], axis=1, inplace=True)
    saveTmp2T = saveTmp2.transpose(copy=True)
    saveTmpT.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                    'Toe' + runLog['Short Description'][m] + '.csv')
    saveTmp2T.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                     'Transect' + runLog['Short Description'][m] + '.csv')

View File

@ -0,0 +1,269 @@
## Plotting Mike21 SW results for SouthShore
# Author: AJMR
# December 22, 2021
#
# MIKE 21 HD ("HD" sheet of the model log) post-processing, revision B:
# reads each run's FullDomain.dfsu, plots the final-step current speed (ft/s)
# as filled contours with quiver arrows on an overset grid, then extracts
# breakwater/transect dfs0 time series and exports transposed CSVs.
# NOTE(review): cell-based interactive script; cells depend on state from
# earlier cells (see notes below).
# %% Setup Project
from mikeio import Dfsu, Mesh, Dfs2, Dfs0
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import matplotlib.pyplot as plt
import contextily as ctx
import matplotlib.font_manager as fm
# %% Read Model Log
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models",
              "13632.101.W.AJMR.Rev0_ModelLog.xlsx")
runLog = pd.read_excel(pth.as_posix(), "HD")
# %% Read Model Results
# HD runs 2..5; keep last-step speed/U/V plus the open Dfsu handles and full
# datasets for the interpolation step below.
modelPlot = range(2, 6)
curSpeed_list = [None] * (max(modelPlot) + 1)
curElm_list = [None] * (max(modelPlot) + 1)
curU_list = [None] * (max(modelPlot) + 1)
curV_list = [None] * (max(modelPlot) + 1)
dfs_list = [None] * (max(modelPlot) + 1)
ds_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfsu(pl.Path(runLog['Run Location'][m],
                         runLog['Run Name'][m] + '.m21fm - Result Files', 'FullDomain.dfsu').as_posix())
    dfs_list[m] = dfsIN
    ds_list[m] = dfs_list[m].read()
    # Final time step ([-1]) of each item.
    curSpeed_list[m] = ds_list[m]["Current speed"][-1]
    curU_list[m] = ds_list[m]["U velocity"][-1]
    curV_list[m] = ds_list[m]["V velocity"][-1]
    curElm_list[m] = dfs_list[m].element_coordinates
    # # Read Mesh for first model
    # mikeMeshIN = dfsIN.to_shapely()
    # # Convert to list of polygons
    # mikeMeshIN_list = [p for p in mikeMeshIN]
    # Add to list
    # hmo_list[m] = gp.GeoDataFrame(hm0, geometry=mikeMeshIN_list)
# Cleanup unnecessary variables
# del mikeMeshIN
# del mikeMeshIN_list
del dfsIN
# %% Plotting
# Shaded Water
# Mapbox basemap tile URL (token embedded) used by contextily below.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
modelPlot = range(2, 6)
vmin = 0
vmax = 1
scale = 1 #5
for m in modelPlot:
    # Per-run plot tuning: runs < 4 use a wider color range / arrow cap than
    # runs >= 4 (presumably storm vs. ambient cases — confirm with the log).
    # vmax = contour color ceiling (ft/s); qmax = arrow-magnitude cap;
    # qfac = arrow scale factor; scale = quiver scale (data units per arrow).
    if m < 4:
        vmax = 5
        qmax = 4
        qfac = 1.5
        scale = 250
    else:
        vmax = 1
        qmax = 10
        qfac = 10
        scale = 250
    fig, axes = plt.subplots(figsize=(8, 8))
    # Convert to feet and ignore missing
    cur_plot = curSpeed_list[m]*3.28084
    cur_nan = np.isnan(cur_plot)
    axDHI = dfs_list[m].plot(cur_plot[~cur_nan], plot_type='contourf', show_mesh=False, cmap='viridis', ax=axes, levels=11,
                             vmin=vmin, vmax=vmax, label='Current Speed (ft/s)',
                             elements=dfs_list[m].element_ids[~cur_nan])
    # Plot arrows on overset grid
    # Regular 50 m grid over the model domain; velocities are interpolated
    # from mesh elements to the grid (nearest element) for clean arrows.
    overGrid = dfs_list[m].get_overset_grid(dx=50)
    interpolant = dfs_list[m].get_2d_interpolant(overGrid.xy, n_nearest=1)
    # Interpolate velocity at last time step by selecting time from dataset
    overGrid_Cur = dfs_list[m].interp2d(ds_list[m][ds_list[m].time[-1]:], *interpolant, shape=(overGrid.ny, overGrid.nx))
    overGrid_U = overGrid_Cur["U velocity"][-1]
    overGrid_V = overGrid_Cur["V velocity"][-1]
    # Normalize velocity above limit
    # m/s -> ft/s for both components before scaling.
    overGrid_U_Scale = overGrid_U*3.28084
    overGrid_V_Scale = overGrid_V*3.28084
    # r = scaled-vector magnitude; curPlotMask marks arrows exceeding qmax.
    r = np.power(np.add(np.power(overGrid_U_Scale * qfac, 2), np.power(overGrid_V_Scale * qfac, 2)), 0.5)
    curPlotMask = np.sqrt((overGrid_U_Scale * qfac) ** 2 + (overGrid_V_Scale * qfac) ** 2) > qmax
    overGrid_U_Plot = np.zeros(overGrid_U_Scale.shape)
    overGrid_V_Plot = np.zeros(overGrid_V_Scale.shape)
    # Scale arrows:
    # Arrows above qmax are scaled to qfac, multiplied by qmax, since the scaling factor (r) sets the magnitude to 1
    # Arrows below qmax are scaled by qface
    overGrid_U_Plot[curPlotMask] = ((overGrid_U_Scale[curPlotMask] * qfac)/r[curPlotMask]) * qmax
    overGrid_V_Plot[curPlotMask] = ((overGrid_V_Scale[curPlotMask] * qfac)/r[curPlotMask]) * qmax
    overGrid_U_Plot[~curPlotMask] = (overGrid_U_Scale[~curPlotMask])*qfac
    overGrid_V_Plot[~curPlotMask] = (overGrid_V_Scale[~curPlotMask])*qfac
    qv = axDHI.quiver(overGrid.x, overGrid.y, overGrid_U_Plot, overGrid_V_Plot, scale=scale, color='k') #200
    # # Add in scale bar and North Arrow
    # scalebar = AnchoredSizeBar(axes.transData,
    #                            500, '500 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
    # axes.add_artist(scalebar)
    # axes.annotate('N', xy=(x, y), xytext=(x, y - arrow_length),
    #               arrowprops=dict(facecolor='black', width=6, headwidth=20),
    #               ha='center', va='center', fontsize=15,
    #               xycoords=axes.transAxes)
    # Fixed UTM 16N window around the breakwater.
    axes.set_xlim(left=427800, right=431000)
    axes.set_ylim(bottom=4758000, top=4762000)
    # axes.set_xlim(left=428000, right=430500)
    # axes.set_ylim(bottom=4758750, top=4761000)
    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
    # axes.title.set_text(runLog['Short Description'][m])
    # axes.titlesize = 'x-large'
    fig.suptitle(runLog['Short Description'][m], fontsize=18)
    axes.set_xlabel('Easting [m]')
    axes.set_ylabel('Northing [m]')
    plt.show()
    fig.savefig(
        '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/04_MIKE21_HD/Images/Production/' +
        runLog['Short Description'][m] + '_RevB_Current.png', bbox_inches='tight', dpi=300)
# %% Read time series
# NOTE(review): these paths use the number-prefixed '.sw - Result Files'
# layout although this script reads the "HD" sheet ('.m21fm' runs above) —
# verify the dfs0 files actually live under these names for HD runs.
MIKEds_list = [None] * (max(modelPlot) + 1)
MIKEdsT_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    dfsIN_read = dfsIN.read()
    MIKEds_list[m] = dfsIN_read.to_dataframe()
    dfsTIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                          runLog['Run Name'][m] + '.sw - Result Files', 'TransectPTS.dfs0').as_posix())
    dfsTIN_read = dfsTIN.read()
    MIKEdsT_list[m] = dfsTIN_read.to_dataframe()
# Cleanup unnecessary variables
del dfsIN_read
del dfsIN
del dfsTIN_read
del dfsTIN
# %% Read in Toe and Crest Shapefiles
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
# Crest points are in Wisconsin State Plane South (EPSG:32154); reproject to
# UTM 16N to match the toe shapefile before the nearest join.
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Merge with data
# For every time step, spread dfs0 columns back onto the toe-point frame
# (matched by FID + vertex index) and stack per-step snapshots.
breakPointsOut = [None] * (max(modelPlot) + 1)
breakTimesOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEds_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): alias, not a copy — breakwaterPTS_Crest is mutated in
        # place every iteration; pd.concat snapshots values per step. TODO
        # confirm a .copy() was not intended.
        breakwaterPTS_merge = breakwaterPTS_Crest
        for i in range(0, MIKEds_list[m].shape[1]):
            # Column names appear to look like '"FID" "IND" <parameter>';
            # the magic offsets below assume that exact layout — verify.
            paramName = MIKEds_list[m].columns.values[i][MIKEds_list[m].columns.values[i].find('"', 5, -1) + 3:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            tmpFID = int(MIKEds_list[m].columns.values[i][1:MIKEds_list[m].columns.values[i].find('"', 1)])
            tmpIND = int(MIKEds_list[m].columns.values[i][5:MIKEds_list[m].columns.values[i].find('"', 5)])
            breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                     (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                    paramName] = MIKEds_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEds_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimesOut[m] = breakwaterPTS_times
# %% Read in Breakwater Transect Points Sh
breakwaterT = pd.read_csv("C:/Users/arey/files/Projects/SouthShore/Bathy/BreakTransectPTS_Names.csv", sep='\t')
breakwaterT_PTS = gp.GeoDataFrame(breakwaterT, geometry=gp.points_from_xy(breakwaterT.X, breakwaterT.Y),
                                  crs='EPSG:32616')
# %% Merge with data
# Same merge pattern, transect columns matched by the name before ':'.
breakPoints_TOut = [None] * (max(modelPlot) + 1)
breakTimes_TOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEdsT_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): alias of breakwaterT_PTS (mutated in place), as above.
        breakwaterPTS_merge = breakwaterT_PTS
        for i in range(0, MIKEdsT_list[m].shape[1]):
            paramName = MIKEdsT_list[m].columns.values[i][MIKEdsT_list[m].columns.values[i].find(':', 5, -1) + 2:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            tmpName = MIKEdsT_list[m].columns.values[i][0:MIKEdsT_list[m].columns.values[i].find(':', 1)]
            breakwaterPTS_merge.loc[(breakwaterPTS_merge.Name == tmpName),
                                    paramName] = MIKEdsT_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEdsT_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimes_TOut[m] = breakwaterPTS_times
#%% Format and save
# Flatten geometry to X/Y, drop join bookkeeping columns, move X/Y to the
# front, then save TRANSPOSED per-run tables to the project share.
for m in modelPlot:
    saveTmp = breakTimesOut[m].copy()
    saveTmp['X'] = saveTmp.geometry.x
    saveTmp['Y'] = saveTmp.geometry.y
    saveTmp.drop(['vertex_par', 'vertex_p_1', 'angle', 'geometry', 'index_right',
                  'breakCrest_x', 'breakCrest_y', 'Length'], axis=1, inplace=True)
    saveTmp.sort_values(by=['FID', 'vertex_ind'], inplace=True, ignore_index=True)
    # Reorder columns
    # colNames[0:-2] skips the trailing X/Y just appended, which are
    # re-inserted at the front instead.
    colNames = saveTmp.columns.values
    saveTmp = saveTmp[['X', 'Y', *colNames[0:-2]]]
    saveTmpT = saveTmp.transpose(copy=True)
    # Transect points
    saveTmp2 = breakTimes_TOut[m].copy()
    saveTmp2.drop(['geometry'], axis=1, inplace=True)
    saveTmp2T = saveTmp2.transpose(copy=True)
    saveTmpT.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                    'Toe' + runLog['Short Description'][m] + '.csv')
    saveTmp2T.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                     'Transect' + runLog['Short Description'][m] + '.csv')

View File

@ -0,0 +1,259 @@
## Plotting Mike21 SW results for SouthShore
# Author: AJMR
# December 22, 2021
#
# MIKE 21 HD ("HD" sheet) post-processing, revision C: like RevB but with a
# different quiver-arrow normalization (unit-length arrows above qmax,
# proportionally shortened below) and tuned quiver styling. Reads each run's
# FullDomain.dfsu, plots final-step current speed (ft/s), then extracts
# breakwater/transect dfs0 time series and exports transposed CSVs.
# NOTE(review): cell-based interactive script; cells depend on state from
# earlier cells (see notes below).
# %% Setup Project
from mikeio import Dfsu, Mesh, Dfs2, Dfs0
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import matplotlib.pyplot as plt
import contextily as ctx
import matplotlib.font_manager as fm
# %% Read Model Log
pth = pl.Path("//srv-mad3.baird.com/", "Projects", "13632.101 South Shore Breakwater", "06_Models",
              "13632.101.W.AJMR.Rev0_ModelLog.xlsx")
runLog = pd.read_excel(pth.as_posix(), "HD")
# %% Read Model Results
# HD runs 2..5; keep last-step speed/U/V plus the open Dfsu handles and full
# datasets for the interpolation step below.
modelPlot = range(2, 6)
curSpeed_list = [None] * (max(modelPlot) + 1)
curElm_list = [None] * (max(modelPlot) + 1)
curU_list = [None] * (max(modelPlot) + 1)
curV_list = [None] * (max(modelPlot) + 1)
dfs_list = [None] * (max(modelPlot) + 1)
ds_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfsu(pl.Path(runLog['Run Location'][m],
                         runLog['Run Name'][m] + '.m21fm - Result Files', 'FullDomain.dfsu').as_posix())
    dfs_list[m] = dfsIN
    ds_list[m] = dfs_list[m].read()
    # Final time step ([-1]) of each item.
    curSpeed_list[m] = ds_list[m]["Current speed"][-1]
    curU_list[m] = ds_list[m]["U velocity"][-1]
    curV_list[m] = ds_list[m]["V velocity"][-1]
    curElm_list[m] = dfs_list[m].element_coordinates
    # # Read Mesh for first model
    # mikeMeshIN = dfsIN.to_shapely()
    # # Convert to list of polygons
    # mikeMeshIN_list = [p for p in mikeMeshIN]
    # Add to list
    # hmo_list[m] = gp.GeoDataFrame(hm0, geometry=mikeMeshIN_list)
# Cleanup unnecessary variables
# del mikeMeshIN
# del mikeMeshIN_list
del dfsIN
# %% Plotting
# Shaded Water
# Mapbox basemap tile URL (token embedded) used by contextily below.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
modelPlot = range(2, 6)
vmin = 0
vmax = 1
scale = 1 #5
for m in modelPlot:
    # Per-run plot tuning (presumably storm vs. ambient cases — confirm):
    # vmax = contour color ceiling (ft/s); qmax = magnitude at/above which
    # arrows are drawn at full (unit) length; scale = quiver scale;
    # amin = quiver minlength (shorter arrows are drawn as dots).
    if m < 4:
        vmax = 5
        qmax = 4
        scale = 50
        amin = 1
    else:
        vmax = 1
        qmax = 0.25
        scale = 75
        amin = 3
    fig, axes = plt.subplots(figsize=(8, 8))
    # Convert to feet and ignore missing
    cur_plot = curSpeed_list[m]*3.28084
    cur_nan = np.isnan(cur_plot)
    axDHI = dfs_list[m].plot(cur_plot[~cur_nan], plot_type='contourf', show_mesh=False, cmap='viridis', ax=axes, levels=11,
                             vmin=vmin, vmax=vmax, label='Current Speed (ft/s)',
                             elements=dfs_list[m].element_ids[~cur_nan])
    # Plot arrows on overset grid
    # Regular 50 m grid over the model domain; velocities are interpolated
    # from mesh elements to the grid (nearest element) for clean arrows.
    overGrid = dfs_list[m].get_overset_grid(dx=50)
    interpolant = dfs_list[m].get_2d_interpolant(overGrid.xy, n_nearest=1)
    # Interpolate velocity at last time step by selecting time from dataset
    overGrid_Cur = dfs_list[m].interp2d(ds_list[m][ds_list[m].time[-1]:], *interpolant, shape=(overGrid.ny, overGrid.nx))
    overGrid_U = overGrid_Cur["U velocity"][-1]
    overGrid_V = overGrid_Cur["V velocity"][-1]
    # Normalize velocity above limit
    # m/s -> ft/s for both components.
    overGrid_U_Scale = overGrid_U*3.28084
    overGrid_V_Scale = overGrid_V*3.28084
    # r = vector magnitude (ft/s); curPlotMask marks arrows exceeding qmax.
    r = np.power(np.add(np.power(overGrid_U_Scale, 2), np.power(overGrid_V_Scale, 2)), 0.5)
    curPlotMask = np.sqrt((overGrid_U_Scale) ** 2 + (overGrid_V_Scale) ** 2) > qmax
    # NOTE(review): these zero arrays are immediately overwritten by the
    # full-array assignments two lines below — dead initialization kept as-is.
    overGrid_U_Plot = np.zeros(overGrid_U_Scale.shape)
    overGrid_V_Plot = np.zeros(overGrid_V_Scale.shape)
    # Normalize arrows to 1
    overGrid_U_Plot = overGrid_U_Scale / r
    overGrid_V_Plot = overGrid_V_Scale / r
    # Scale arrows below qmax
    # Arrows under the cap are shortened proportionally to r/qmax, so length
    # encodes magnitude up to qmax and saturates at unit length above it.
    overGrid_U_Plot[~curPlotMask] = overGrid_U_Plot[~curPlotMask] * (r[~curPlotMask] / qmax)
    overGrid_V_Plot[~curPlotMask] = overGrid_V_Plot[~curPlotMask] * (r[~curPlotMask] / qmax)
    qv = axDHI.quiver(overGrid.x, overGrid.y, overGrid_U_Plot, overGrid_V_Plot, scale=scale,
                      color='k', headlength=3.5, headwidth=3.5, headaxislength=3.5, width=0.0015, minlength=amin) #200
    # Fixed UTM 16N window around the breakwater.
    axes.set_xlim(left=427800, right=431000)
    axes.set_ylim(bottom=4758000, top=4762000)
    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
    # axes.title.set_text(runLog['Short Description'][m])
    # axes.titlesize = 'x-large'
    fig.suptitle(runLog['Short Description'][m], fontsize=18)
    axes.set_xlabel('Easting [m]')
    axes.set_ylabel('Northing [m]')
    plt.show()
    fig.savefig(
        '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/04_MIKE21_HD/Images/Production/' +
        runLog['Short Description'][m] + '_RevC_Current.png', bbox_inches='tight', dpi=300)
# %% Read time series
# NOTE(review): paths use the number-prefixed '.sw - Result Files' layout
# although this script reads the "HD" sheet ('.m21fm' runs above) — verify
# the dfs0 files actually live under these names for HD runs.
MIKEds_list = [None] * (max(modelPlot) + 1)
MIKEdsT_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    dfsIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                         runLog['Run Name'][m] + '.sw - Result Files', 'BreakPts.dfs0').as_posix())
    dfsIN_read = dfsIN.read()
    MIKEds_list[m] = dfsIN_read.to_dataframe()
    dfsTIN = Dfs0(pl.Path(runLog['Run Location'][m], str(runLog['Number'][m]) + '_' +
                          runLog['Run Name'][m] + '.sw - Result Files', 'TransectPTS.dfs0').as_posix())
    dfsTIN_read = dfsTIN.read()
    MIKEdsT_list[m] = dfsTIN_read.to_dataframe()
# Cleanup unnecessary variables
del dfsIN_read
del dfsIN
del dfsTIN_read
del dfsTIN
# %% Read in Toe and Crest Shapefiles
breakwaterPTS = gp.read_file("//srv-mad3.baird.com/Projects/"
                             "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
                             "20211211_Toe Extents (to Alexander)/"
                             "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp")
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None, names=['x', 'y', 'z'])
# Crest points are in Wisconsin State Plane South (EPSG:32154); reproject to
# UTM 16N to match the toe shapefile before the nearest join.
breakCrest_gdf = gp.GeoDataFrame(breakCrest, crs='EPSG:32154',
                                 geometry=gp.points_from_xy(breakCrest.x, breakCrest.y))
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'}, inplace=True)
# %% Merge with data
# For every time step, spread dfs0 columns back onto the toe-point frame
# (matched by FID + vertex index) and stack per-step snapshots.
breakPointsOut = [None] * (max(modelPlot) + 1)
breakTimesOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEds_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): alias, not a copy — breakwaterPTS_Crest is mutated in
        # place every iteration; pd.concat snapshots values per step. TODO
        # confirm a .copy() was not intended.
        breakwaterPTS_merge = breakwaterPTS_Crest
        for i in range(0, MIKEds_list[m].shape[1]):
            # Column names appear to look like '"FID" "IND" <parameter>';
            # the magic offsets below assume that exact layout — verify.
            paramName = MIKEds_list[m].columns.values[i][MIKEds_list[m].columns.values[i].find('"', 5, -1) + 3:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            tmpFID = int(MIKEds_list[m].columns.values[i][1:MIKEds_list[m].columns.values[i].find('"', 1)])
            tmpIND = int(MIKEds_list[m].columns.values[i][5:MIKEds_list[m].columns.values[i].find('"', 5)])
            breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                     (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                    paramName] = MIKEds_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEds_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimesOut[m] = breakwaterPTS_times
# %% Read in Breakwater Transect Points Sh
breakwaterT = pd.read_csv("C:/Users/arey/files/Projects/SouthShore/Bathy/BreakTransectPTS_Names.csv", sep='\t')
breakwaterT_PTS = gp.GeoDataFrame(breakwaterT, geometry=gp.points_from_xy(breakwaterT.X, breakwaterT.Y),
                                  crs='EPSG:32616')
# %% Merge with data
# Same merge pattern, transect columns matched by the name before ':'.
breakPoints_TOut = [None] * (max(modelPlot) + 1)
breakTimes_TOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    breakwaterPTS_times = None
    for t in range(0, MIKEdsT_list[m].shape[0]):
        breakwaterPTS_merge = None
        # NOTE(review): alias of breakwaterT_PTS (mutated in place), as above.
        breakwaterPTS_merge = breakwaterT_PTS
        for i in range(0, MIKEdsT_list[m].shape[1]):
            paramName = MIKEdsT_list[m].columns.values[i][MIKEdsT_list[m].columns.values[i].find(':', 5, -1) + 2:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.full([breakwaterPTS_merge.shape[0], 1], np.nan)
            tmpName = MIKEdsT_list[m].columns.values[i][0:MIKEdsT_list[m].columns.values[i].find(':', 1)]
            breakwaterPTS_merge.loc[(breakwaterPTS_merge.Name == tmpName),
                                    paramName] = MIKEdsT_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEdsT_list[m].index[t]
        breakwaterPTS_times = pd.concat([breakwaterPTS_times, breakwaterPTS_merge], ignore_index=True)
    breakTimes_TOut[m] = breakwaterPTS_times
#%% Format and save
# Flatten geometry to X/Y, drop join bookkeeping columns, move X/Y to the
# front, then save TRANSPOSED per-run tables to the project share.
for m in modelPlot:
    saveTmp = breakTimesOut[m].copy()
    saveTmp['X'] = saveTmp.geometry.x
    saveTmp['Y'] = saveTmp.geometry.y
    saveTmp.drop(['vertex_par', 'vertex_p_1', 'angle', 'geometry', 'index_right',
                  'breakCrest_x', 'breakCrest_y', 'Length'], axis=1, inplace=True)
    saveTmp.sort_values(by=['FID', 'vertex_ind'], inplace=True, ignore_index=True)
    # Reorder columns
    # colNames[0:-2] skips the trailing X/Y just appended, which are
    # re-inserted at the front instead.
    colNames = saveTmp.columns.values
    saveTmp = saveTmp[['X', 'Y', *colNames[0:-2]]]
    saveTmpT = saveTmp.transpose(copy=True)
    # Transect points
    saveTmp2 = breakTimes_TOut[m].copy()
    saveTmp2.drop(['geometry'], axis=1, inplace=True)
    saveTmp2T = saveTmp2.transpose(copy=True)
    saveTmpT.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                    'Toe' + runLog['Short Description'][m] + '.csv')
    saveTmp2T.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                     'Transect' + runLog['Short Description'][m] + '.csv')

View File

@ -0,0 +1,216 @@
## Plotting Mike21 SW results for SouthShore
# Author: AJMR
# December 22, 2021
# %% Setup Project
from mikeio import Dfsu, Mesh, Dfs2, Dfs0
from mikeio.eum import ItemInfo, EUMUnit, EUMType
import pandas as pd
import pathlib as pl
import numpy as np
import geopandas as gp
import math
import matplotlib.pyplot as plt
import contextily as ctx
from mpl_toolkits.axes_grid1.anchored_artists import AnchoredSizeBar, AnchoredDirectionArrows
import matplotlib.font_manager as fm
# %% Read Model Log
# Run metadata (locations, numbers, names, descriptions) lives in the
# "ModelLog" sheet of the project model-log workbook.
pth = pl.Path(
    "//srv-mad3.baird.com/",
    "Projects",
    "13632.101 South Shore Breakwater",
    "06_Models",
    "13632.101.W.AJMR.Rev0_ModelLog.xlsx",
)
runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read Model Results
# Load the dfsu area result for each run and keep the significant wave height
# field at output time step index 1.
modelPlot = range(20, 29)
hmo_list = [None] * (max(modelPlot) + 1)
dfs_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    result_path = pl.Path(
        runLog['Run Location'][m],
        str(runLog['Number'][m]) + '_' + runLog['Run Name'][m] + '.sw - Result Files',
        'ResultsMap.dfsu',
    )
    dfsu_in = Dfsu(result_path.as_posix())
    dfs_list[m] = dfsu_in  # keep the Dfsu object for plotting (element ids, mesh)
    dataset = dfsu_in.read()
    hmo_list[m] = dataset["Sign. Wave Height"][1]
# %% Plotting
# Filled-contour maps of significant wave height (converted to feet) on a
# Mapbox basemap, one figure per model, saved as production PNGs.
# Shaded Water
# NOTE(review): Mapbox access token is hardcoded in this URL — consider moving
# it to configuration/environment.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# North-arrow placement parameters (axes fraction); only used by the
# commented-out annotation below.
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
# NOTE(review): this narrows modelPlot to run 28 only — all later sections
# (time-series read, merges, export) inherit this range; confirm intentional.
modelPlot = range(28, 29)
for m in modelPlot:
    fig, axes = plt.subplots(figsize=(8, 8))
    # Convert to feet and ignore missing
    hm0_plot = hmo_list[m]*3.28084
    hm0_nan = np.isnan(hm0_plot)
    # Plot only elements with valid data; element_ids is filtered with the
    # same NaN mask so values and elements stay aligned.
    dfs_list[m].plot(hm0_plot[~hm0_nan], plot_type='contourf', show_mesh=False, cmap='turbo', ax=axes, levels=21,
                     vmin=0, vmax=20, label='Spectral significant wave height (ft)',
                     elements=dfs_list[m].element_ids[~hm0_nan])
    # # Add in scale bar and North Arrow
    # scalebar = AnchoredSizeBar(axes.transData,
    #                            500, '500 m', 'lower right', pad=0.5, size_vertical=10, fontproperties=fontprops)
    # axes.add_artist(scalebar)
    # axes.annotate('N', xy=(x, y), xytext=(x, y - arrow_length),
    #               arrowprops=dict(facecolor='black', width=6, headwidth=20),
    #               ha='center', va='center', fontsize=15,
    #               xycoords=axes.transAxes)
    # Fixed UTM 16N viewport around the breakwater (meters).
    axes.set_xlim(left=427800, right=431000)
    axes.set_ylim(bottom=4758000, top=4762000)
    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32616')
    # axes.title.set_text(runLog['Short Description'][m])
    # axes.titlesize = 'x-large'
    fig.suptitle(runLog['Short Description'][m], fontsize=18)
    axes.set_xlabel('Easting [m]')
    axes.set_ylabel('Northing [m]')
    plt.show()
    fig.savefig(
        '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Images/Production/' +
        runLog['Short Description'][m] + '_HM0.png', bbox_inches='tight', dpi=300)
# %% Read time series
# Load the toe-point and transect-point dfs0 extractions for each model as
# pandas DataFrames (index = time, columns = point/parameter).
MIKEds_list = [None] * (max(modelPlot) + 1)
MIKEdsT_list = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    result_dir = pl.Path(
        runLog['Run Location'][m],
        str(runLog['Number'][m]) + '_' + runLog['Run Name'][m] + '.sw - Result Files',
    )
    # Breakwater toe points
    MIKEds_list[m] = Dfs0((result_dir / 'BreakPts.dfs0').as_posix()).read().to_dataframe()
    # Transect points
    MIKEdsT_list[m] = Dfs0((result_dir / 'TransectPTS.dfs0').as_posix()).read().to_dataframe()
# %% Read in Toe and Crest Shapefiles
# Toe polyline vertices (already projected to UTM 16N per the filename).
breakwaterPTS = gp.read_file(
    "//srv-mad3.baird.com/Projects/"
    "13632.101 South Shore Breakwater/08_CADD/Outgoing/"
    "20211211_Toe Extents (to Alexander)/"
    "20211211_Toe_Extents_NAD83_WISStatePlaneSZn_USFt_Lines_OffshoreClipSimple_10m_vertexUTM.shp"
)
# Crest points arrive as a headerless x/y/z CSV in WI State Plane South (EPSG:32154).
breakCrest = pd.read_csv(
    '//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/08_CADD/Outgoing/20211214_Crest Points (to Alexander)/20211214_Crest_Points_m.csv',
    header=None,
    names=['x', 'y', 'z'],
)
breakCrest_gdf = gp.GeoDataFrame(
    breakCrest,
    crs='EPSG:32154',
    geometry=gp.points_from_xy(breakCrest.x, breakCrest.y),
)
# Reproject crest points into UTM 16N so they align with the toe vertices.
breakCrest_gdf.to_crs('EPSG:32616', inplace=True)
# Tag every toe vertex with its nearest crest point, then rename the crest
# columns so they don't collide with toe coordinates downstream.
breakwaterPTS_Crest = breakwaterPTS.sjoin_nearest(breakCrest_gdf)
breakwaterPTS_Crest.rename(
    columns={'x': 'breakCrest_x', 'y': 'breakCrest_y', 'z': 'Crest'},
    inplace=True,
)
# %% Merge with data
# Join each time step of the toe-point dfs0 extraction onto the toe/crest
# point table, producing one long table per model (rows = points x time steps).
breakPointsOut = [None] * (max(modelPlot) + 1)  # NOTE(review): appears unused in visible code — verify downstream
breakTimesOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    frames = []  # one merged snapshot per time step
    for t in range(0, MIKEds_list[m].shape[0]):
        # Work on a copy: the original code aliased breakwaterPTS_Crest here,
        # so every time step (and model) mutated the shared source table.
        breakwaterPTS_merge = breakwaterPTS_Crest.copy()
        for i in range(0, MIKEds_list[m].shape[1]):
            colName = MIKEds_list[m].columns.values[i]
            # Column names encode point identity and parameter, apparently as
            # '"<FID>" "<vertex_ind>": <parameter>' — TODO confirm format.
            paramName = colName[colName.find('"', 5, -1) + 3:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.nan  # scalar NaN broadcasts over all rows
            tmpFID = int(colName[1:colName.find('"', 1)])
            tmpIND = int(colName[5:colName.find('"', 5)])
            breakwaterPTS_merge.loc[((breakwaterPTS_merge.FID == tmpFID) &
                                     (breakwaterPTS_merge.vertex_ind == tmpIND)),
                                    paramName] = MIKEds_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEds_list[m].index[t]
        frames.append(breakwaterPTS_merge)
    # Single concat is linear; the original concatenated pairwise per step (quadratic).
    breakTimesOut[m] = pd.concat(frames, ignore_index=True) if frames else None
# %% Read in Breakwater Transect Points Sh
# Named transect points come from a tab-separated CSV; promote them to a
# GeoDataFrame in UTM zone 16N using the X/Y coordinate columns.
breakwaterT = pd.read_csv("C:/Users/arey/files/Projects/SouthShore/Bathy/BreakTransectPTS_Names.csv", sep='\t')
breakwaterT_PTS = gp.GeoDataFrame(
    breakwaterT,
    geometry=gp.points_from_xy(breakwaterT.X, breakwaterT.Y),
    crs='EPSG:32616',
)
# %% Merge with data
# Join each time step of the transect dfs0 extraction onto the transect point
# table, producing one long table per model (rows = points x time steps).
breakPoints_TOut = [None] * (max(modelPlot) + 1)  # NOTE(review): appears unused in visible code — verify downstream
breakTimes_TOut = [None] * (max(modelPlot) + 1)
for m in modelPlot:
    frames = []  # one merged snapshot per time step
    for t in range(0, MIKEdsT_list[m].shape[0]):
        # Work on a copy: the original code aliased breakwaterT_PTS here, so
        # every time step (and model) mutated the shared source table.
        breakwaterPTS_merge = breakwaterT_PTS.copy()
        for i in range(0, MIKEdsT_list[m].shape[1]):
            colName = MIKEdsT_list[m].columns.values[i]
            # Column names encode point identity and parameter, apparently as
            # '<Name>: <parameter>' — TODO confirm against the dfs0 output.
            paramName = colName[colName.find(':', 5, -1) + 2:]
            if paramName not in breakwaterPTS_merge.columns:
                breakwaterPTS_merge[paramName] = np.nan  # scalar NaN broadcasts over all rows
            tmpName = colName[0:colName.find(':', 1)]
            breakwaterPTS_merge.loc[(breakwaterPTS_merge.Name == tmpName),
                                    paramName] = MIKEdsT_list[m].iloc[t, i]
        breakwaterPTS_merge['Time'] = MIKEdsT_list[m].index[t]
        frames.append(breakwaterPTS_merge)
    # Single concat is linear; the original concatenated pairwise per step (quadratic).
    breakTimes_TOut[m] = pd.concat(frames, ignore_index=True) if frames else None
#%% Format and save
# Export the merged toe-point table (breakTimesOut) and transect-point table
# (breakTimes_TOut) for each model to CSV, transposed so each point becomes a
# column and parameters/time steps run down the rows.
for m in modelPlot:
    saveTmp = breakTimesOut[m].copy()
    # Recover plain X/Y coordinate columns from the point geometry.
    saveTmp['X'] = saveTmp.geometry.x
    saveTmp['Y'] = saveTmp.geometry.y
    # Drop bookkeeping/geometry columns that are not wanted in the deliverable.
    saveTmp.drop(['vertex_par', 'vertex_p_1', 'angle', 'geometry', 'index_right',
                  'breakCrest_x', 'breakCrest_y', 'Length'], axis=1, inplace=True)
    saveTmp.sort_values(by=['FID', 'vertex_ind'], inplace=True, ignore_index=True)
    # Reorder columns
    # Move X/Y to the front; [0:-2] skips the X/Y columns just appended at the end.
    colNames = saveTmp.columns.values
    saveTmp = saveTmp[['X', 'Y', *colNames[0:-2]]]
    saveTmpT = saveTmp.transpose(copy=True)
    # Transect points
    saveTmp2 = breakTimes_TOut[m].copy()
    saveTmp2.drop(['geometry'], axis=1, inplace=True)
    saveTmp2T = saveTmp2.transpose(copy=True)
    # NOTE(review): output filenames come from the model-log "Short Description"
    # — assumes those strings are filesystem-safe; verify.
    saveTmpT.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                    'Toe' + runLog['Short Description'][m] + '.csv')
    saveTmp2T.to_csv('//srv-mad3.baird.com/Projects/13632.101 South Shore Breakwater/06_Models/02_Mike21SW/Results/' +
                     'Transect' + runLog['Short Description'][m] + '.csv')