diff --git a/EWR_Data/EWR_DataProc.py b/EWR_Data/EWR_DataProc.py
new file mode 100644
index 0000000..813e98c
--- /dev/null
+++ b/EWR_Data/EWR_DataProc.py
@@ -0,0 +1,511 @@
+#%% Plotting EWR Flume Tests
+
+# Alexander Rey, 2022
+
+#%% Import
+import pandas as pd
+import numpy as np
+import math
+import matplotlib.pyplot as plt
+import matplotlib.cm as cm
+import matplotlib.colors as mcolors
+import geopandas as gp
+gp.options.use_pygeos = True
+from shapely import geometry, ops
+
+# Map Plotting
+import cartopy.crs as ccrs
+import contextily as ctx
+
+# Interpolation
+import scipy as sp
+from scipy.interpolate import griddata
+from scipy.interpolate import LinearNDInterpolator, interp1d
+
+# Lowess interpolation
+import statsmodels.api as sm
+
+import pathlib as pl
+
+import datetime
+
#%% Read in centerline shapefile
# Read the modelling centerline (UTM zone 15N) from the project network share.
river_centerline = gp.read_file('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/03_Data/02_Physical/16_Waterline/Centerline_for_Modelling_UTMZ15.shp')

# Break multi-part geometries into one row per line segment.
river_centerlineExploded = river_centerline.explode(ignore_index=True)
river_centerlineExploded.reset_index(inplace=True)

# Reorder the exploded segments into along-river order by hand.
# NOTE(review): column 4 is presumably the geometry column after
# reset_index, and segment 8 is omitted from the list -- confirm both.
tempMulti = river_centerlineExploded.iloc[[5,0,1,2,3,4,6,7,9], 4]
# Put the sub-line coordinates into a list of sublists
outcoords = [list(i.coords) for i in tempMulti]
# Flatten the list of sublists and use it to make a new line
river_centerline_merge = geometry.LineString([i for sublist in outcoords for i in sublist])
river_centerline_merge_gpd = gp.GeoSeries(river_centerline_merge)

# Rebuild the merged line as a GeoDataFrame of its vertices (EPSG:32615).
river_centerline_merge_gpd2 =\
    gp.GeoDataFrame(geometry=gp.points_from_xy(
        river_centerline_merge.xy[0], river_centerline_merge.xy[1], crs="EPSG:32615"))

# Add distance along centerline
# Cumulative vertex-to-vertex distance; despite the "KM" name the values
# are in metres (cumsum of projected-CRS distances).
river_centerline_merge_gpd2['DistanceFromPrevious'] = river_centerline_merge_gpd2.distance(river_centerline_merge_gpd2.shift(1))
river_centerline_merge_gpd2['RiverKM'] = river_centerline_merge_gpd2['DistanceFromPrevious'].cumsum()
# The first vertex has no predecessor (shift produces NaN); anchor both
# columns at zero so the chainage starts at 0 m.
river_centerline_merge_gpd2.iloc[0, 1] = 0
river_centerline_merge_gpd2.iloc[0, 2] = 0
+
#%% Import Observations from database

# Read in combined dataset
obs_IN = pd.read_csv("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/output/combined dataset-SGJ.csv")
# Add Datetime
obs_IN['DateTime'] = pd.to_datetime(obs_IN.Sampledate_x)

# Add months
obs_IN['Month'] = obs_IN['DateTime'].dt.month

# Convert to geodataframe (lon/lat -> WGS84, then project to UTM 15N)
obs_gdf = gp.GeoDataFrame(obs_IN, geometry=gp.points_from_xy(
    obs_IN.loc[:, 'Longitude'], obs_IN.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")

# Adjust Units
# Normalize the mixed reporting units into two common columns:
#   Sample_NG/G for solids (ng/g) and Sample_NG/L for water (ng/L).
# Rows whose Unit matches none of these cases are left as NaN.
obs_gdf.loc[obs_gdf.Unit == 'NG/G', 'Sample_NG/G'] = obs_gdf.Samplevalue
obs_gdf.loc[obs_gdf.Unit == 'ng/g', 'Sample_NG/G'] = obs_gdf.Samplevalue
obs_gdf.loc[obs_gdf.Unit == 'UG/G', 'Sample_NG/G'] = obs_gdf.Samplevalue * 1000
obs_gdf.loc[obs_gdf.Unit == 'ug/g', 'Sample_NG/G'] = obs_gdf.Samplevalue * 1000
obs_gdf.loc[obs_gdf.Unit == 'MG/KG', 'Sample_NG/G'] = obs_gdf.Samplevalue * 1000
obs_gdf.loc[obs_gdf.Unit == 'mg/kg', 'Sample_NG/G'] = obs_gdf.Samplevalue * 1000
obs_gdf.loc[obs_gdf.Unit == 'NG/L', 'Sample_NG/L'] = obs_gdf.Samplevalue
obs_gdf.loc[obs_gdf.Unit == 'ng/L', 'Sample_NG/L'] = obs_gdf.Samplevalue
obs_gdf.loc[obs_gdf.Unit == 'UG/L', 'Sample_NG/L'] = obs_gdf.Samplevalue * 1000
obs_gdf.loc[obs_gdf.Unit == 'ug/L', 'Sample_NG/L'] = obs_gdf.Samplevalue * 1000
obs_gdf.loc[obs_gdf.Unit == 'MG/L', 'Sample_NG/L'] = obs_gdf.Samplevalue * 1000000
obs_gdf.loc[obs_gdf.Unit == 'mg/L', 'Sample_NG/L'] = obs_gdf.Samplevalue * 1000000

# Add centerline and reset index
# Attach the nearest centerline vertex (hence its RiverKM chainage) to each
# observation within 250 m of the line; farther observations get NaN RiverKM.
obs_gdf = gp.sjoin_nearest(river_centerline_merge_gpd2, obs_gdf, how='right', max_distance=250).reset_index()

# Sediment Hg GeoDataFrame where media is Sediment
# Total mercury in sediment/soil, post-2000, with a valid chainage.
obsMask = (((obs_gdf.Media_x == 'SED') | (obs_gdf.Media_x == 'SOIL')) &
           ((obs_gdf.Parameter == 'Mercury') | (obs_gdf.Parameter == 'Mercury (Hg) Total')
            | (obs_gdf.Parameter == 'Total Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)')) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())

obs_HgSed = obs_gdf.loc[obsMask, :]

# Sediment Hg GeoDataFrame for surface sediment
# Same filter, additionally restricted to shallow samples.
# NOTE(review): TopDepth_x < 5 -- units of TopDepth_x (cm vs m) are not
# visible here; confirm 5 means "surface".
obsMask = (((obs_gdf.Media_x == 'SED') | (obs_gdf.Media_x == 'SOIL')) &
           ((obs_gdf.Parameter == 'Mercury') | (obs_gdf.Parameter == 'Mercury (Hg) Total')
            | (obs_gdf.Parameter == 'Total Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)')) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & (obs_gdf.RiverKM.notna()) &
           ((obs_gdf.TopDepth_x < 5) | (obs_gdf.TopDepth_x.isna())))

obs_HgSurfaceSed = obs_gdf.loc[obsMask, :]

# obs_HgSurfaceSed.to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_HgSurfaceSed.geojson', driver="GeoJSON")
# Export the surface-sediment subset for use in GIS.
obs_HgSurfaceSed.loc[:, ['Sample_NG/G', 'Samplenumber_x', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_HgSurfaceSed2.shp')
+
# Group sediment core data to take average/min/max
# One row per (site, sampling date); aggregate the Hg concentration and
# average the coordinates/chainage of the core's sub-samples.
obs_HgSurfaceSed_Avg = obs_HgSurfaceSed.groupby(['Sitecode', 'DateTime']).agg({'Sample_NG/G': ['mean', 'min', 'max'],
                                                                               'RiverKM': ['mean'],
                                                                               'Longitude': ['mean'],
                                                                               'Latitude': ['mean'],
                                                                               'DateTime': ['mean']}).reset_index()
# Flatten Multiindex (e.g. ('Sample_NG/G', 'mean') -> 'Sample_NG/G_mean')
obs_HgSurfaceSed_Avg.columns = ["_".join(a) for a in obs_HgSurfaceSed_Avg.columns.to_flat_index()]

# Rename for shapefile (field names are limited to 10 characters)
obs_HgSurfaceSed_Avg.rename(columns={"Sample_NG/G_mean": "Mean_NG/G",
                                     "Sample_NG/G_max": "Max_NG/G",
                                     "Sample_NG/G_min": "Min_NG/G"}, inplace=True)

# Convert to geodataframe and save
obs_HgSurfaceSed_Avg_gdf = gp.GeoDataFrame(obs_HgSurfaceSed_Avg, geometry=gp.points_from_xy(
    obs_HgSurfaceSed_Avg.loc[:, 'Longitude_mean'], obs_HgSurfaceSed_Avg.loc[:, 'Latitude_mean'],
    crs="EPSG:4326")).to_crs(crs="EPSG:32615")

obs_HgSurfaceSed_Avg_gdf.loc[:, ['Mean_NG/G', 'Max_NG/G', 'Min_NG/G', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_HgSurfaceSed_Avg.shp')


# Group sediment core data to take average/min/max
# Same aggregation for the full (all-depth) sediment Hg dataset.
obs_HgSed_Avg = obs_HgSed.groupby(['Sitecode', 'DateTime']).agg({'Sample_NG/G': ['mean', 'min', 'max'],
                                                                 'RiverKM': ['mean'],
                                                                 'Longitude': ['mean'],
                                                                 'Latitude': ['mean'],
                                                                 'DateTime': ['mean']}).reset_index()
# Merge Index -- note this dataset uses '|' separators ('Sample_NG/G|mean'),
# unlike the '_' convention above.
obs_HgSed_Avg.columns = obs_HgSed_Avg.columns.map('|'.join).str.strip('|')

# Fix Names
obs_HgSed_Avg.rename(columns={"RiverKM|mean": "RiverKM", "Longitude|mean": "Longitude",
                              "Latitude|mean": "Latitude", "DateTime|mean": "DateTime"}, inplace=True)

# Create new GeoDataFrame
obs_HgSed_Avg = gp.GeoDataFrame(obs_HgSed_Avg, geometry=gp.points_from_xy(
    obs_HgSed_Avg.loc[:, 'Longitude'], obs_HgSed_Avg.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
+
+
+
# Water Hg GeoDataFrame where media is SurfaceWater (SW)
# Total mercury in surface water, post-2000, with a valid chainage.
obsMask = (((obs_gdf.Media_x == 'SW')) &
           ((obs_gdf.Parameter == 'Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)-Total')
            | (obs_gdf.Parameter == 'Total Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)')) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())

obs_HgWater = obs_gdf.loc[obsMask, :]

# Save
# obs_HgWater.loc[:, ['Sample_NG/L', 'Samplenumber_x', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_HgWater.shp')

# Sediment surface MeHg GeoDataFrame
# Methylmercury in shallow sediment/soil (same depth filter as Hg above).
obsMask = (((obs_gdf.Media_x == 'SED') | (obs_gdf.Media_x == 'SOIL')) &
           ((obs_gdf.Parameter == 'Methylmercury (as MeHg)') | (obs_gdf.Parameter == 'Methyl mercury')) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna() &
           ((obs_gdf.TopDepth_x < 5) | (obs_gdf.TopDepth_x.isna())))

obs_MeHgSed = obs_gdf.loc[obsMask, :]

# Save
# obs_MeHgSed.loc[:, ['Sample_NG/G', 'Samplenumber_x', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_MeHgSed.shp')


# Group sediment core data to take average/min/max (per site and date)
obs_MeHgSed_Avg = obs_MeHgSed.groupby(['Sitecode', 'DateTime']).agg({'Sample_NG/G': ['mean', 'min', 'max'],
                                                                     'RiverKM': ['mean'],
                                                                     'Longitude': ['mean'],
                                                                     'Latitude': ['mean'],
                                                                     'DateTime': ['mean']}).reset_index()
# Flatten Multiindex ('_' separator, matching obs_HgSurfaceSed_Avg)
obs_MeHgSed_Avg.columns = ["_".join(a) for a in obs_MeHgSed_Avg.columns.to_flat_index()]

# Rename for shapefile (10-character field-name limit)
obs_MeHgSed_Avg.rename(columns={"Sample_NG/G_mean": "Mean_NG/G",
                                "Sample_NG/G_max": "Max_NG/G",
                                "Sample_NG/G_min": "Min_NG/G"}, inplace=True)

# Create new GeoDataFrame
obs_MeHgSed_Avg_gdf = gp.GeoDataFrame(obs_MeHgSed_Avg, geometry=gp.points_from_xy(
    obs_MeHgSed_Avg.loc[:, 'Longitude_mean'], obs_MeHgSed_Avg.loc[:, 'Latitude_mean'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")

obs_MeHgSed_Avg_gdf.loc[:, ['Mean_NG/G', 'Max_NG/G', 'Min_NG/G', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_MeHgSed_Avg.shp')



# MeHg in Water
# Methylmercury in surface water, post-2000, with a valid chainage.
obsMask = (((obs_gdf.Media_x == 'SW')) &
           ((obs_gdf.Parameter == 'Methylmercury (as MeHg)') | (obs_gdf.Parameter == 'Methyl mercury')) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())

obs_MeHgWater = obs_gdf.loc[obsMask, :]

# Save
obs_MeHgWater.loc[:, ['Sample_NG/L', 'Samplenumber_x', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_MeHgWater.shp')
+
#%% Plot Observations
# Map of site-averaged sediment Hg over a basemap, in projected coordinates.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(30, 15))

axes.set_xlim(494649, 513647)
axes.set_ylim(5514653, 5525256)

# Add basemap
# NOTE(review): this URL embeds a Mapbox access token -- it should be moved
# to an environment variable/config rather than committed in source.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# NOTE(review): basemap is reprojected to EPSG:26915 (NAD83 / UTM 15N) while
# the data are EPSG:32615 (WGS84 / UTM 15N) -- near-identical, but confirm.
ctx.add_basemap(axes, source=mapbox, crs='EPSG:26915')

# BUG FIX: GeoDataFrame.plot takes the target Matplotlib axes via the ``ax``
# keyword; ``axes=`` is not a recognized parameter and is forwarded to the
# Matplotlib style kwargs, which fails at draw time.
obs_HgSed_Avg.plot(column='Sample_NG/G|mean', ax=axes, vmin=0, vmax=10000)

plt.show()
+
#%% Plot River Profile
# Qualitative colors for the per-bin line plots below.
# NOTE(review): cm.get_cmap is deprecated in Matplotlib >= 3.7 (use
# plt.get_cmap / matplotlib.colormaps) -- update when the environment allows.
name = "Set1"
cmap = cm.get_cmap(name)
colors = cmap.colors

# Create a HSV colormap sifted by 180 degrees
# create the "hsv" colormap
hsv = plt.get_cmap('hsv')

# shift the colormap so that red is in the middle
# Rolling the 256-entry lookup table by 128 rotates the hue wheel 180 deg,
# so mid-year months (summer) map to red and winter months to cyan.
shifted_hsv = mcolors.LinearSegmentedColormap.from_list('shifted_hsv',
                                                        np.roll(hsv(np.linspace(0.0, 1.0, 256)), 128, axis=0))

# Common x grid (distance along river, in metres) for the lowess curves.
interp_xvals = np.linspace(0, 100000, num=1000)
+
+# Bin averaging function
def average_in_bins(df, bin_width):
    """Average the rows of ``df`` over fixed-width bins of its numeric index.

    Bins are left-closed/right-open intervals of width ``bin_width`` covering
    ``[0, index.max() + bin_width)``; the mean of each column is returned per
    bin.
    """
    edges = range(0, int(df.index.max() + bin_width), bin_width)
    labels = pd.cut(df.index, bins=edges, right=False)
    return df.groupby(labels).mean()
+
+
# Four stacked panels sharing the along-river distance axis:
# surface-sediment Hg, sediment MeHg, water Hg, water MeHg.
fig, axes = plt.subplots(nrows=4, ncols=1, figsize=(8, 12), sharex=True)
axes[0].set_prop_cycle(color=colors)
axes[1].set_prop_cycle(color=colors)
axes[2].set_prop_cycle(color=colors)
axes[3].set_prop_cycle(color=colors)

# Plot surface Hg

# Plot using a different color for each month (shifted HSV wheel: red=summer)
obs_HgSurfaceSed.plot.scatter('RiverKM', 'Sample_NG/G', ax=axes[0], c='Month',
                              label='All Samples', vmin=1, vmax=12, cmap=shifted_hsv)

# obs_HgSurfaceSed.plot.scatter('RiverKM', 'Sample_NG/G', ax=axes[0], color='grey', label='All Samples')
# obs_HgSurfaceSed_Avg_gdf.plot.scatter('RiverKM_mean', 'Mean_NG/G', ax=axes[0], color='magenta', label='Site Averaged')

# obs_HgSurfaceSed_Index = obs_HgSurfaceSed.copy()
# obs_HgSurfaceSed_Index['RiverKM2'] = obs_HgSurfaceSed['RiverKM']
# obs_HgSurfaceSed_Index = obs_HgSurfaceSed_Index.set_index('RiverKM')
#
# binwidths = [10, 100, 1000, 10000]
# for binWidthIDX, binWidth in enumerate(binwidths):
#     average_in_bins(obs_HgSurfaceSed_Index, binWidth).dropna(subset=['Sample_NG/G']).\
#         plot.line('RiverKM2', 'Sample_NG/G', ax=axes[0],
#                   label=str(binWidth) + ' m binned Samples')

# Find and plot Lowess regression for each season
# Seasons are meteorological (DJF/MAM/JJA/SON); these lists are reused by
# the other three panels below.
SeasonMonths = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]]
SeasonNames = ['Winter', 'Spring', 'Summer', 'Fall']
SeasonColors = ['tab:blue', 'tab:green', 'darkred', 'tab:orange']

for seasonIDX, season in enumerate(SeasonMonths):
    # Skip Winter (too few under-ice samples to fit a meaningful curve)
    if seasonIDX == 0:
        continue

    # Fit lowess to this season's samples and evaluate on the common x grid
    seasonMask = obs_HgSurfaceSed['DateTime'].dt.month.isin(season)
    lowess = sm.nonparametric.lowess(obs_HgSurfaceSed.loc[seasonMask, 'Sample_NG/G'],
                                     obs_HgSurfaceSed.loc[seasonMask, 'RiverKM'],
                                     frac=0.5, xvals=interp_xvals, it=2)
    axes[0].plot(interp_xvals, lowess, label='Lowess:' + SeasonNames[seasonIDX],
                 linewidth=4, color=SeasonColors[seasonIDX])

# lowess = sm.nonparametric.lowess(obs_HgSurfaceSed['Sample_NG/G'], obs_HgSurfaceSed['RiverKM'],
#                                  frac=0.3, xvals=interp_xvals, it=2)
# axes[0].plot(interp_xvals, lowess, label='Lowess regression', linewidth=4, color='black')


axes[0].set_ylabel('Surface Sediment Total Hg [ng/g]')
axes[0].set_ylim([0, 20000])
axes[0].set_title('Surface Sediment Total Hg')
axes[0].legend()
+
# Panel 1: surface-sediment MeHg, colored by sampling month
obs_MeHgSed.plot.scatter('RiverKM', 'Sample_NG/G', ax=axes[1], c='Month',
                         label='All Samples', vmin=1, vmax=12, cmap=shifted_hsv)

# obs_MeHgSed_Avg.plot.scatter('RiverKM_mean', 'Mean_NG/G', ax=axes[1], color='magenta', label='Site Averaged')
axes[1].set_ylabel('Surface Sediment Total MeHg [ng/g]')

# obs_MeHgSed_Index = obs_MeHgSed.copy()
# obs_MeHgSed_Index['RiverKM2'] = obs_MeHgSed_Index['RiverKM']
# obs_MeHgSed_Index = obs_MeHgSed_Index.set_index('RiverKM')
#
# binwidths = [10, 100, 1000, 10000]
# for binWidthIDX, binWidth in enumerate(binwidths):
#     average_in_bins(obs_MeHgSed_Index, binWidth).dropna(subset=['Sample_NG/G']).\
#         plot.line('RiverKM2', 'Sample_NG/G', ax=axes[1],
#                   label=str(binWidth) + ' m binned Samples')

# Find and plot Lowess regression
# lowess = sm.nonparametric.lowess(obs_MeHgSed['Sample_NG/G'], obs_MeHgSed['RiverKM'],
#                                  frac=0.2, xvals=interp_xvals, it=2)
# axes[1].plot(interp_xvals, lowess, label='Lowess regression', linewidth=4)

# Find and plot Lowess regression for each season (winter skipped as above)
for seasonIDX, season in enumerate(SeasonMonths):
    # Skip Winter
    if seasonIDX == 0:
        continue

    seasonMask = obs_MeHgSed['DateTime'].dt.month.isin(season)
    lowess = sm.nonparametric.lowess(obs_MeHgSed.loc[seasonMask, 'Sample_NG/G'],
                                     obs_MeHgSed.loc[seasonMask, 'RiverKM'],
                                     frac=0.5, xvals=interp_xvals, it=2)
    axes[1].plot(interp_xvals, lowess, label='Lowess:' + SeasonNames[seasonIDX],
                 linewidth=4, color=SeasonColors[seasonIDX])

# axes[0].set_ylim([0, 20000])
axes[1].set_title('Surface Sediment Total MeHg')
axes[1].legend()
+
+
# Panel 2: water-column total Hg, colored by sampling month
obs_HgWater.plot.scatter('RiverKM', 'Sample_NG/L', ax=axes[2], c='Month',
                         label='All Samples', vmin=1, vmax=12,
                         cmap=shifted_hsv)
# Find and plot Lowess regression
# lowess = sm.nonparametric.lowess(obs_HgWater['Sample_NG/L'], obs_HgWater['RiverKM'],
#                                  frac=0.2, xvals=interp_xvals, it=2)
# axes[2].plot(interp_xvals, lowess, label='Lowess regression', linewidth=4)
# Find and plot Lowess regression for each season (winter skipped; note the
# smaller frac=0.4 smoothing window used for water samples)
for seasonIDX, season in enumerate(SeasonMonths):
    # Skip Winter
    if seasonIDX == 0:
        continue

    seasonMask = obs_HgWater['DateTime'].dt.month.isin(season)
    lowess = sm.nonparametric.lowess(obs_HgWater.loc[seasonMask, 'Sample_NG/L'],
                                     obs_HgWater.loc[seasonMask, 'RiverKM'],
                                     frac=0.4, xvals=interp_xvals, it=2)
    axes[2].plot(interp_xvals, lowess, label='Lowess:' + SeasonNames[seasonIDX],
                 linewidth=4, color=SeasonColors[seasonIDX])

axes[2].set_ylabel('Water Total Hg [ng/L]')
axes[2].set_ylim([0, 50])
axes[2].set_title('Water Total Hg')
axes[2].legend()
+
# Panel 3: water-column MeHg, colored by sampling month
obs_MeHgWater.plot.scatter('RiverKM', 'Sample_NG/L', ax=axes[3], c='Month',
                           label='All Samples', vmin=1, vmax=12, cmap=shifted_hsv)

# Find and plot Lowess regression
# lowess = sm.nonparametric.lowess(obs_MeHgWater['Sample_NG/L'], obs_MeHgWater['RiverKM'],
#                                  frac=0.2, xvals=interp_xvals, it=2)
# axes[3].plot(interp_xvals, lowess, label='Lowess regression', linewidth=4)
# Seasonal lowess (winter skipped; narrowest smoothing window, frac=0.25)
for seasonIDX, season in enumerate(SeasonMonths):
    # Skip Winter
    if seasonIDX == 0:
        continue

    seasonMask = obs_MeHgWater['DateTime'].dt.month.isin(season)
    lowess = sm.nonparametric.lowess(obs_MeHgWater.loc[seasonMask, 'Sample_NG/L'],
                                     obs_MeHgWater.loc[seasonMask, 'RiverKM'],
                                     frac=0.25, xvals=interp_xvals, it=2)
    axes[3].plot(interp_xvals, lowess, label='Lowess:' + SeasonNames[seasonIDX],
                 linewidth=4, color=SeasonColors[seasonIDX])

axes[3].set_ylabel('Water Total MeHg [ng/L]')
axes[3].set_ylim([0, 5])
axes[3].set_title('Water Total MeHg')
axes[3].legend()


# Shared x axis: distance along the merged centerline, in metres
axes[3].set_xlim([0, 100000])
axes[3].set_xlabel('Distance Along River [m]')


plt.show()
fig.savefig("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/Figures/RiverDataProfiles_Lowess_Months_RevB.png",
            bbox_inches='tight', dpi=200)
+
+
#%% Plot Profiles
# Sample depth vs. distance along river, colored by Hg concentration.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(7, 7))
obs_HgSed.plot.scatter('RiverKM', 'TopDepth_x', 12, 'Sample_NG/G', ax=axes, vmax=20000)

# Depth increases downward
axes.invert_yaxis()
axes.set_xlabel('Distance Along River [m]')
# NOTE(review): label says metres, but the surface filter above uses
# TopDepth_x < 5, which suggests centimetres -- confirm the unit.
axes.set_ylabel('Sample Depth [m]')
axes.set_xlim([0, 100000])

plt.show()

fig.savefig("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/Figures/HgDepthMonth.png",
            bbox_inches='tight', dpi=200)
+
+
#%% Interpolate lowess to grid for hg
# Read in model points and roughness
# NOTE(review): pandas' delim_whitespace is deprecated (2.2+); sep=r'\s+'
# is the forward-compatible spelling.
bathy_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Bed_Level.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)
rough_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Roughness.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)


# Nearest-neighbour interpolation of surface-sediment Hg onto the model grid
gridInterpNearest = griddata(np.array([obs_HgSurfaceSed.geometry.x, obs_HgSurfaceSed.geometry.y]).T,
                             obs_HgSurfaceSed['Sample_NG/G'], np.array([bathy_xyz.x, bathy_xyz.y]).T,
                             method='nearest')

# Assemble x, y, Hg columns for the model input file
gridInterpOut = np.vstack((bathy_xyz.x, bathy_xyz.y, gridInterpNearest))
gridInterpOut = np.transpose(gridInterpOut)

# Set Wetlands to zero
# Interpolate Roughness
# Wetland cells are identified by their roughness value (exactly 0.05).
RoughInterp = sp.interpolate.griddata(np.transpose(np.array([rough_xyz.x, rough_xyz.y])), rough_xyz.z,
                                      np.transpose(np.array([bathy_xyz.x, bathy_xyz.y])),
                                      method='nearest')

gridInterpOut[RoughInterp == 0.05, 2] = 0

np.savetxt('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Hg_Nearest.xyz',
           gridInterpOut, delimiter=" ")

# Linear interpolation (same pipeline, linear instead of nearest; points
# outside the convex hull of the observations come back NaN)
gridInterpNearest = griddata(np.array([obs_HgSurfaceSed.geometry.x, obs_HgSurfaceSed.geometry.y]).T,
                             obs_HgSurfaceSed['Sample_NG/G'], np.array([bathy_xyz.x, bathy_xyz.y]).T,
                             method='linear')

gridInterpOut = np.vstack((bathy_xyz.x, bathy_xyz.y, gridInterpNearest))
gridInterpOut = np.transpose(gridInterpOut)

# Set Wetlands to zero
gridInterpOut[RoughInterp == 0.05, 2] = 0

np.savetxt('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Hg_Linear.xyz',
           gridInterpOut, delimiter=" ")

# Find and plot Lowess regression
lowess = sm.nonparametric.lowess(obs_HgSurfaceSed['Sample_NG/G'], obs_HgSurfaceSed['RiverKM'],
                                 frac=0.3, it=2)

# NOTE(review): without xvals=, statsmodels' lowess returns its output
# sorted by the x (RiverKM) values, so lowess[:, 1] is paired here with
# geometry.x/y in the ORIGINAL row order -- these only line up if
# obs_HgSurfaceSed is already sorted by RiverKM. Verify; otherwise pass
# xvals=obs_HgSurfaceSed['RiverKM'] to keep the ordering.
gridInterpLowess= griddata(np.array([obs_HgSurfaceSed.geometry.x, obs_HgSurfaceSed.geometry.y]).T,
                           lowess[:, 1], np.array([bathy_xyz.x, bathy_xyz.y]).T,
                           method='nearest')

gridInterpOut = np.vstack((bathy_xyz.x, bathy_xyz.y, gridInterpLowess))
gridInterpOut = np.transpose(gridInterpOut)

# Set Wetlands to zero
gridInterpOut[RoughInterp == 0.05, 2] = 0

np.savetxt('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Hg_Lowess.xyz',
           gridInterpOut, delimiter=" ")
+
#%% Interpolate Hg to grid

# Read in model points and roughness
bathy_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Bed_Level.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)

rough_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Roughness.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)

# Loop through parameters
# Each case selects a (value, x, y) triple and an output file name, then
# runs the same nearest-neighbour interpolation / wetland-zeroing pipeline.
for i in range(0, 5):
    if i == 0:
        # Site-averaged sediment Hg
        dataOUT = obs_HgSed_Avg['Sample_NG/G|mean']
        dataOutx = obs_HgSed_Avg.geometry.x
        dataOuty = obs_HgSed_Avg.geometry.y
        dateFileName = 'HgSed_meanDB'
    elif i == 1:
        # Site-averaged sediment MeHg
        dataOUT = obs_MeHgSed_Avg['Sample_NG/G|mean']
        dataOutx = obs_MeHgSed_Avg.geometry.x
        dataOuty = obs_MeHgSed_Avg.geometry.y
        dateFileName = 'MeHgSed_meanDB'
    elif i == 2:
        # All water Hg samples
        dataOUT = obs_HgWater['Sample_NG/L']
        dataOutx = obs_HgWater.geometry.x
        dataOuty = obs_HgWater.geometry.y
        dateFileName = 'HgWater_allDB'
    elif i == 3:
        # All water MeHg samples
        dataOUT = obs_MeHgWater['Sample_NG/L']
        dataOutx = obs_MeHgWater.geometry.x
        dataOuty = obs_MeHgWater.geometry.y
        dateFileName = 'MeHgWater_allDB'
    elif i == 4:
        # Synthetic from Reed's Sheet
        # Exponential decay along the centerline: 10 ug/g at km 0 decaying
        # toward 1 ug/g downstream, converted to ng/g by the final *1000.
        dataOUT = ((10 - 1) * np.exp(-0.08 * np.array(river_centerline_merge_gpd2.loc[:, 'RiverKM']) / 1000) + 1) * 1000
        dataOutx = river_centerline_merge_gpd2.geometry.x
        dataOuty = river_centerline_merge_gpd2.geometry.y
        dateFileName = 'ReedHgSed'

    # Nearest-neighbour interpolation onto the model grid points
    gridInterp = griddata(np.array([dataOutx, dataOuty]).T, dataOUT, np.array([bathy_xyz.x, bathy_xyz.y]).T,
                          method='nearest')

    gridInterpOut = np.vstack((bathy_xyz.x, bathy_xyz.y, gridInterp))
    gridInterpOut = np.transpose(gridInterpOut)

    # Set Wetlands to zero
    # Interpolate Roughness (wetland cells carry roughness exactly 0.05;
    # recomputed every iteration although it is loop-invariant)
    RoughInterp = sp.interpolate.griddata(np.transpose(np.array([rough_xyz.x, rough_xyz.y])), rough_xyz.z, np.transpose(np.array([bathy_xyz.x, bathy_xyz.y])),
                                          method='nearest')

    gridInterpOut[RoughInterp==0.05, 2] = 0

    np.savetxt('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/' + dateFileName + '.xyz',
               gridInterpOut, delimiter=" ")
\ No newline at end of file
diff --git a/EWR_Data/waves.py b/EWR_Data/waves.py
new file mode 100644
index 0000000..67537cc
--- /dev/null
+++ b/EWR_Data/waves.py
@@ -0,0 +1,1437 @@
+"""
+
+Series of functions to manage wave information.
+
+Author:
+-------
+Nearshore Modeling Group
+Gabriel Garcia Medina
+ggarcia@coas.oregonstate.edu
+
+Log of edits:
+-------------
+April 2014 - Created module
+ Gabriel Garcia Medina (ggarcia@coas.oregonstate.edu)
+
+External dependencies:
+ numpy
+ scipy
+
+Internal dependencies:
+ gsignal
+
+"""
+
+from __future__ import division, print_function
+
+# Import generic modules
+import numpy as np
+import scipy
+import scipy.optimize
+import scipy.signal
+
+# Internal modules
+import pynmd.data.signal as gsignal
+import pynmd.data.angles as _gangles
+
+
+# ===============================================================================
+# Compute radian frequency from wave number
+# ===============================================================================
def idispersion(k, h):
    '''
    Inverse linear dispersion relation.

    USAGE:
    ------
    sigma = idispersion(k,h)

    INPUT:
    ------
    k : Wave number [m**-1]
    h : water depth [m]

    OUTPUT:
    -------
    sigma : radian frequency [Hz]

    '''

    # Linear theory: sigma**2 = g * k * tanh(k * h)
    grav = 9.81
    sigma = np.sqrt(grav * k * np.tanh(k * h))

    return sigma
+
+
+# ===============================================================================
+# Compute wave number
+# ===============================================================================
def dispersion(period, h, u=None):
    '''

    Computes the linear dispersion relation.

    USAGE:
    ------
    k = dispersion(period,h,u)

    Input:
    ------
    Period : Wave period [s]
    h : Water depth [m]
    u : (Optional) Ambient current velocity [m/s]

    RETURNS:
    --------
    k : Wave number (2*pi/wave_length) [m**-1]
        NaN if h <= 0 or if the Newton iteration fails to converge.

    '''

    if u is None:
        u = 0.0

    # Radian frequency
    sigma = (2.0 * np.pi) / period
    # Small positive initial guess for the Newton iteration
    kinit = 0.0001

    # Residual of the Doppler-shifted linear dispersion relation:
    #   sqrt(g k tanh(k h)) + u k - sigma = 0
    def f(k, sigma, h, u):
        y = (9.81 * k * np.tanh(k * h)) ** 0.5 - sigma + u * k
        return y

    if h <= 0:
        # No physical solution for zero/negative depth.
        # BUG FIX: np.NAN was removed in NumPy 2.0; np.nan is the
        # version-stable spelling.
        k = np.nan
    else:
        try:
            k = scipy.optimize.newton(f, kinit, fprime=None,
                                      args=(sigma, h, u), maxiter=100)
        except RuntimeError:
            # Newton iteration did not converge
            k = np.nan

    return k
+
+
+# ===============================================================================
+# Compute wave number using the Kirby and Dalrymple (1986) composite equation
+# ===============================================================================
def dispersion_kd86(period, h, wh, u=None):
    '''

    Return the wave number from the composite dispersion relation by
    Kirby and Dalrymple (1986).

    USAGE:
    ------
    k = dispersion_kd86(period,h,wh,u)

    Input:
    ------
    Period : Wave period [s]
    h : Water depth [m]
    wh : Wave height [m]
    u : (Optional) Ambient current velocity [m/s]

    RETURNS:
    --------
    k : Wave number (2*pi/wave_length) [m**-1]
        NaN if h <= 0 or if the Newton iteration fails to converge.

    CELERITY EQUATION:
    ------------------
    (c-u)^2 = g / k * (1 + f1 * eps^2 * D) tanh( k * h + f2 * eps)
    f1 = tanh(k*h)^5
    f2 = (k*h/sinh(k*h))^4
    eps = k*wh/2
    D = (8 + cosh(4*k*h) - 2 * tanh(k*h)^2)/(8*sinh(k*h)^4)

    REFERENCES:
    -----------
    Catalan, P., and M. C. Haller, 2008: Remote sensing of breaking wave phase
        speeds with application to non-linear depth inversions. Coastal
        Engineering, 55, 93 - 111.
    Kirby, J. T., and R. A. Dalrymple, 1986: An approximate model for nonlinear
        dispersion in monochromatic wave propagation models. Coastal
        Engineering, 9, 545-561.
    '''

    # Depth averaged ambient velocity
    if u is None:
        u = 0

    # Compute radian frequency
    sigma = (2.0 * np.pi) / period

    # Initialize wave number
    # kinit = 0.0001 # Manually initialize
    kinit = dispersion(period, h, u)  # Initialize with Airy dispersion relation

    # Kirby and Dalrymple (1986) dispersion function (residual form)
    def f(k, sigma, h, wh, u):
        eps = k * wh / 2
        f1 = np.tanh(k * h) ** 5
        f2 = (k * h / np.sinh(k * h)) ** 4
        D = ((8.0 + np.cosh(4.0 * k * h) - 2.0 * (np.tanh(k * h) ** 2)) /
             (8.0 * (np.sinh(k * h) ** 4)))
        y = ((9.81 * k * np.tanh(k * h + f2 * eps) * (1.0 + f1 * (eps ** 2) * D)) ** 0.5 +
             u * k - sigma)
        return y

    if h <= 0:
        # BUG FIX: np.NAN was removed in NumPy 2.0; use np.nan.
        k = np.nan
    else:
        try:
            k = scipy.optimize.newton(f, kinit, fprime=None,
                                      args=(sigma, h, wh, u), maxiter=100)
        except RuntimeError:
            # Newton iteration did not converge
            k = np.nan

    return k
+
+
+# ===============================================================================
+# Compute wave number using the Booij
+# ===============================================================================
def dispersion_booij(period, h, wh, u=None):
    '''

    Return the wave number from the composite dispersion relation by
    Booij (1981).

    USAGE:
    ------
    k = dispersion_booij(period,h,wh,u)

    Input:
    ------
    Period : Wave period [s]
    h : Water depth [m]
    wh : Wave height [m]
    u : (Optional) Ambient current velocity [m/s]

    RETURNS:
    --------
    k : Wave number (2*pi/wave_length) [m**-1]
        NaN if h <= 0 or if the Newton iteration fails to converge.

    CELERITY EQUATION:
    ------------------
    (c-u)^2 = g / k * tanh(k*(h+wh/2))

    REFERENCES:
    -----------
    Booij, N. 1981: Gravity waves on water with non-uniform depth and current.
        Tech. Rep. No. 81-1. Dept. Civil Engineering, Delft University of
        Technology.
    Catalan, P., and M. C. Haller, 2008: Remote sensing of breaking wave phase
        speeds with application to non-linear depth inversions. Coastal
        Engineering, 55, 93 - 111.
    '''

    # Depth averaged ambient velocity
    if u is None:
        u = 0.0

    # Compute radian frequency
    sigma = (2.0 * np.pi) / period

    # Initialize wave number
    # kinit = 0.0001 # Manually initialize
    # Initialize with the Airy relation at the amplitude-augmented depth
    kinit = dispersion(period, h + wh / 2, u)

    # Booij (1981) dispersion function (residual form)
    def f(k, sigma, h, wh, u):
        y = (9.81 * k * np.tanh(k * (h + wh / 2.0))) ** 0.5 + u * k - sigma
        return y

    if h <= 0:
        # BUG FIX: np.NAN was removed in NumPy 2.0; use np.nan.
        k = np.nan
    else:
        try:
            k = scipy.optimize.newton(f, kinit, fprime=None,
                                      args=(sigma, h, wh, u), maxiter=100)
        except RuntimeError:
            # Newton iteration did not converge
            k = np.nan

    return k
+
+
+# ===============================================================================
+# Compute dispersion relation from Nwogu's equations
+# ===============================================================================
def dispersion_nwogu(period, h, alpha):
    '''
    Compute dispersion relation for the Boussinesq equations based on
    Nwogu 1993.

    PARAMETERS:
    -----------
    period : Wave period [s]
    h : Water depth [m]
    alpha : Non-dimensional wave steepness parameter (see notes)

    RETURNS:
    --------
    k : Wave number [m**-1]
        NaN if h <= 0 or if the Newton iteration fails to converge.

    NOTES:
    ------
    alpha = 0 for celerity at the surface
    alpha = -1/3 solves the traditional depth averaged equations.
    alpha = -0.39 gives similar results to linear wave theory (Nwogu 1993)

    REFERENCES:
    -----------
    Nwogu, O., 1993: Alternative Form of Boussinesq Equations for Nearshore
        Wave Propagation. Journal of Waterway, Port, Coastal, and Ocean
        Engineering, 119, 618-638.

    '''

    # Compute radian frequency
    sigma = (2.0 * np.pi) / period

    # Initialize wave number
    # kinit = dispersion(period,h) # Linear wave theory
    kinit = sigma ** 2 / 9.81  # Deep water equivalent

    # kinit = 0.05 # Hard code parameter

    # Define the dispersion relation function (residual form)
    def bd(k, sigma, h, alpha):
        # y = 9.81 * (k**2) * h * (1.0 - 1.0/3.0 * (k*h)**2) - sigma**2
        y = (9.81 * (k ** 2) * h * (1.0 - (alpha + 1.0 / 3.0) * ((k * h) ** 2)) /
             (1.0 - alpha * ((k * h) ** 2)) - sigma ** 2)
        return y

    # Use Newton-Rhapson method to find the wave number
    if h <= 0:
        # BUG FIX: np.NAN was removed in NumPy 2.0; use np.nan.
        k = np.nan
    else:
        try:
            k = scipy.optimize.newton(bd, kinit, fprime=None,
                                      args=(sigma, h, alpha), maxiter=1000)
        except RuntimeError:
            # Newton iteration did not converge
            k = np.nan

    return k
+
+
+# ===============================================================================
+# Linear wave approximations
+# ===============================================================================
def shallow_water_depth(period):
    '''

    h_shallow = shallow_water_depth(period)

    Find where the waves with the given period enter shallow water according to
    linear wave theory (i.e. kh = pi/10).

    PARAMETERS:
    -----------
    period : wave period [s]

    RETURNS:
    --------
    h_shallow : Water depth where shallow water approximation is valid [m]
                NaN if the Newton iteration fails to converge.

    '''

    # Initialize shallow water depth
    hinit = 2.0

    # Define shallow water limit: residual of k(h)*h - pi/10 = 0
    def swd(h, period):
        y = dispersion(period, h) * h - np.pi / 10.0
        return y

    # Find zeros of the swd function
    try:
        h = scipy.optimize.newton(swd, hinit, fprime=None,
                                  args=(period,), maxiter=100)
    except RuntimeError:
        # BUG FIX: np.NAN was removed in NumPy 2.0; use np.nan.
        h = np.nan

    return h
+
+
def deep_water_period(depth):
    '''

    t_deep = deep_water_depth(depth)

    Find the period of waves entering intermediate water depth at a give depth
    according to linear wave theory (i.e. kh = pi)

    PARAMETERS:
    -----------
    depth : water depth [m]

    RETURNS:
    --------
    t_deep : waves of this period and shorter do not feel the bottom [s]

    '''

    # kh = pi marks the deep-water limit for this depth
    k_limit = np.pi / depth
    # Corresponding radian frequency, converted to a period
    sigma_limit = idispersion(k_limit, depth)
    return 2.0 * np.pi / sigma_limit
+
+
+# ===============================================================================
+# Wave length
+# ===============================================================================
def wave_length(period, h, verbose=True):
    '''
    Compute wave length using linear wave theory

    Parameters
    ----------
    period : wave period [s]
    h : water depth [m]
    verbose : if True, also print deep/shallow/linear wave lengths

    Results
    -------
    wl_int : real wave length [m]

    Screen output
    -------------
    wl_deep : deep water wave length [m]
    wl_sha : shallow water wave length [m]

    '''

    # Asymptotic limits (printed for reference only)
    wl_deep = 9.81 * period ** 2 / 2.0 / np.pi
    wl_sha = period * np.sqrt(9.81 * h)
    # Full linear-theory wave length via the dispersion relation
    k = dispersion(period, h)
    wl_int = 9.81 / 2.0 / np.pi * period ** 2 * np.tanh(k * h)

    if verbose:
        # BUG FIX: np.str was deprecated in NumPy 1.20 and removed in later
        # releases; the builtin str() produces identical output.
        print(' ')
        print('---------------------------------------------------------')
        print('Wave Length deep water approx    = ' + str(wl_deep) + ' m')
        print('Wave Length shallow water approx = ' + str(wl_sha) + ' m')
        print('Wave Length linear wave theory   = ' + str(wl_int) + ' m')
        print('---------------------------------------------------------')
        print(' ')

    return wl_int
+
+
+# ===============================================================================
+# Compute wave number using the Kirby and Dalrymple (1986) composite equation
+# ===============================================================================
def celerity(period, h, u=None):
    '''

    Find celerity and group velocity from linear wave theory

    USAGE:
    ------
    c,n,cg = celerity(period,h,u)

    Input:
    ------
    period : Wave period [s]
    h : Water depth [m]
    u : (Optional) Ambient current velocity [m/s]

    RETURNS:
    --------
    c : celerity [m/s]
    n : cg = c*n
    cg : group velocity [m/s]

    '''

    # Wave number from the (Doppler-shifted) linear dispersion relation
    k = dispersion(period, h, u)

    # Phase speed c = sigma / k
    sigma = 2.0 * np.pi / period
    c = sigma / k

    # Ratio of group to phase speed, and group velocity
    n = 0.5 + k * h / np.sinh(2 * k * h)
    cg = n * c

    return c, n, cg
+
+
+# ===============================================================================
+# Compute stokes velocities from linear wave theory
+# ===============================================================================
def uvstokes(Hwave, Dwave, Lwave, WDepth, VertDisc):
    '''
    Compute stokes velocities

    Parameters
    ----------
    Hwave : Wave height [m]
    Dwave : Wave direction
    Lwave : Wave length [m]
    WDepth : Water depth [m]
    VertDisc : Number of vertical layers

    Returns
    -------
    ustokes, vstokes

    NOTE(review): the 1027.0 * 9.81 (rho*g-like) factor makes this look more
    like an energy-flux quantity than the classic Stokes drift profile --
    confirm the intended formulation against the original reference.
    '''

    # Wave number and the corresponding radian frequency
    wave_num = 2 * np.pi / Lwave
    radian_freq = idispersion(wave_num, WDepth)

    # Vertical profile terms: cosh(2k(h - z)) over sinh(2kh)
    z_levels = np.linspace(0, WDepth, VertDisc)
    nume = np.cosh(2.0 * wave_num * (WDepth - z_levels))
    deno = np.sinh(2.0 * wave_num * WDepth)

    # Cartesian wave-number components from the nautical direction
    theta = (270 - Dwave) * np.pi / 180
    kx = wave_num * np.cos(theta)
    ky = wave_num * np.sin(theta)

    # Common amplitude factor shared by both components
    scale = (Hwave ** 2) / 4.0 * 1027.0 * 9.81 / radian_freq

    # Zonal and meridional stokes velocity profiles
    ustokes = scale * kx * nume / deno
    vstokes = scale * ky * nume / deno

    return ustokes, vstokes
+
+
+# ===============================================================================
+# TMA Spectrum
+# ===============================================================================
def tma(freq_peak, gamma, h, Hmo, freq_min=0.01, freq_max=1.0, freq_int=0.001,
        zeroth=True):
    '''
    Function to generate TMA spectrum

    Parameters
    ----------
    freq_peak : Peak frequency [Hz]
    gamma : Peak enhancement factor (3.3 is a good guess)
    h : water depth [m]
    Hmo : Significant wave height [m]

    Optional Parameters
    -------------------
    freq_min : Minimum frequency to compute the spectrum [Hz]
    freq_max : Maximum frequency to compute the spectrum [Hz]
    freq_int : Frequency interval [Hz]
    zeroth : Prepend zeroth frequency (Default = True)

    Default values are 0.01, 1.0, and 0.001 Hz, respectively.

    Returns
    -------
    s_tma : Tma spectrum as a function of frequency.
    freq : Frequency axis [Hz]

    References
    ----------
    Bouws, E., H. Gunther, W. Rosenthal, and C. L. Vincent, 1985: Similarity
      of the wind wave spectrum in finite depth water 1. Spectral form.
      Journal of Geophysical Research, 90 (C1), 975-986.
    Hughes, S. 1985: The TMA shallow-water spectrum, description and
      applications. Technical Report CERC-84-7.

    Notes
    -----
    - The units of the spectrum still do not make sense to me, must be verified.
    - No scaling applied, alpha = 1 (Hmo is currently unused as a result)
    - Zeroth frequency is added to the spectrum

    '''

    # Compute frequency vector
    freq = np.arange(freq_min, freq_max + freq_int, freq_int)

    # Peak-enhancement width: 0.07 below the peak frequency, 0.09 above it
    delta = np.ones_like(freq) * 0.07
    delta[freq > freq_peak] = 0.09

    # Compute alpha parameter (equation 26,27) TMA report
    # wlen = 2.0*np.pi/dispersion(freq_peak**-1,h)
    # alpha = (2.0 * np.pi* Hmo / wlen)**2
    alpha = 1.0

    # TMA depth-limitation factor phi (piecewise in omh)
    omh = 2.0 * np.pi * freq * (h / 9.81) ** 0.5
    phi = 1.0 - 0.5 * (2.0 - omh) ** 2
    phi[omh < 1.0] = (0.5 * omh ** 2)[omh < 1.0]
    phi[omh > 2.0] = 1.0

    # Generate spectrum: JONSWAP shape multiplied by the depth factor phi
    s_tma = (alpha * 9.81 ** 2 * (2.0 * np.pi) ** -4 * (freq ** -5) * phi *
             np.exp(-1.25 * (freq_peak / freq) ** 4) *
             gamma ** np.exp(-1.0 * ((freq - freq_peak) ** 2)
                             / (2.0 * (delta ** 2) * freq_peak ** 2)))

    # Add zeroth frequency
    # BUG FIX: the original referenced the undefined name `spec_tma` here,
    # raising NameError whenever zeroth=True (the default).
    if zeroth:
        s_tma = np.r_[np.array([0.0]), s_tma]
        freq = np.r_[np.array([0.0]), freq]

    # End of function
    return s_tma, freq
+
+
+# ===============================================================================
+# JONSWAP Spectrum
+# ===============================================================================
def jonswap(freq_peak, Hmo, gamma=3.3, freq_min=0.01, freq_max=1.0,
            freq_int=0.001, goda=False, zeroth=True):
    '''
    Function to generate JONSWAP spectrum

    Parameters
    ----------
    freq_peak : Peak frequency [Hz]
    Hmo : Significant wave height [m]

    Optional Parameters
    -------------------
    gamma : Peak enhancement factor (defaults to 3.3)
    freq_min : Minimum frequency to compute the spectrum [Hz]
    freq_max : Maximum frequency to compute the spectrum [Hz]
    freq_int : Frequency interval [Hz]
    goda : Use Y. Goda's approximation to the Jonswap spectrum.
           If false we force the scaling of the spectrum to give the
           same wave height passed as argument.
    zeroth : Prepend zeroth frequency (Default = True)

    Returns
    -------
    spec_jonswap : JONSWAP spectrum as a function of frequency.
    freq : Frequency vector [Hz]

    References
    ----------
    Many, but a good description can be found in:
        Y. Goda, Random Seas and Design of Maritime Structures.

    Notes
    -----
    - Default frequency values are 0.01, 1.0, and 0.001 Hz.

    '''

    # Create frequency vector
    freq = np.arange(freq_min, freq_max + freq_int, freq_int)

    # Spectral width parameter: 0.07 below the peak, 0.09 above it
    sigma = np.ones_like(freq) * 0.07
    sigma[freq > freq_peak] = 0.09

    if goda:
        # Goda's beta parameter already carries the Hmo**2 scaling
        beta = (0.0624 / (0.230 + 0.0336 * gamma - 0.185 * ((1.9 + gamma) ** -1)) *
                (1.094 - 0.01915 * np.log(gamma)))

        # Generate spectrum
        spec_jonswap = (beta * (Hmo ** 2) * (freq_peak ** 4) * (freq ** -5) *
                        np.exp(-1.25 * ((freq / freq_peak) ** -4)) *
                        gamma ** (np.exp(-1.0 * (freq / freq_peak - 1.0) ** 2 /
                                         (2.0 * sigma ** 2))))

    else:

        # Generate the unscaled spectral shape
        spec_jonswap = ((freq ** -5) *
                        np.exp(-1.25 * ((freq / freq_peak) ** -4)) *
                        gamma ** (np.exp(-1.0 * (freq / freq_peak - 1.0) ** 2 /
                                         (2.0 * sigma ** 2))))

        # Scale so that 4*sqrt(m0) recovers the requested wave height
        # (applies only to the non-Goda branch, per the docstring contract)
        alpha = 1.0 / 16.0 * Hmo ** 2 / np.trapz(spec_jonswap, freq)
        spec_jonswap = spec_jonswap * alpha

    # Add zeroth frequency and expand the spectrum onto a grid starting at 0 Hz
    if zeroth:
        spec_jonswap_old = np.r_[np.array([0.0]), spec_jonswap]
        freq_old = np.r_[np.array([0.0]), freq]

        freq = np.arange(0.0, freq_max + freq_int, freq_int)
        spec_jonswap = np.interp(freq, freq_old, spec_jonswap_old)

    # End of function
    return spec_jonswap, freq
+
+
+# ===============================================================================
+# Add directional distribution to spectrum
+# ===============================================================================
def directional_spreading(spec, peak_dir, m, dirs=None):
    """
    This function computes a directionally spread spectrum from a frequency
    spectrum passed to the function.

    PARAMETERS:
    -----------
    spec : frequency spectrum (as vector)
    peak_dir : Peak wave direction in Nautical convention [degrees]
    m : Directional width [cos(theta)**2m]
    dirs : (Optional) vector of directions. If not given, the spectrum
           will be computed every 5 degrees.

    RETURNS:
    --------
    dir_spec : Directional spectrum in the same units given by the input
               spectrum by degrees.
    dirs : Vector with directions

    NOTES:
    ------
    Nautical convention refers to the direction the waves (wind) are coming
    (is blowing) from with respect to the true north measured clockwise.
    To recover the significant wave height
      4.004 * np.trapz(np.sum(dir_spec,axis=-1)*(dirs[2]-dirs[1]),freq)**0.5
    """

    # Default directional grid: every five degrees.
    # FIX: the original used `if dirs == None` inside a bare try/except;
    # with array inputs that comparison is elementwise and the truth test
    # raises, silently swallowed by the except. An identity check is safe
    # for every input type.
    if dirs is None:
        dirs = np.arange(0, 360, 5)
    else:
        dirs = np.asarray(dirs)

    # Change directions to radians for internal computations
    peak_dir_r = np.pi / 180.0 * peak_dir
    dirs_r = np.pi / 180.0 * dirs

    # Compute directional spread: cos**(2m) about the peak direction
    g = np.cos(0.5 * (dirs_r - peak_dir_r)) ** (2 * m)

    # Normalize the directional spread so that no energy is added
    # Scaling is done in direction space to account for the units
    # In other words dir_spec will be of [m2/Hz-deg]
    dth = dirs[2] - dirs[1]
    g /= (np.sum(g) * dth)

    # Spread every frequency bin over direction (outer product replaces
    # the original per-frequency loop)
    dir_spec = np.outer(spec, g)

    # Return directional spectrum
    return dir_spec, dirs
+
+
+# ===============================================================================
+# Compute bulk wave parameters from water surface elevation time series
+# ===============================================================================
def eta_bulk_params(eta, ot, band_ave=False, window=False, IGBand=[0.005, 0.05]):
    """
    Compute bulk wave parameters from water surface elevation time series.

    Parameters:
    -----------
    eta : Water surface elevation time series at a point [m]
    ot : Time vector [s]
    band_ave : (Optional) Bin average stencil
    window : (Optional) Application of a hanning window (True or False)
    IGBand : (Optional) Infragravity wave frequency band
             defaults to 0.005-0.05 Hz
    Output:
    -------
    Dictionary containing
      freq : Spectral frequencies [Hz]
      spec : Wave variance spectrum [m**2/Hz]
      cl : 95% confidence levels on the spectral estimates
      Hs : Significant wave height [m]
      H1 : Mean wave height [m]
      Tp : Peak wave period [s]
      Tp_fit : Peak wave period computed from second order polynomial fit
               near Tp[s]
      Tm01 : First moment wave period [s]
      Tm02 : Second moment wave period [s]
      Te : Energy period [s]
      Sw : Spectral width (m0*m2/m1/m1 - 1)**0.5
      H1_t : Mean wave height computed in the time domain [m]
      T1_t : Mean wave period computed in the time domain [s]
      Tm01IG : First moment wave period over the infragravity frequency band
      TpIG : Peak wave period over the infragravity frequency band [s]
      skew_t : Wave skewness from time domain

    Notes:
    ------
    mn are the different spectral moments

    """

    # Remove mean from the data
    etaw = eta - eta.mean()

    # Variance of the original (un-windowed) series; used below to rescale
    # the spectrum if a taper window is applied.
    var_ts = np.var(etaw)

    # Time-domain wave statistics from zero-crossing analysis
    [wh, wp, zcind] = whwpts(ot, eta)

    # Wave skewness (normalized third moment)
    skew_t = np.mean(etaw ** 3) / (np.mean(etaw ** 2) ** 1.5)

    # Data windowing
    # FIX: scipy.signal.hanning was removed in SciPy 1.13;
    # scipy.signal.windows.hann is the same (symmetric) window.
    # NOTE(review): this function references `scipy` directly -- confirm the
    # module imports scipy.signal and scipy.stats at the top of the file.
    if window:
        tmpwindow = scipy.signal.windows.hann(etaw.shape[0])
        etaw *= tmpwindow

    # Compute variance spectrum
    freq, spec = gsignal.psdraw(etaw, np.mean(ot[1:] - ot[:-1]))

    # If data has been windowed we must boost the variance of the spectrum
    # to match the original time series
    if window:
        # Compute variance of from the psd
        var_psd = np.sum(spec) * (freq[2] - freq[1])

        # Adjust variance from the windowed time series
        spec *= var_ts / var_psd

    # Band averaging if requested
    if band_ave:
        [freq, spec] = gsignal.band_averaging(spec, freq, band_ave)
    else:
        band_ave = 1

    # 95% confidence levels on the spectral estimates (chi-squared with
    # 2*band_ave degrees of freedom), used to estimate the noise threshold.
    conf_lev = 0.95
    alpha = 1.0 - conf_lev
    cl_upper = scipy.stats.chi2.ppf(alpha / 2, band_ave * 2)
    cl_lower = scipy.stats.chi2.ppf(1.0 - alpha / 2.0, band_ave * 2)
    cl = np.array([spec - spec * band_ave * 2.0 / cl_lower,
                   spec * band_ave * 2.0 / cl_upper - spec])

    # Frequency-domain bulk parameters plus the spectrum itself
    bwp = fspec_bulk_params(freq, spec, IGBand)
    bwp['freq'] = freq
    bwp['spec'] = spec
    bwp['cl'] = cl

    # Attach the time-domain statistics
    bwp['H1_t'] = np.nanmean(wh)
    bwp['T1_t'] = np.nanmean(wp)
    bwp['skew_t'] = skew_t

    return bwp
+
+
+# ===============================================================================
+# Function to compute bulk wave parameters from frequency spectrum
+# ===============================================================================
def fspec_bulk_params(freq, spec, IGBand=(0.005, 0.05), zeroth=True):
    """
    Function to compute bulk wave parameters from frequency spectrum

    Parameters:
    -----------
    freq : Vector of spectral frequencies [Hz]
    spec : Frequency spectrum [m2/Hz]
    IGBand : Infragravity wave frequency cutoff
             defaults to 0.005-0.05 Hz
    zeroth : If true will remove the first frequency from the analysis

    Returns:
    --------
    Dictionary containing bulk wave parameters
      Hs : Significant wave height [m]
      H1 : Mean wave height [m]
      Tp : Peak wave period [s]
      Tp_fit : Peak wave period computed from second order polynomial fit
               near Tp [s]
      Tm01 : First moment wave period [s]
      Tm02 : Second moment wave period [s]
      Te : Energy period [s]
      Sw : Spectral width (m0*m2/m1/m1 - 1)**0.5
      Tm01IG : First moment wave period over the infragravity frequency band
      TpIG : Peak wave period over the infragravity frequency band [s]
      HsIG : Significant wave height in the infragravity frequency band [m]

    Notes:
    ------
    - mn are the different spectral moments
    - The docstring previously stated Sw = (...)**2; the code computes the
      square root, which matches the standard spectral-width definition.
    - The mutable list default for IGBand was replaced by a tuple; indexing
      behavior is unchanged and list arguments still work.

    """

    # Remove the zeroth frequency (assumed to carry no physical energy)
    if zeroth:
        spec = spec[1:]
        freq = freq[1:]

    # Spectral moments m0, m1, m2 and m-1
    moment0 = np.trapz(spec, freq, axis=-1)
    moment1 = np.trapz(spec * freq, freq, axis=-1)
    moment2 = np.trapz(spec * (freq) ** 2, freq, axis=-1)
    momentn1 = np.trapz(spec * (freq) ** -1, freq, axis=-1)

    # Wave heights
    Hs = 4.004 * (moment0) ** 0.5
    H1 = Hs * 2.0 / 3.0

    # Spectral width
    Sw = (moment0 * moment2 / moment1 / moment1 - 1) ** 0.5

    # Energy period
    Te = momentn1 / moment0

    # Mean wave period
    Tm01 = moment0 / moment1

    # Second moment period
    Tm02 = (moment0 / moment2) ** 0.5

    # Peak wave period
    freq_max_ind = np.argmax(spec)
    Tp = freq[freq_max_ind] ** -1

    # Peak wave period from a quadratic fit around the spectral peak;
    # undefined (NaN) when the peak sits at either end of the grid.
    if freq_max_ind == 0 or freq_max_ind == freq.shape[0] - 1:
        Tp_fit = np.nan
    else:
        minfreq = freq_max_ind - 1
        maxfreq = freq_max_ind + 2
        tmp_fit = np.polyfit(freq[minfreq:maxfreq], spec[minfreq:maxfreq], 2)
        Tp_fit = (-1.0 * tmp_fit[1] / (2.0 * tmp_fit[0])) ** -1

    # Infragravity wave band parameters ---------------------------------------
    igInd = np.logical_and(freq >= IGBand[0], freq < IGBand[1])
    moment0 = np.trapz(spec[igInd], freq[igInd], axis=-1)
    moment1 = np.trapz(spec[igInd] * freq[igInd], freq[igInd], axis=-1)
    Tm01IG = moment0 / moment1

    HsIG = 4.004 * (moment0) ** 0.5

    freq_max_ind = np.argmax(spec[igInd])
    TpIG = freq[igInd][freq_max_ind] ** -1

    # Exit function
    return {'Hs': Hs, 'H1': H1, 'Tp': Tp, 'Tp_fit': Tp_fit, 'Tm01': Tm01, 'Tm02': Tm02,
            'Te': Te, 'Sw': Sw, 'Tm01IG': Tm01IG, 'TpIG': TpIG, 'HsIG': HsIG}
+
+
+# ===============================================================================
+# Bulk parameters from a directional spectrum
+# ===============================================================================
def spec_bulk_params(freq, dirs, spec, IGBand=[0.005, 0.05], zeroth=True):
    """
    Function to compute bulk wave parameters from frequency-direction spectrum

    Parameters:
    -----------
    freq : Vector of spectral frequencies [Hz]
    dirs : Vector of directions (nautical convention) [Deg or Rad]
    spec : Wave spectrum [m2/Hz/Deg or m2/Hz/Deg]
           The shape must be [freq,dirs]
    IGBand : Infragravity wave frequency cutoff
             defaults to 0.005-0.05 Hz
    zeroth : If true will remove the first frequency from the analysis

    Returns:
    --------
    Dictionary containing bulk wave parameters
      Hs : Significant wave height [m]
      H1 : Mean wave height [m]
      Tp : Peak wave period [s]
      Tp_fit : Peak wave period computed from second order polynomial fit
               near Tp[s]
      Tm01 : First moment wave period [s]
      Tm02 : Second moment wave period [s]
      Te : Energy period [s]
      Sw : Spectral width (m0*m2/m1/m1 - 1)**0.5
      Tm01IG : First moment wave period over the infragravity frequency band
      TpIG : Peak wave period over the infragravity frequency band [s]
      HsIG : Significant wave height in the infragravity frequency band [m]
      Dm : Mean wave direction, second moment (Kuik et al. 1988)
      Dp : Peak wave direction (computed from directional spectrum)
      Dp_fit : Peak wave direction (computed using second order
               polynomial fit around the peak of the directional spectrum)

    Notes:
    ------
    - mn are the different spectral moments
    - First frequency will be discarded from the analysis. It is assumed to be
      the zeroth-frequency.

    REFERENCES:
    -----------
    Kuik, A.J., G.P. Van Vledder, and L.H. Holthuijsen, 1988: A method for the
      routine analysis of pitch-and-roll buoy wave data. Journal of Physical
      Oceanography, 18(7), pp.1020-1034.
    """

    # Direction-only spectrum: integrate frequency out of [freq, dirs]
    dirSpec = np.trapz(spec, freq, axis=0)
    # Directional grid spacing (assumed regular)
    dth = np.abs(dirs[2] - dirs[1])

    # Find peak wave direction
    Dp = dirs[np.argmax(dirSpec)]
    dirDict = dict(Dp=Dp)

    # Peak wave direction using a quadratic fit over the largest directions.
    # The directional axis wraps around, so a peak at either end of the grid
    # borrows the value from the opposite end to build the three-point fit.
    dir_max_ind = np.argmax(dirSpec)
    if dir_max_ind == 0:
        tmp_fit = np.polyfit([dirs[0] - dth, dirs[0], dirs[1]],
                             [dirSpec[-1], dirSpec[0], dirSpec[1]], 2)
    elif dir_max_ind == dirs.shape[0] - 1:
        tmp_fit = np.polyfit([dirs[-2], dirs[-1], dirs[-1] + dth],
                             [dirSpec[-2], dirSpec[-1], dirSpec[0]], 2)
    else:
        minDirInd = dir_max_ind - 1
        maxDirInd = dir_max_ind + 2
        tmp_fit = np.polyfit(dirs[minDirInd:maxDirInd], dirSpec[minDirInd:maxDirInd], 2)
    # Vertex of the fitted parabola, wrapped into [0, 360)
    Dp_fit = -1.0 * tmp_fit[1] / (2.0 * tmp_fit[0])
    dirDict.update(Dp_fit=_gangles.wrapto360(Dp_fit))

    # Find mean wave direction (Kuik et al 1988): first directional Fourier
    # moments in cartesian convention, rotated back to nautical below
    a1 = np.trapz(dirSpec * np.cos((270.0 - dirs) * np.pi / 180), dirs)
    b1 = np.trapz(dirSpec * np.sin((270.0 - dirs) * np.pi / 180), dirs)
    # Mean wave direction in nautical coordinates
    Dm = _gangles.wrapto360(270.0 - np.arctan2(b1, a1) * 180.0 / np.pi)
    dirDict.update(Dm=Dm)

    # Get the parameter from the frequency spectrum
    # freqSpec = np.trapz(spec,dirs,axis=-1)
    freqSpec = np.sum(spec, axis=-1) * dth
    bp = fspec_bulk_params(freq, freqSpec, IGBand, zeroth=zeroth)
    bp.update(dirDict)

    return bp
+
+
+# ===============================================================================
+# Wave Height and Period From time series
+# ===============================================================================
def whwpts(t, x, d='up'):
    '''
    Function to compute the wave height and wave period from a given time
    series.

    USAGE:
    ------
    [wh,wp,zcind] = whwpts(t,x,d)

    PARAMETERS:
    -----------
    t : Time vector
    x : Water surface elevation time series
    d : Accepts 'up' or 'down' for zero upcrossing or downcrossing,
        respectively.

    RETURNS:
    --------
    wh : Wave height time series
    wp : Wave period time series
    zcind : zero-crossing index

    METHODS:
    --------
    1. Find zero crossings
    2. Find time difference between zero crossing to get wp
    3. Find the amplitude of the time series between each zero crossing to find
       wh.

    NOTES:
    ------
    - x and t must have the same length

    '''

    # Zero-crossing indices delimit the individual waves
    zcross = gsignal.zero_crossing(x, d=d)

    # Wave period: elapsed time between consecutive crossings
    wp = t[zcross[1:]] - t[zcross[:-1]]

    # Wave height: crest-to-trough range within each individual wave
    wh = np.zeros_like(wp)
    for idx, (i0, i1) in enumerate(zip(zcross[:-1], zcross[1:])):
        segment = x[i0:i1]
        wh[idx] = np.max(segment) - np.min(segment)

    return wh, wp, zcross
+
+
+# ===============================================================================
+# Iribarren number
+# ===============================================================================
def iribarren(m, H, wl, verbose=False):
    """
    Function to compute the Iribarren number (aka surf similarity parameter) for
    deep water conditions

    USAGE:
    ------
    ssp = iribarren(m,H,wl,verbose)

    PARAMETERS:
    -----------
    m : Beach slope
    H : Wave height [m]
    wl : Wave length [m]
    verbose : Display result in the terminal (Optional defaults to False)

    RETURNS:
    --------
    ssp : surf similarity parameter

    FORMULA:
    --------
    ssp = tan(m)/(H/wl)**0.5

    NOTES:
    ------
    - ssp > 3.3: Reflective beach (surging or collapsing breakers)
    - 0.5 < ssp < 3.3 : Intermediate beach (plunging breaker)
    - ssp < 0.5 : Dissipative beach (spilling breaker)

    """

    # Compute iribarren number
    ssp = np.tan(m) / (H / wl) ** 0.5

    # Display message
    # FIX: np.str was removed in NumPy 1.24; use the builtin str instead.
    if verbose:
        if ssp >= 3.3:
            print('ssp = ' + str(ssp) + ': Reflective conditions')
        elif ssp < 0.5:
            print('ssp = ' + str(ssp) + ': Dissipative conditions')
        else:
            print('ssp = ' + str(ssp) + ': Intermediate conditions')

    return ssp
+
+
+# ===============================================================================
+# Battjes Parameter
+# ===============================================================================
def battjes04(m, h, igFreq, verbose=False):
    '''
    Function to compute the normalized bed slope proposed by Battjes et al 2004

    USAGE:
    ------
    nbd = battjes04(m,h,igFreq,verbose)

    PARAMETERS:
    -----------
    m : Beach slope
    h : Characteristic water depth in shoaling region [m]
    igFreq : infragravity wave frequency [1/s]
    verbose : Display result in the terminal (Optional defaults to False)

    RETURNS:
    --------
    nbd : Normalized bed slope

    FORMULA:
    --------
    nbd = m/(2 * pi * igFreq) * (g/h)**0.5

    NOTES:
    ------
    The next values are given if the breaking depth is substituted for h
    - nbd < 0.3: Mild slope regime, were breaking generation is not as important
    - nbd > 0.3: Steep slope regime

    References:
    -----------
    Battjes , J. A., H. J. Bakkenes, T. T. Janssen, and A. R. van Dongeren,
      2004: Shoaling of subharmonic gravity waves. Journal of Geophysical
      Research, 109, C02009, doi:10.1029/2003JC001863.

    '''

    # Compute the normalized bed slope
    nbd = m / (2 * np.pi * igFreq) * (9.81 / h) ** 0.5

    # Display message
    # FIX: np.str was removed in NumPy 1.24; use the builtin str instead.
    if verbose:
        if nbd < 0.3:
            print('nbd = ' + str(nbd) + ': Mild slope')
        else:
            print('nbd = ' + str(nbd) + ': Steep slope')

    return nbd
+
+
+# ===============================================================================
+# Baldock Parameter
+# ===============================================================================
def baldock12(m, h, igFreq, H, wl, verbose=False):
    '''
    Function to compute the surfbeat similarity parameter by Baldock 2012

    USAGE:
    ------
    sbs = baldock12(m,h,igFreq,H,wl,verbose)

    PARAMETERS:
    -----------
    m : Beach slope
    h : Characteristic water depth in shoaling region [m]
    igFreq : infragravity wave frequency [1/s]
    H : Shortwave offshore wave height [m]
    wl : Offshore wave length [m]
    verbose : Display result in the terminal (Optional defaults to False)

    RETURNS:
    --------
    sbs : Surfbeat similarity parameter

    FORMULA:
    --------
    sbs = m/(2 * pi * igFreq) * (g/h)**0.5 * (H/wl)**0.5

    NOTES:
    ------
    Really no clear guidance is given, however the smallest the number the least
    effective breakpoint generation is.

    References:
    -----------
    Baldock, T. E., 2012: Dissipation of incident forced long waves in the
      surf zone - Implications for the concept of "bound" wave realease at
      short wave breaking. Coastal Engineering, 276-285.

    '''

    # Compute the surfbeat similarity parameter
    sbs = m / (2 * np.pi * igFreq) * (9.81 / h) ** 0.5 * (H / wl) ** 0.5

    # Display message
    # FIX: np.str was removed in NumPy 1.24; use the builtin str instead.
    if verbose:
        print('sbs = ' + str(sbs))

    return sbs
+
+
+# ===============================================================================
+# Reverse shoal waves
+# ===============================================================================
def deep_water_equivalent(H, h, period):
    """
    Code to find the equivalent deep water linear wave parameters based on
    intermediate to shallow water conditions

    PARAMETERS:
    -----------
    H : Wave height [m]
    h : Water depth [m]
    period : Wave period [s]

    RETURNS:
    --------
    Deep water equivalents
    H0 : Wave height [m]
    k0 : Wave number [m-1]

    NOTES:
    ------
    1. Will not work if waves have broken already

    """

    # Group velocity at the given depth and its deep water limit (g*T/4pi)
    _, _, cg_local = celerity(period, h)
    cg_deep = (9.81 * period) / (4 * np.pi)

    # Conservation of energy flux gives the deep water wave height
    H0 = H * ((cg_local / cg_deep) ** 0.5)

    # Deep water wave number: k0 = sigma**2 / g
    k0 = ((2.0 * np.pi / period) ** 2) / 9.81

    return H0, k0
+
+
def shoal(H1, h1, period, h0):
    """
    Code to shoal waves based on linear wave theory

    PARAMETERS:
    -----------
    H1 : Wave height [m]
    h1 : Water depth [m]
    period : Wave period [s]
    h0 : Water depth to find equivalent conditions [m]

    RETURNS:
    --------
    Equivalent parameters at h0
    H0 : Wave height [m]
    k0 : Wave number [m-1]

    NOTES:
    ------
    1. Will not work if waves have broken already

    """

    # Group velocities at the current and target depths
    _, _, cg_from = celerity(period, h1)
    _, _, cg_to = celerity(period, h0)

    # Conservation of energy flux between the two depths
    H0 = H1 * ((cg_from / cg_to) ** 0.5)

    # Wave number at the target depth
    k0 = dispersion(period, h0)

    return H0, k0
+
+
+# ===============================================================================
+# Compute IEC Bulk Parameters from Spectrum
+# ===============================================================================
def iec_params(freq, dirs, spec, dpt, rho=1025.0):
    """
    Function to compute IEC recommended wave energy parameters from
    frequency-direction spectrum

    Parameters:
    -----------
    freq : Vector of spectral frequencies [Hz]
    dirs : Vector of directions (nautical convention) [Deg]
    spec : Wave spectrum [m2/Hz/Deg]
           The shape must be [time,npts,freq,dirs]
    dpt : Water depth of shape [npts]
    rho : (Optional) Density of water, defaults to 1025.0 [kg/m3]

    Returns:
    --------
    Dictionary containing bulk wave parameters
      Hs : Significant wave height [m]
      OWP : Omnidirectional wave power [W/m]
      Jth : Maximum directionally resolved wave power [W/m]
      Th : Direction of maximum directionally resolved wave power [deg]
      d : Directionality coefficient (Jth/OWP)
      Te : Energy period [s]
      Sw : Spectral width (m0*m-2/m-1/m-1 - 1)**2

    Notes:
    ------
    - mn are the different spectral moments
    - Directional grid is assumed to be regular
      need to implement a trapz for when this is not true.
    - Pass everything as arrays:
      >>> dpt = np.array([100.0]) # If you only have one point

    REFERENCES:
    -----------
    Marine energy - Wave, tidal and other water current converters -
      Part 101: Wave energy resource assessment and characterization.
      IEC TS 62600-101
    """

    # Figure out the size of the array [time, npts, freq, dirs]
    _, npts, nfreq, ndirs = np.shape(spec)

    # Directions must be sorted (if they exist)
    if ndirs > 1:
        sortInd = np.argsort(dirs)
        spec = spec[..., sortInd]
        dirs = dirs[sortInd]

    # Group velocity for each point and frequency (linear wave theory)
    # FIX: np.NAN was removed in NumPy 2.0; np.nan is the canonical spelling.
    cg = np.zeros((npts, nfreq)) * np.nan
    for aa in range(cg.shape[0]):
        for bb in range(cg.shape[1]):
            _, _, cg[aa, bb] = celerity(1 / freq[bb], dpt[aa])

    # Non directional parameters -----------------------------------------------
    if ndirs > 1:
        # freqSpec = np.trapz(spec,dirs*np.pi/180,axis=-1)
        dth = dirs[1] - dirs[0]  # regular directional grid assumed
        freqSpec = np.sum(spec, axis=-1) * dth
    else:
        freqSpec = spec[..., 0]

    # Spectral moments m0, m-1, m-2
    moment0 = np.trapz(freqSpec, freq, axis=-1)
    momentn1 = np.trapz(freqSpec * (freq) ** -1, freq, axis=-1)
    momentn2 = np.trapz(freqSpec * (freq) ** -2, freq, axis=-1)

    # Significant wave height
    bp = {}
    bp['Hs'] = 4.004 * (moment0 ** 0.5)

    # Energy Period
    bp['Te'] = momentn1 / moment0

    # Spectral width
    bp['Sw'] = (moment0 * momentn2 / momentn1 / momentn1 - 1.0) ** 0.5

    # Omnidirectional wave power (per time step and point)
    owp = np.zeros_like(bp['Hs']) * np.nan
    # Time loop
    for aa in range(spec.shape[0]):
        # Point loop
        for bb in range(spec.shape[1]):
            owp[aa, bb] = np.trapz(freqSpec[aa, bb, :] * cg[bb, :], freq)

    # Get directional properties -----------------------------------------------

    if ndirs > 1:
        # Directionally resolved wave power through a plane
        cg = np.repeat(np.expand_dims(cg, axis=-1), ndirs, axis=-1)
        jth = np.zeros_like(owp)
        th = np.zeros_like(owp) * np.nan
        # Time loop
        for aa in range(spec.shape[0]):
            # Point loop
            for bb in range(spec.shape[1]):
                # Energy flux per direction bin
                dirSpec = np.trapz(spec[aa, bb, ...] * cg[bb, ...], freq, axis=-2)
                # Scan candidate planes; keep the direction maximizing power
                for cc in range(ndirs):
                    fac = np.cos(np.pi / 180.0 * (dirs[cc] - dirs))
                    fac[fac < 0] = 0.0  # only energy crossing the plane counts
                    tmpJth = np.sum(dirSpec * fac, axis=-1) * dth
                    if tmpJth > jth[aa, bb]:
                        jth[aa, bb] = tmpJth
                        th[aa, bb] = dirs[cc]

    else:
        th = np.zeros_like(owp) * np.nan
        jth = np.zeros_like(owp) * np.nan

    # Take care of units here (multiply by rho*g to get W/m)
    owp *= 9.81 * rho
    jth *= 9.81 * rho

    # Allocate in arrays
    bp['Th'] = th
    bp['OWP'] = owp
    bp['Jth'] = jth
    bp['d'] = jth / owp  # Directionality coefficient

    return bp
diff --git a/MEDS/.idea/.gitignore b/MEDS/.idea/.gitignore
new file mode 100644
index 0000000..13566b8
--- /dev/null
+++ b/MEDS/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
diff --git a/MEDS/.idea/MEDS.iml b/MEDS/.idea/MEDS.iml
new file mode 100644
index 0000000..d0876a7
--- /dev/null
+++ b/MEDS/.idea/MEDS.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/MEDS/.idea/inspectionProfiles/Project_Default.xml b/MEDS/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 0000000..7c85937
--- /dev/null
+++ b/MEDS/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/MEDS/.idea/inspectionProfiles/profiles_settings.xml b/MEDS/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 0000000..105ce2d
--- /dev/null
+++ b/MEDS/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/MEDS/.idea/misc.xml b/MEDS/.idea/misc.xml
new file mode 100644
index 0000000..df8259a
--- /dev/null
+++ b/MEDS/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/MEDS/.idea/modules.xml b/MEDS/.idea/modules.xml
new file mode 100644
index 0000000..eca0000
--- /dev/null
+++ b/MEDS/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/MEDS/.idea/vcs.xml b/MEDS/.idea/vcs.xml
new file mode 100644
index 0000000..6c0b863
--- /dev/null
+++ b/MEDS/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Mustique/WestCoastDataTemplate_V5.py b/Mustique/WestCoastDataTemplate_V5.py
index 6959fcc..271cfb8 100644
--- a/Mustique/WestCoastDataTemplate_V5.py
+++ b/Mustique/WestCoastDataTemplate_V5.py
@@ -76,7 +76,12 @@ importPaths = ['//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Resear
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221115 WQ sampling/Great House',
None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221115 WQ sampling/Old Queens Fort',
- '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221127 WQ Sampling/Crane']
+ '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221127 WQ Sampling/Crane',
+ '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Great House',
+ None,
+ '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Old Queens Fort',
+ '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Crane',
+ '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Caribee']
siteNames = ['Great House',
'Greensleeves',
@@ -111,7 +116,12 @@ siteNames = ['Great House',
'Great House',
None,
'Old Queens Fort',
- 'Crane']
+ 'Crane',
+ 'Great House',
+ None,
+ 'Old Queens Fort',
+ 'Crane',
+ 'Caribee']
timeLabels= ['Before Construction',
'Before Construction',
@@ -146,7 +156,12 @@ timeLabels= ['Before Construction',
'November',
None,
'November',
- 'November']
+ 'November',
+ 'February',
+ None,
+ 'February',
+ 'February',
+ 'February']
wave_bts_file = [
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
@@ -182,13 +197,18 @@ wave_bts_file = [
None,
None,
None,
+ None,
+ None,
+ None,
+ None,
+ None,
None]
# %% Read in site shapefile
-siteShp = gp.read_file('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/SitePolygons_Crane.shp')
+siteShp = gp.read_file('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/SitePolygons_Crane_Caribee.shp')
siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")
-for s in [33]: #range(15, 27):
+for s in [34, 36, 37, 38]: #range(15, 27):
## Define master import path
importPath = importPaths[s]
siteName = siteNames[s]
@@ -217,7 +237,7 @@ for s in [33]: #range(15, 27):
Obs_dat =Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()]
# Set Time Zone for Sensor. Sometimes it is set to UTC, sometimes it is set to EST
- if s < 15 or s == 30 or s == 32 or s == 33:
+ if s < 15 or s >= 30:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados').dt.tz_convert('UTC')
else:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('UTC')
@@ -233,7 +253,7 @@ for s in [33]: #range(15, 27):
#convert GPS data to geodataframe
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))
- if s == 30 or s == 32 or s == 33:
+ if s >= 30:
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date1'].astype(str) + ' ' + GPS_gdf['Time1'].astype(str))
else:
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))
@@ -295,7 +315,12 @@ for s in [33]: #range(15, 27):
GFS_Lon = -59.442
GFS_Lat = 13.1075
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[3, 1])
-
+ elif siteName == 'Caribee':
+ axXlim = (217929, 218345)
+ axYlim = (1446528, 1446818)
+ GFS_Lon = -59.442
+ GFS_Lat = 13.1075
+ Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[4, 1])
# Set min and max times using conductivity
if Obs_geo['inArea'].any():
@@ -457,8 +482,8 @@ for s in [33]: #range(15, 27):
parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae]
# paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0]
# paramMax = [1.0, 36.0, 34.0, 31.0, 20.0, 1.0]
- paramMin = [0.0, 32.0, 100, 26.0, 0, 0]
- paramMax = [6, 36.0, 130, 34.0, 150.0, 12000]
+ paramMin = [0.0, 32.0, 100, 24.0, 0, 0]
+ paramMax = [6, 36.5, 130, 34.0, 150.0, 12000]
fig.patch.set_facecolor('white')
@@ -496,8 +521,9 @@ for s in [33]: #range(15, 27):
# Find the local maximums for Turbidity
if param == 'CH6:Turbidity(NTU)':
+ ##### Adjust "order" to control the number of turbidity points that are selected
ilocs_max.append(argrelextrema(OBS_smoothed.values,
- np.greater_equal, order=8, mode='wrap')[0])
+ np.greater_equal, order=12, mode='wrap')[0])
# Add start and end points?
# ilocs_max = np.insert(ilocs_max, 0, 10)
@@ -676,7 +702,11 @@ plotIDXsLoop = []
for i in range(0, 1):
summTable = None
# plotIDXs = plotIDXsLoop[i]
- plotIDXs = np.arange(i, 25, 3)
+ # plotIDXs = np.arange(i, 25, 3)
+
+ #plotIDXs = [0, 3, 6, 12, 15, 18, 21, 24, 27, 30, 34] # Great House
+ # plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32, 36] # Old Queen's Fort
+    plotIDXs = [1, 4, 7, 10, 13, 16, 19] # Greensleeves
for s, plotIDX in enumerate(plotIDXs):
## Define master import path
@@ -712,7 +742,9 @@ for i in range(2, 3):
summTable = None
# plotIDXs = np.arange(i, 27, 3)
#plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32]
- plotIDXs = [0, 3, 6, 12, 15, 18, 21, 24, 27, 30]
+ # plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32, 36] # Old Queen's Fort
+    plotIDXs = [1, 4, 7, 10, 13, 16, 19] # Greensleeves
+
plotDates = []
plotTable = np.empty([10, len(plotIDXs)])
diff --git a/NTC_DFM/.idea/.name b/NTC_DFM/.idea/.name
index 1bba3a5..8a230fb 100644
--- a/NTC_DFM/.idea/.name
+++ b/NTC_DFM/.idea/.name
@@ -1 +1 @@
-EFDC_compare.ipynb
\ No newline at end of file
+NTC_PlottingD3D_2023.py
\ No newline at end of file
diff --git a/NTC_DFM/.idea/NTC_DFM.iml b/NTC_DFM/.idea/NTC_DFM.iml
index d9578f6..8f0515e 100644
--- a/NTC_DFM/.idea/NTC_DFM.iml
+++ b/NTC_DFM/.idea/NTC_DFM.iml
@@ -2,7 +2,7 @@
-
+
diff --git a/NTC_DFM/.idea/misc.xml b/NTC_DFM/.idea/misc.xml
index aa659e7..b189f70 100644
--- a/NTC_DFM/.idea/misc.xml
+++ b/NTC_DFM/.idea/misc.xml
@@ -1,4 +1,4 @@
-
+
\ No newline at end of file
diff --git a/NTC_DFM/ADCP_Plot_v4_AJMR.py b/NTC_DFM/ADCP_Plot_v4_AJMR.py
index 783dddd..a2803b8 100644
--- a/NTC_DFM/ADCP_Plot_v4_AJMR.py
+++ b/NTC_DFM/ADCP_Plot_v4_AJMR.py
@@ -12,16 +12,21 @@ import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
import geopandas as gp
-gp.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'
+# gp.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'
import scipy as sp
import scipy.ndimage
#from astropy.convolution import Gaussian2DKernel
import contextily as ctx
import os
-from shapely.geometry import Point
+from shapely.geometry import Point, MultiPoint
+from shapely.ops import nearest_points
+
+import datetime as dt
+import pytz
import pickle
+
# %% Read in data
dataPath = '//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/05_Analyses/07 ADCP/NC_CurrentMeter_All_Phase1_all_data_2012_05_20/'
@@ -134,67 +139,71 @@ gdf.loc[:, df.columns != 'geometry'].to_xarray().to_netcdf(
'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/Transects.nc')
# %% Load in moored data
-df_moored_data = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_moored_2013_05_20.xlsx',
- sheet_name='mag_all_moored')
+gdf_moored = dict()
-# Shift col names to put second back in
-colIN = list(df_moored_data.columns)
-colOUT = colIN[0:6] + ['second'] + colIN[6:-1]
-df_moored_data.columns = colOUT
+# Loop through directions
+for direction in ['u_all_moored', 'u_all_moored', 'mag_all_moored' , 'dir_all_moored']:  # NOTE(review): 'u_all_moored' listed twice — was 'v_all_moored' intended for the second entry? Confirm sheet names.
+ df_moored_data = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_moored_2013_05_20.xlsx',
+ sheet_name=direction)
-### Moored current meter locations from here:
-### file://srv-ott3/Projects/11934.201%20Newtown%20Creek%20TPP%20%E2%80%93%20Privileged%20and%20Confidential/03_Data/01_BkgrdReports/NC_DRAFT_DSR_Submittal_No_3_2013-07-03.pdf
-### Table 3-1, PDF page 65
-### Locations change between deployment 1-9 and 10/11
+ # Shift col names to put second back in
+ colIN = list(df_moored_data.columns)
+ colOUT = colIN[0:6] + ['second'] + colIN[6:-1]
+ df_moored_data.columns = colOUT
-### YSI from 2021 FRMR report Pg 385
+ ### Moored current meter locations from here:
+ ### file://srv-ott3/Projects/11934.201%20Newtown%20Creek%20TPP%20%E2%80%93%20Privileged%20and%20Confidential/03_Data/01_BkgrdReports/NC_DRAFT_DSR_Submittal_No_3_2013-07-03.pdf
+ ### Table 3-1, PDF page 65
+ ### Locations change between deployment 1-9 and 10/11
-df_moored = pd.DataFrame(
- {'depOrig': ['NC083CM', 'NC081CM', 'NC082CM', 'NC086CM', 'EK023CM',
- 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
- 'depCurr': ['NC086CM', 'NC087CM', 'NC088CM', 'NC089CM', 'EK023CM',
- 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
- 'Northing': [208519.95, 206083.24, 203835.10, 201381.55, 200827.33,
- 208809.66, 205547.85, 203870.2, 201684.6, 200860.91, 200336.12],
- 'Easting': [996198.97, 1000959.45, 1004387.42, 1005283.07, 1004644.90,
- 996586.22, 1001028.85, 1004501.4, 1005027.4, 1004516.53, 1005535.44],
- 'bedElev': [-16, -17, -20, -18, -20, 0, 0, 0, 0, 0, 0]})
-gdf_moored_loc = gp.GeoDataFrame(
- df_moored, geometry=gp.points_from_xy(df_moored.Easting, df_moored.Northing), crs="EPSG:2263")
-gdf_moored_loc.geometry = gdf_moored_loc.geometry.to_crs("EPSG:32118")
+ ### YSI from 2021 FRMR report Pg 385
-# Loop through Station IDs for deployments 1-9 and assign locations
-# for d in range(1,10):
-# depMask = df_moored_data['deployment'] == ('dep' + str(d))
-# for stationIDX, station in enumerate(df_moored['depOrig']):
-# stationMask = (df_moored_data['station'] == station) & depMask
-#
-# # Assign geography to station plus deployment
-# df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
-# df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
+ df_moored = pd.DataFrame(
+ {'depOrig': ['NC083CM', 'NC081CM', 'NC082CM', 'NC086CM', 'EK023CM',
+ 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
+ 'depCurr': ['NC086CM', 'NC087CM', 'NC088CM', 'NC089CM', 'EK023CM',
+ 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
+ 'Northing': [208519.95, 206083.24, 203835.10, 201381.55, 200827.33,
+ 208809.66, 205547.85, 203870.2, 201684.6, 200860.91, 200336.12],
+ 'Easting': [996198.97, 1000959.45, 1004387.42, 1005283.07, 1004644.90,
+ 996586.22, 1001028.85, 1004501.4, 1005027.4, 1004516.53, 1005535.44],
+ 'bedElev': [-16, -17, -20, -18, -20, 0, 0, 0, 0, 0, 0]})
+ gdf_moored_loc = gp.GeoDataFrame(
+ df_moored, geometry=gp.points_from_xy(df_moored.Easting, df_moored.Northing), crs="EPSG:2263")
+ gdf_moored_loc.geometry = gdf_moored_loc.geometry.to_crs("EPSG:32118")
-# Loop through Station IDs for deployments 10-11 and assign locations
-for d in range(1,12):
- depMask = df_moored_data['deployment'] == 'dep' + str(d)
- for stationIDX, station in enumerate(df_moored['depCurr']):
- stationMask = (df_moored_data['station'] == station) & depMask
+    # Loop through Station IDs for deployments 1-10 and assign locations (dep10 is reassigned again by the 10-11 loop below)
+ for d in range(1, 11):
+ depMask = df_moored_data['deployment'] == ('dep' + str(d))
+ for stationIDX, station in enumerate(df_moored['depOrig']):
+ stationMask = (df_moored_data['station'] == station) & depMask
- # Assign geography to station plus deployment
- df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
- df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
+ # Assign geography to station plus deployment
+ df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
+ df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
-# Create geodataframe and convert to USSP
-gdf_moored = gp.GeoDataFrame(
- df_moored_data, geometry=gp.points_from_xy(df_moored_data.Easting, df_moored_data.Northing), crs="EPSG:2263")
+ # Loop through Station IDs for deployments 10-11 and assign locations
+ for d in range(10,12):
+ depMask = df_moored_data['deployment'] == 'dep' + str(d)
+ for stationIDX, station in enumerate(df_moored['depCurr']):
+ stationMask = (df_moored_data['station'] == station) & depMask
-#convert data to CRS of D3D
-gdf_moored.geometry = gdf_moored.geometry.to_crs("EPSG:32118")
+ # Assign geography to station plus deployment
+ df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
+ df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
-gdf_moored['date'] = pd.to_datetime(gdf_moored['year'].astype(str) + '-' +
- gdf_moored['month'].astype(str).str.zfill(2) + '-' +
- gdf_moored['day'].astype(str).str.zfill(2) + ' ' +
- gdf_moored['hour'].astype(str).str.zfill(2) + ':' +
- gdf_moored['minute'].astype(str).str.zfill(2))
+ # Create geodataframe and convert to USSP
+ gdf_moored[direction] = gp.GeoDataFrame(
+ df_moored_data, geometry=gp.points_from_xy(df_moored_data.Easting, df_moored_data.Northing), crs="EPSG:2263")
+
+ #convert data to CRS of D3D
+ gdf_moored[direction].geometry = gdf_moored[direction].geometry.to_crs("EPSG:32118")
+
+ gdf_moored[direction]['date'] = pd.to_datetime(gdf_moored[direction]['year'].astype(str) + '-' +
+ gdf_moored[direction]['month'].astype(str).str.zfill(2) + '-' +
+ gdf_moored[direction]['day'].astype(str).str.zfill(2) + ' ' +
+ gdf_moored[direction]['hour'].astype(str).str.zfill(2) + ':' +
+ gdf_moored[direction]['minute'].astype(str).str.zfill(2))
# %% ADCP1
@@ -214,7 +223,7 @@ fig, axes = plt.subplots(nrows=5, ncols=1, figsize=(16, 8))
fig.tight_layout(pad=2)
# Loop through Station IDs for deployments 1-9 and assign locations
-for d in range(1,12):
+for d in range(11):  # NOTE(review): iterates dep0-dep10, but deployments are dep1-dep11 (prior code used range(1,12)); also gdf_moored is now a dict keyed by sheet name, so gdf_moored['deployment'] below will raise KeyError — confirm
depMask = gdf_moored['deployment'] == ('dep' + str(d))
for stationIDX, station in enumerate(df_moored['depCurr']):
stationMask = (gdf_moored['station'] == station) & depMask
@@ -244,6 +253,112 @@ plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADCP_2012.png',
# bbox_inches='tight', dpi=300)
+#%% Read in report obs
+reportObs = pd.read_csv('C:/Users/arey/files/Projects/Newtown/NTC_Obs_NC_086_Report.csv',
+ header=None, parse_dates=[0], index_col=0)
+
+# Average duplicate dates
+reportObs = reportObs.groupby(reportObs.index).mean()
+
+#%% Read in report EFDC
+reportEFDC = pd.read_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/06_Models/02_EFDC/04_Model_Comparisons/NC086_DAV_Report_2016_EFDC.csv',
+ header=None, parse_dates=[0], index_col=0)
+
+
+#%% Read in EFDC data
+efdc7_df = pd.read_csv(
+ '//ott-athena.baird.com/D/11934.201_Newtown_Creek_TPP/EFDC_Results/RevisedEFDC/2012_Mon7/READ_FROM_EFDC_GRAPHICS_OUT_BIN.CSV')
+
+
+# Read in EFDC grid
+efdc_grid_df = pd.read_csv('//ott-athena.baird.com/F/2019_FMRM_Deliverable/INPUT_FILES/GRID_FILES/NC_ER_lxly_20140129.inp',
+ delim_whitespace=True, skiprows=4,
+ names=['I', 'J', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
+
+efdc_grid_gdf = gp.GeoDataFrame(
+ efdc_grid_df, geometry=gp.points_from_xy(efdc_grid_df.X, efdc_grid_df.Y), crs="EPSG:32118")
+
+
+efdc7_merged = pd.merge(efdc7_df, efdc_grid_df, how='left', left_on=['I_MOD','J_MOD'],
+ right_on = ['I','J']).drop(columns=[
+ ' DUMPID', 'END_hr', 'I_MOD', 'J_MOD', 'I_MOD', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
+efdc7_merged['date'] = pd.to_datetime([dt.datetime(2012, 7, 1, 00, 00, 00) +
+ dt.timedelta(hours=h) for h in efdc7_merged['ST_hr']])
+efdc7_merged.set_index('date', inplace=True)
+efdc7_merged.index = efdc7_merged.index.tz_localize(
+ pytz.timezone('EST')) #Convert to UTC
+del efdc7_df
+
+efdc_merged_gdf = gp.GeoDataFrame(
+ efdc7_merged, geometry=efdc7_merged['geometry'], crs="EPSG:32118")
+del efdc7_merged
+
+#%% Find nearest grid point to EFDC ADCP Station
+station = 'NC086CM'
+stationLoc = gdf_moored_loc.loc[0, 'geometry']
+nearest_geoms = nearest_points(stationLoc, MultiPoint(efdc_grid_gdf.geometry))
+station_gdf = efdc_merged_gdf.loc[efdc_merged_gdf['geometry']==nearest_geoms[1]]
+
+#%% Plot single deployment for comparison
+fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(12, 8), sharex=True)
+
+station = 'NC086CM'
+stationMask = (gdf_moored['mag_all_moored']['station'] == station)
+# axes.plot(gdf_moored.loc[stationMask, 'date'],
+# gdf_moored.iloc[:, 8:50].loc[stationMask].mean(axis=1).multiply(3.28084), 'k')
+
+
+# Plot vector component in arbitrary direction
+for plotDir in [250]:
+ ADCPtime = pd.to_datetime(gdf_moored['mag_all_moored'].loc[stationMask, 'date'].values).tz_localize(
+ pytz.timezone('EST'))
+ axes[0].plot(ADCPtime, gdf_moored['mag_all_moored'].iloc[:, 8:50].loc[stationMask].mean(
+ axis=1).multiply(3.28084).multiply(
+ np.cos(np.deg2rad(gdf_moored['dir_all_moored'].iloc[:, 8:50].loc[
+ stationMask].mean(axis=1)) - np.deg2rad(plotDir))), label =
+ 'ADCP Obs at ' + str(plotDir) + ' deg')
+
+# axes.plot(gdf_moored.loc[stationMask, 'date'],
+# gdf_moored.iloc[:, 8:50].loc[stationMask].mean(axis=1).multiply(3.28084), 'k')
+
+# Plot EFDC
+axes[1].plot(station_gdf.index,
+ np.sqrt(station_gdf.loc[:, ['U_VEL1', 'U_VEL5', 'U_VEL10']].mean(axis=1)**2+
+ station_gdf.loc[:, ['V_VEL1', 'V_VEL5', 'V_VEL10 ']].mean(axis=1)**2)*3.28084 *
+ np.cos(np.arctan2(station_gdf.loc[:,
+ ['U_VEL1', 'U_VEL5', 'U_VEL10']].mean(axis=1),
+ station_gdf.loc[:,
+ ['V_VEL1', 'V_VEL5', 'V_VEL10 ']].mean(axis=1)) - np.deg2rad(plotDir)),
+ 'r', label='EFDC Model at ' + str(plotDir) + ' deg')
+
+axes[1].plot(station_gdf.index,
+ np.sqrt(station_gdf.loc[:, ['U_VEL5']].mean(axis=1)**2+
+ station_gdf.loc[:, ['V_VEL5']].mean(axis=1)**2)*3.28084 *
+ np.cos(np.arctan2(station_gdf.loc[:,
+ ['U_VEL5']].mean(axis=1),
+ station_gdf.loc[:,
+ ['V_VEL5']].mean(axis=1)) - np.deg2rad(plotDir)),
+ label='EFDC Model Layer 5 at ' + str(plotDir) + ' deg')
+
+
+# Plot report obs
+axes[0].plot(reportObs.index.tz_localize(
+ pytz.timezone('EST')), reportObs[1], 'k', label='Report Obs')
+
+# plot report efdc
+axes[1].plot(reportEFDC.index.tz_localize(
+ pytz.timezone('EST')) - dt.timedelta(hours=0), reportEFDC[1], 'k', label='Report EFDC')
+
+
+# Add legend
+axes[0].legend()
+axes[1].legend()
+
+axes[1].set_xlim([dt.datetime(2012, 7, 17, 12, 0, 0), dt.datetime(2012, 7, 24, 12, 0, 0)])
+axes[0].set_ylim([-1, 1])
+axes[1].set_ylim([-1, 1])
+
+plt.show()
# %% Load in ADCP2 data
diff --git a/NTC_DFM/ADCP_ReportTesting.py b/NTC_DFM/ADCP_ReportTesting.py
new file mode 100644
index 0000000..a2803b8
--- /dev/null
+++ b/NTC_DFM/ADCP_ReportTesting.py
@@ -0,0 +1,887 @@
+# -*- coding: utf-8 -*-
+"""
+@author: aforsythe
+
+Copied from "P:/11934.201 Newtown Creek TPP – Privileged and Confidential/05_Analyses/07 ADCP/NC_CurrentMeter_All_Phase1_all_data_2012_05_20/ADCP_Plot_v4.py"
+_AJMR
+"""
+
+
+import pandas as pd
+import matplotlib.pyplot as plt
+import matplotlib.dates as mdates
+import numpy as np
+import geopandas as gp
+# gp.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'
+
+import scipy as sp
+import scipy.ndimage
+#from astropy.convolution import Gaussian2DKernel
+import contextily as ctx
+import os
+from shapely.geometry import Point, MultiPoint
+from shapely.ops import nearest_points
+
+import datetime as dt
+import pytz
+
+import pickle
+
+# %% Read in data
+dataPath = '//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/05_Analyses/07 ADCP/NC_CurrentMeter_All_Phase1_all_data_2012_05_20/'
+
+df = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_mobile_2013_05_02.xlsx',
+ sheet_name='mag_all_mobile')
+
+# Convert to geodataframe
+gdf = gp.GeoDataFrame(df, geometry=gp.points_from_xy(df.Longitude, df.Latitude, crs="EPSG:4326"))
+
+#convert data to CRS of D3D
+gdf.geometry = gdf.geometry.to_crs("EPSG:32118")
+
+# Add new cols to geodataframe
+gdf['Distance'] = np.zeros([len(gdf), 1])
+gdf['DistanceSmth'] = np.zeros([len(gdf), 1])
+gdf['dist'] = np.zeros([len(gdf), 1])
+gdf['date'] = np.zeros([len(gdf), 1])
+
+# Set Date
+gdf['date'] = gdf['year'].astype(str) + '-' + gdf['mon'].astype(str).str.zfill(2) + '-' + gdf['day'].astype(
+ str).str.zfill(2)
+
+# Column names to distances data for plotting
+depths1 = gdf.columns[11:43] # ADCP sensor depth reading column names to list
+depths = np.array([float(d.replace('m', '')) for d in depths1]) * -1
+
+#loop through all transects
+
+transectCount = 0
+transects = gdf['transect'].unique()
+
+kernel = Gaussian2DKernel(x_stddev=0.25)  # NOTE(review): the astropy Gaussian2DKernel import above is commented out — this line will raise NameError; re-enable the import or remove this (kernel is only used in commented-out code)
+
+for transect_id in range(1, 2): #transects:
+    # Select a given transect
+ tMask = (gdf['transect']==transect_id)
+ # Remove rows without locations
+ tMask[pd.isna(gdf['Latitude'])] = False
+
+    # Find distance between points in m
+ gdf.loc[tMask, 'Distance'] = gdf[tMask].distance(gdf[tMask].shift())
+
+ # Many of the points are recorded as being at the same location...
+ # Where the distance is zero, set it to half of the following distance
+
+ # Find zeros and iloc after zeros
+ zeroDist = (gdf['Distance'] == 0) & tMask
+ zeroDistShift = zeroDist.shift()
+ zeroDistShift.iloc[0] = False
+ distShift = gdf['Distance'].shift(-1)
+ distShift.iloc[-1] = False
+ gdf.loc[tMask, 'DistanceSmth'] = gdf['Distance'][tMask]
+
+ # Set zeros to half of following
+ gdf.loc[zeroDist, 'DistanceSmth'] = distShift[zeroDist]/2
+ # Set following to half
+ gdf.loc[zeroDistShift, 'DistanceSmth'] = gdf['Distance']/2
+
+ # Set initial to zero
+ gdf.loc[gdf[tMask].index[0], 'DistanceSmth'] = 0
+
+ # If final location is duplicate, set to previous value
+ if gdf.loc[gdf[tMask].index[-1], 'Distance']==0:
+ gdf.loc[gdf[tMask].index[-1], 'DistanceSmth'] = gdf.loc[gdf[tMask].index[-2], 'DistanceSmth']
+
+ # Get cumulative sum of distances
+ gdf.loc[tMask, 'dist'] = gdf.loc[tMask, 'DistanceSmth'].cumsum()
+
+ # get velocity data all rows columns 11-43
+ V = gdf.values[tMask, 11:43].astype(float)
+
+# #__________________________________________________________________________________________________________________________
+ # %% Plotting
+ if transectCount == 0:
+ fig, axes = plt.subplots(nrows=5, ncols=4, figsize=(16, 8))
+ fig.tight_layout(pad=2.5)
+ ax = axes.flat
+
+ colormap = 'jet'
+ vmin=0
+ vmax=0.5
+
+ VS = sp.ndimage.filters.gaussian_filter(V, [1, 1], mode='nearest')
+
+# vDatEnd = (~np.isnan(V)).cumsum(1).argmax(1).astype(int) + 1
+# VS = convolve(V, kernel)
+# for i in range(0, len(VS)):
+# VS[i, vDatEnd[i]:-1] = np.nan
+
+ pltDat = ax[transectCount].pcolormesh(gdf.loc[tMask, 'dist'], depths, np.transpose(VS),
+ shading='auto', vmin=vmin, vmax=vmax, cmap=colormap)
+
+ cbar = fig.colorbar(pltDat, ax=ax[transectCount],
+ shrink=0.95,aspect=5)
+ cbar.set_label('Magnitude [m/s]')
+ ax[transectCount].set_xlabel('Distance Along Transect [m]')
+ ax[transectCount].set_ylabel('Depth below WSL [m]')
+ stationStart = next((i for i, j in enumerate(tMask) if j), None)
+ ax[transectCount].set_title(gdf.loc[stationStart, 'station'])
+ ax[transectCount].set_ylim(-6, 0)
+
+ transectCount = transectCount + 1
+
+plt.show()
+# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/Transects221-241.png',
+# bbox_inches='tight', dpi=300)
+
+# %% Save Transects
+gdf.loc[:, df.columns != 'geometry'].to_xarray().to_netcdf(
+ 'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/Transects.nc')
+
+# %% Load in moored data
+gdf_moored = dict()
+
+# Loop through directions
+for direction in ['u_all_moored', 'u_all_moored', 'mag_all_moored' , 'dir_all_moored']:  # NOTE(review): 'u_all_moored' listed twice — was 'v_all_moored' intended for the second entry? Confirm sheet names.
+ df_moored_data = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_moored_2013_05_20.xlsx',
+ sheet_name=direction)
+
+ # Shift col names to put second back in
+ colIN = list(df_moored_data.columns)
+ colOUT = colIN[0:6] + ['second'] + colIN[6:-1]
+ df_moored_data.columns = colOUT
+
+ ### Moored current meter locations from here:
+ ### file://srv-ott3/Projects/11934.201%20Newtown%20Creek%20TPP%20%E2%80%93%20Privileged%20and%20Confidential/03_Data/01_BkgrdReports/NC_DRAFT_DSR_Submittal_No_3_2013-07-03.pdf
+ ### Table 3-1, PDF page 65
+ ### Locations change between deployment 1-9 and 10/11
+
+ ### YSI from 2021 FRMR report Pg 385
+
+ df_moored = pd.DataFrame(
+ {'depOrig': ['NC083CM', 'NC081CM', 'NC082CM', 'NC086CM', 'EK023CM',
+ 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
+ 'depCurr': ['NC086CM', 'NC087CM', 'NC088CM', 'NC089CM', 'EK023CM',
+ 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
+ 'Northing': [208519.95, 206083.24, 203835.10, 201381.55, 200827.33,
+ 208809.66, 205547.85, 203870.2, 201684.6, 200860.91, 200336.12],
+ 'Easting': [996198.97, 1000959.45, 1004387.42, 1005283.07, 1004644.90,
+ 996586.22, 1001028.85, 1004501.4, 1005027.4, 1004516.53, 1005535.44],
+ 'bedElev': [-16, -17, -20, -18, -20, 0, 0, 0, 0, 0, 0]})
+ gdf_moored_loc = gp.GeoDataFrame(
+ df_moored, geometry=gp.points_from_xy(df_moored.Easting, df_moored.Northing), crs="EPSG:2263")
+ gdf_moored_loc.geometry = gdf_moored_loc.geometry.to_crs("EPSG:32118")
+
+    # Loop through Station IDs for deployments 1-10 and assign locations (dep10 is reassigned again by the 10-11 loop below)
+ for d in range(1, 11):
+ depMask = df_moored_data['deployment'] == ('dep' + str(d))
+ for stationIDX, station in enumerate(df_moored['depOrig']):
+ stationMask = (df_moored_data['station'] == station) & depMask
+
+ # Assign geography to station plus deployment
+ df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
+ df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
+
+ # Loop through Station IDs for deployments 10-11 and assign locations
+ for d in range(10,12):
+ depMask = df_moored_data['deployment'] == 'dep' + str(d)
+ for stationIDX, station in enumerate(df_moored['depCurr']):
+ stationMask = (df_moored_data['station'] == station) & depMask
+
+ # Assign geography to station plus deployment
+ df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
+ df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
+
+ # Create geodataframe and convert to USSP
+ gdf_moored[direction] = gp.GeoDataFrame(
+ df_moored_data, geometry=gp.points_from_xy(df_moored_data.Easting, df_moored_data.Northing), crs="EPSG:2263")
+
+ #convert data to CRS of D3D
+ gdf_moored[direction].geometry = gdf_moored[direction].geometry.to_crs("EPSG:32118")
+
+ gdf_moored[direction]['date'] = pd.to_datetime(gdf_moored[direction]['year'].astype(str) + '-' +
+ gdf_moored[direction]['month'].astype(str).str.zfill(2) + '-' +
+ gdf_moored[direction]['day'].astype(str).str.zfill(2) + ' ' +
+ gdf_moored[direction]['hour'].astype(str).str.zfill(2) + ':' +
+ gdf_moored[direction]['minute'].astype(str).str.zfill(2))
+
+
+# %% ADCP1
+gdf_moored.iloc[:, 1:-2].to_xarray().to_netcdf(  # NOTE(review): gdf_moored is now a dict of GeoDataFrames — .iloc will raise AttributeError; select a sheet first, e.g. gdf_moored['mag_all_moored']
+ 'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADCP1.nc')
+
+# %% Plot moored data
+# Column names to distances data for plotting
+depths1 = gdf_moored.columns[8:51] # ADCP sensor depth reading column names to list
+depths_moored = np.array([float(d.replace('m', '')) for d in depths1])
+
+colormap = 'jet'
+vmin = 0
+vmax = 0.2
+
+fig, axes = plt.subplots(nrows=5, ncols=1, figsize=(16, 8))
+fig.tight_layout(pad=2)
+
+# Loop through Station IDs for deployments 1-9 and assign locations
+for d in range(11):  # NOTE(review): iterates dep0-dep10, but deployments are dep1-dep11 (prior code used range(1,12)); also gdf_moored is a dict here, so gdf_moored['deployment'] below will raise KeyError — confirm
+ depMask = gdf_moored['deployment'] == ('dep' + str(d))
+ for stationIDX, station in enumerate(df_moored['depCurr']):
+ stationMask = (gdf_moored['station'] == station) & depMask
+
+ V = gdf_moored.values[stationMask, 8:51].astype(float)
+
+ if len(gdf_moored.loc[stationMask, 'date']) != 0:
+ pltDat = axes[stationIDX].pcolormesh(gdf_moored.loc[stationMask, 'date'],
+ depths_moored, np.transpose(V),
+ shading='auto', vmin=vmin, vmax=vmax, cmap=colormap)
+
+ axes[stationIDX].set_ylim(0, 7)
+ fmt_half_year = mdates.MonthLocator(interval=1)
+ axes[stationIDX].xaxis.set_major_locator(fmt_half_year)
+ axes[stationIDX].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+
+ if d == 1:
+ axes[stationIDX].set_ylabel('Elevation [m]')
+ axes[stationIDX].set_title(station)
+ cbar = fig.colorbar(pltDat, ax=axes[stationIDX],
+ shrink=1.1, aspect=3)
+ cbar.set_label('Magnitude [m/s]')
+ pltDat.set_clim([0, 0.2])
+
+plt.show()
+
+# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADCP_2012.png',
+# bbox_inches='tight', dpi=300)
+
+#%% Read in report obs
+reportObs = pd.read_csv('C:/Users/arey/files/Projects/Newtown/NTC_Obs_NC_086_Report.csv',
+ header=None, parse_dates=[0], index_col=0)
+
+# Average duplicate dates
+reportObs = reportObs.groupby(reportObs.index).mean()
+
+#%% Read in report EFDC
+reportEFDC = pd.read_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/06_Models/02_EFDC/04_Model_Comparisons/NC086_DAV_Report_2016_EFDC.csv',
+ header=None, parse_dates=[0], index_col=0)
+
+
+#%% Read in EFDC data
+efdc7_df = pd.read_csv(
+ '//ott-athena.baird.com/D/11934.201_Newtown_Creek_TPP/EFDC_Results/RevisedEFDC/2012_Mon7/READ_FROM_EFDC_GRAPHICS_OUT_BIN.CSV')
+
+
+# Read in EFDC grid
+efdc_grid_df = pd.read_csv('//ott-athena.baird.com/F/2019_FMRM_Deliverable/INPUT_FILES/GRID_FILES/NC_ER_lxly_20140129.inp',
+ delim_whitespace=True, skiprows=4,
+ names=['I', 'J', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
+
+efdc_grid_gdf = gp.GeoDataFrame(
+ efdc_grid_df, geometry=gp.points_from_xy(efdc_grid_df.X, efdc_grid_df.Y), crs="EPSG:32118")
+
+
+efdc7_merged = pd.merge(efdc7_df, efdc_grid_df, how='left', left_on=['I_MOD','J_MOD'],
+ right_on = ['I','J']).drop(columns=[
+ ' DUMPID', 'END_hr', 'I_MOD', 'J_MOD', 'I_MOD', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
+efdc7_merged['date'] = pd.to_datetime([dt.datetime(2012, 7, 1, 00, 00, 00) +
+ dt.timedelta(hours=h) for h in efdc7_merged['ST_hr']])
+efdc7_merged.set_index('date', inplace=True)
+efdc7_merged.index = efdc7_merged.index.tz_localize(
+ pytz.timezone('EST')) #Convert to UTC
+del efdc7_df
+
+efdc_merged_gdf = gp.GeoDataFrame(
+ efdc7_merged, geometry=efdc7_merged['geometry'], crs="EPSG:32118")
+del efdc7_merged
+
+#%% Find nearest grid point to EFDC ADCP Station
+station = 'NC086CM'
+stationLoc = gdf_moored_loc.loc[0, 'geometry']
+nearest_geoms = nearest_points(stationLoc, MultiPoint(efdc_grid_gdf.geometry))
+station_gdf = efdc_merged_gdf.loc[efdc_merged_gdf['geometry']==nearest_geoms[1]]
+
+#%% Plot single deployment for comparison
+fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(12, 8), sharex=True)
+
+station = 'NC086CM'
+stationMask = (gdf_moored['mag_all_moored']['station'] == station)
+# axes.plot(gdf_moored.loc[stationMask, 'date'],
+# gdf_moored.iloc[:, 8:50].loc[stationMask].mean(axis=1).multiply(3.28084), 'k')
+
+
+# Plot vector component in arbitrary direction
+for plotDir in [250]:
+ ADCPtime = pd.to_datetime(gdf_moored['mag_all_moored'].loc[stationMask, 'date'].values).tz_localize(
+ pytz.timezone('EST'))
+ axes[0].plot(ADCPtime, gdf_moored['mag_all_moored'].iloc[:, 8:50].loc[stationMask].mean(
+ axis=1).multiply(3.28084).multiply(
+ np.cos(np.deg2rad(gdf_moored['dir_all_moored'].iloc[:, 8:50].loc[
+ stationMask].mean(axis=1)) - np.deg2rad(plotDir))), label =
+ 'ADCP Obs at ' + str(plotDir) + ' deg')
+
+# axes.plot(gdf_moored.loc[stationMask, 'date'],
+# gdf_moored.iloc[:, 8:50].loc[stationMask].mean(axis=1).multiply(3.28084), 'k')
+
+# Plot EFDC
+axes[1].plot(station_gdf.index,
+ np.sqrt(station_gdf.loc[:, ['U_VEL1', 'U_VEL5', 'U_VEL10']].mean(axis=1)**2+
+ station_gdf.loc[:, ['V_VEL1', 'V_VEL5', 'V_VEL10 ']].mean(axis=1)**2)*3.28084 *
+ np.cos(np.arctan2(station_gdf.loc[:,
+ ['U_VEL1', 'U_VEL5', 'U_VEL10']].mean(axis=1),
+ station_gdf.loc[:,
+ ['V_VEL1', 'V_VEL5', 'V_VEL10 ']].mean(axis=1)) - np.deg2rad(plotDir)),
+ 'r', label='EFDC Model at ' + str(plotDir) + ' deg')
+
+axes[1].plot(station_gdf.index,
+ np.sqrt(station_gdf.loc[:, ['U_VEL5']].mean(axis=1)**2+
+ station_gdf.loc[:, ['V_VEL5']].mean(axis=1)**2)*3.28084 *
+ np.cos(np.arctan2(station_gdf.loc[:,
+ ['U_VEL5']].mean(axis=1),
+ station_gdf.loc[:,
+ ['V_VEL5']].mean(axis=1)) - np.deg2rad(plotDir)),
+ label='EFDC Model Layer 5 at ' + str(plotDir) + ' deg')
+
+
+# Plot report obs
+axes[0].plot(reportObs.index.tz_localize(
+ pytz.timezone('EST')), reportObs[1], 'k', label='Report Obs')
+
+# plot report efdc
+axes[1].plot(reportEFDC.index.tz_localize(
+ pytz.timezone('EST')) - dt.timedelta(hours=0), reportEFDC[1], 'k', label='Report EFDC')
+
+
+# Add legend
+axes[0].legend()
+axes[1].legend()
+
+axes[1].set_xlim([dt.datetime(2012, 7, 17, 12, 0, 0), dt.datetime(2012, 7, 24, 12, 0, 0)])
+axes[0].set_ylim([-1, 1])
+axes[1].set_ylim([-1, 1])
+
+plt.show()
+
+# %% Load in ADCP2 data -- 2014/15 moored ADCP deployments D1-D5, read into nested dict adcp2_gdfs[station][variable][deployment]
+
+# Read in locations
+df_adcp2_locs = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/05 Currents/NCP2_ADCP_D1-D3/AQ_LocationsSO_ADCP_ADV_overview_Coords_20150126_AJMR.xlsx',
+        sheet_name='ADCP')
+gdf_adcp2_locs = gp.GeoDataFrame(df_adcp2_locs,
+        geometry=gp.points_from_xy(df_adcp2_locs.X_NYSPLI, df_adcp2_locs.Y_NYSPLI), crs="EPSG:2263")  # source coords are NY State Plane (ft, EPSG:2263)
+gdf_adcp2_locs = gdf_adcp2_locs.to_crs("EPSG:32118")  # reproject to NY State Plane metric for mapping
+
+
+adcp2_data_path = '//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/05 Currents/'
+
+adcp2_paths = ['NCP2_ADCP_D1-D3/ADCP_D1_070314_090814', 'NCP2_ADCP_D1-D3/ADCP_D2_090914_110214',
+        'NCP2_ADCP_D1-D3/ADCP_D3_110414_010615', 'NCP2_ADCP_D4-D5/D4_010715_030315',
+        'NCP2_ADCP_D4-D5/D5_030415_050515']  # one folder per deployment window; list order defines deployment number (depIDX+1)
+
+adcp2_gdfs = dict()
+
+for depIDX, adcp2_path in enumerate(adcp2_paths):
+    adcp2_files = os.listdir(adcp2_data_path + adcp2_path) # returns list of files in adv folder
+
+    for adcp2_file in adcp2_files:
+        if '.xls' in adcp2_file or '.csv' in adcp2_file:
+            if '.xls' in adcp2_file:
+                df_in = pd.read_excel(adcp2_data_path + adcp2_path + '/' + adcp2_file)
+            else:
+                df_in = pd.read_csv(adcp2_data_path + adcp2_path + '/' + adcp2_file)
+
+            for stationIDX, station in enumerate(df_adcp2_locs['parent_loc_code']):
+                advStrIDX = adcp2_file.find('_')+1  # 3-char station code follows the first underscore in the file name
+                if adcp2_file[advStrIDX:advStrIDX+3] in station:
+                    adcp2_geo_x = np.ones([len(df_in), 1]) * df_adcp2_locs.X_NYSPLI[stationIDX]  # NOTE(review): 2-D (n,1) arrays fed to points_from_xy below -- confirm geopandas accepts this shape
+                    adcp2_geo_y = np.ones([len(df_in), 1]) * df_adcp2_locs.Y_NYSPLI[stationIDX]
+
+
+                    df_in['date'] = pd.to_datetime(df_in['Year'].astype(str) + '-' + df_in['Month'].astype(
+                        str).str.zfill(2) + '-' + df_in['Day'].astype(
+                        str).str.zfill(2) + ' ' + df_in['Hour'].astype(
+                        str).str.zfill(2) + ':' + df_in['Minute'].astype(
+                        str).str.zfill(2) + ':' + df_in['Second'].astype(str).str.zfill(2))  # assemble timestamp from separate Y/M/D/H/M/S columns
+
+                    gdf_in = gp.GeoDataFrame(
+                        df_in, geometry=gp.points_from_xy(adcp2_geo_x, adcp2_geo_y), crs="EPSG:2263")
+
+                    if adcp2_file[advStrIDX:advStrIDX + 3] not in adcp2_gdfs:  # first level: station code
+                        adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]] = dict()
+
+                    if adcp2_file[0:advStrIDX-1] not in adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]]:  # second level: variable name (text before first underscore)
+                        adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]][adcp2_file[0:advStrIDX - 1]] = dict()
+
+                    if depIDX+1 not in adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]][adcp2_file[0:advStrIDX - 1]]:  # third level: deployment number; first file wins, duplicates ignored
+                        adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]][adcp2_file[0:advStrIDX - 1]][depIDX+1] = gdf_in
+# %% ADCP2 -- export every station/variable/deployment table to NetCDF via xarray
+for stationIDX, stat in enumerate(adcp2_gdfs):
+    for varIDX, var in enumerate(adcp2_gdfs[stat]):
+        for depIDX, depdat in enumerate(adcp2_gdfs[stat][var]):
+            ncDat = adcp2_gdfs[stat][var][depdat].iloc[:, 1:-2]  # drop first column and the trailing date/geometry columns (presumably not NetCDF-serializable) -- TODO confirm column layout
+            ncDat.to_xarray().to_netcdf(
+                'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADCP2/' +
+                stat + '_' + var + '_d' + str(depdat) + '.nc')
+
+gdf_adcp2_locs.to_csv('C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADCP2/ADCP2locations.csv')
+
+# %% Plot ADCP2 Data -- one depth/time pcolormesh of velocity magnitude per station (rows), deployments overplotted on the shared time axis
+fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(9, 8))
+fig.tight_layout(pad=2)
+
+colormap = 'jet'
+vmin = 0
+vmax = 0.2  # m/s color range; also enforced by set_clim below
+
+for stationIDX, stat in enumerate(adcp2_gdfs):
+    for depIDX, depdat in enumerate(adcp2_gdfs[stat]['mag']):
+        depths1 = adcp2_gdfs[stat]['mag'][depdat].columns[6:-2] # ADCP sensor depth reading column names to list
+        depths_moored = np.array([float(d.replace('m', '')) for d in depths1])  # column names like '1.5m' -> numeric depths
+
+        V = adcp2_gdfs[stat]['mag'][depdat].values[:, 6:-2].astype(float)  # same 6:-2 slice as the depth columns above
+
+        pltDat = axes[stationIDX].pcolormesh(adcp2_gdfs[stat]['mag'][depdat].loc[:, 'date'],
+                                             depths_moored, np.transpose(V),
+                                             shading='auto', vmin=vmin, vmax=vmax, cmap=colormap)
+
+        axes[stationIDX].set_ylim(0, 7)
+        axes[stationIDX].set_xlim(pd.to_datetime("2014-07-01"), pd.to_datetime('2015-05-15'))
+        #axes[stationIDX, depIDX].format_xdata = mdates.DateFormatter('%Y-%m')
+        fmt_half_year = mdates.MonthLocator(interval=1)  # NOTE: name is a holdover -- this is a monthly locator, not half-year
+        axes[stationIDX].xaxis.set_major_locator(fmt_half_year)
+        axes[stationIDX].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+
+        axes[stationIDX].set_title(str(stat))
+
+        axes[stationIDX].set_ylabel('Elevation [m]')
+
+        cbar = fig.colorbar(pltDat, ax=axes[stationIDX],
+                            shrink=1.1, aspect=3)  # one colorbar added per deployment iteration (duplicates per axis are accepted here)
+        cbar.set_label('Magnitude [m/s]')
+        pltDat.set_clim([0, 0.2])
+
+plt.show()
+
+
+# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADCP_2014.png',
+#             bbox_inches='tight', dpi=300)
+
+# %% Load in Water Level Data -- three tide-gauge records, each geolocated from the NTC6.kml map points
+# Gauge Locations from map
+gdf_gaugeLoc = gp.read_file('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/NTC6.kml')  # NOTE(review): reading KML requires the fiona KML driver to be enabled -- confirm environment
+gdf_gaugeLocUSSP = gdf_gaugeLoc.to_crs("EPSG:32118")
+
+# All in Feet NAVD88
+df_wl_FFG = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP1_Gauge_Elev_Data_20130521/NC_Gauge_Elev_Data_20130521.xlsx',
+        sheet_name='Field_Facility_Gauge')
+gdf_wl_FFG = gp.GeoDataFrame(df_wl_FFG,
+        geometry=gp.points_from_xy(np.ones([len(df_wl_FFG), 1]) * gdf_gaugeLoc['geometry'].x[2],
+        np.ones([len(df_wl_FFG), 1]) * gdf_gaugeLoc['geometry'].y[2]), crs="EPSG:4326")  # every row gets the same gauge location (KML point index 2)
+gdf_wl_FFG.geometry = gdf_wl_FFG.geometry.to_crs("EPSG:32118")
+
+
+df_wl_NGG1 = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP1_Gauge_Elev_Data_20130521/NC_Gauge_Elev_Data_20130521.xlsx',
+        sheet_name='National_Grid_Gauge')
+gdf_wl_NGG1 = gp.GeoDataFrame(df_wl_NGG1,
+        geometry=gp.points_from_xy(np.ones([len(df_wl_NGG1), 1]) * gdf_gaugeLoc['geometry'].x[3],
+        np.ones([len(df_wl_NGG1), 1]) * gdf_gaugeLoc['geometry'].y[3]), crs="EPSG:4326")  # KML point index 3 = National Grid gauge
+gdf_wl_NGG1.geometry = gdf_wl_NGG1.geometry.to_crs("EPSG:32118")
+
+# Also includes temperature
+df_wl_NGG2 = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP2_NG_TideGauge_20160113/NCP2_NG_TideGauge_20160113.xlsx',
+        sheet_name='Sheet1')
+gdf_wl_NGG2 = gp.GeoDataFrame(df_wl_NGG2,
+        geometry=gp.points_from_xy(np.ones([len(df_wl_NGG2), 1]) * gdf_gaugeLoc['geometry'].x[3],
+        np.ones([len(df_wl_NGG2), 1]) * gdf_gaugeLoc['geometry'].y[3]), crs="EPSG:4326")
+gdf_wl_NGG2.geometry = gdf_wl_NGG2.geometry.to_crs("EPSG:32118")
+
+gdf_wl_FFG.to_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/FieldFacility.csv')
+gdf_wl_NGG1.to_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid1.csv')
+gdf_wl_NGG2.to_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid2.csv')
+
+
+# %% Plot Water Level Data -- the three gauge records on one axis, converted ft -> m NAVD88
+fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 4))
+
+axes.plot(gdf_wl_FFG.Date_time,
+          gdf_wl_FFG['Water Surface Elevation (ft)']*0.3048,
+          label='Field Facility Gauge')
+
+axes.plot(gdf_wl_NGG1.Date_time,
+          gdf_wl_NGG1['Water Surface Elevation (ft)']*0.3048,
+          label='National Grid Gauge Deployment 1')
+
+axes.plot(gdf_wl_NGG2.datetime,
+          gdf_wl_NGG2['water_surface_elevation']*0.3048,
+          label='National Grid Gauge Deployment 2')
+
+fmt_half_year = mdates.MonthLocator(interval=6)  # major tick every 6 months
+axes.xaxis.set_major_locator(fmt_half_year)
+axes.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+axes.set_ylabel('Water Surface Elevation [mNAVD88]')
+axes.set_title('Water Surface Elevation')
+
+axes.legend()
+
+fig.show()
+# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/WaterLevel.png',
+#             bbox_inches='tight', dpi=300)
+
+
+
+
+# %% Load temperature data -- geolocate each sample row by joining on LocationID against the sample-location table
+df_tdat = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/10_Salinity/tData.xlsx',
+                        skiprows=[1])
+
+# Day Month order is reversed
+df_tdat['date'] = pd.to_datetime(df_tdat['CollectionDate'])  # NOTE(review): comment above says D/M order is reversed, but no dayfirst= is passed -- confirm parsing
+
+df_tsample = pd.read_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/10_Salinity/tSampleLocation.csv')
+
+# Create geodataframe and convert to USSP
+gdf_tsample = gp.GeoDataFrame(
+    df_tsample, geometry=gp.points_from_xy(df_tsample.EastCoordinate, df_tsample.NorthCoordinate), crs="EPSG:2263")
+
+#convert data to CRS of D3D
+gdf_tsample.geometry = gdf_tsample.geometry.to_crs("EPSG:32118")
+
+tdat_geo = list()       # per-row geometry (empty Point when no location match)
+tdat_facility = list()  # per-row SourceArea ('' when no location match)
+
+for i in range(0, len(df_tdat)):
+    #sampleID = df_tdat['LocationID'][i][0:df_tdat['LocationID'][i].find('_')]
+    #geoFind = df_tsample.loc[df_tsample['ParentLocationID'] == sampleID]
+    geoFindRows = df_tsample.loc[df_tsample['LocationID'] == df_tdat['LocationID'][i]]  # match once; reused below instead of re-scanning df_tsample three times per row
+
+    if len(geoFindRows) != 0:
+        tdat_geo.append(geoFindRows.geometry.values[0])
+        tdat_facility.append(geoFindRows.SourceArea.values[0])
+    else:
+        tdat_geo.append(Point())
+        tdat_facility.append('')
+
+# Create geodataframe
+gdf_tdat = gp.GeoDataFrame(df_tdat, geometry=tdat_geo, crs="EPSG:32118")
+gdf_tdat.loc[:, 'SourceArea'] = tdat_facility
+
+gdf_tdat.loc[gdf_tdat.loc[:, 'DepthUnit']=='ft', 'DepthM'] = gdf_tdat.loc[gdf_tdat.loc[:, 'DepthUnit']=='ft', 'BeginDepth']*0.3048  # normalize sample depth to metres
+gdf_tdat.loc[gdf_tdat.loc[:, 'DepthUnit']=='cm', 'DepthM'] = gdf_tdat.loc[gdf_tdat.loc[:, 'DepthUnit']=='cm', 'BeginDepth']*0.01
+
+
+# Import spring observations
+df_springDat = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/11_Temperature/NC_Spr2012_Benthic_Water_Quality_FieldData&Observations_20121022.xlsx')
+
+# Create geodataframe and convert to USSP
+gdf_springDat = gp.GeoDataFrame(
+    df_springDat, geometry=gp.points_from_xy(df_springDat.x_coord_as_numeric, df_springDat.y_coord_as_numeric), crs="EPSG:2263")
+gdf_springDat.geometry = gdf_springDat.geometry.to_crs("EPSG:32118")
+
+
+# Import Summer observations
+df_summerDat = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/11_Temperature/NC_Sum2012_Benthic_Water_Quality_FieldData&Observations_20130205.xlsx')
+
+# Create geodataframe and convert to USSP
+gdf_summerDat = gp.GeoDataFrame(
+    df_summerDat, geometry=gp.points_from_xy(df_summerDat.x_coord_as_numeric, df_summerDat.y_coord_as_numeric), crs="EPSG:2263")
+gdf_summerDat.geometry = gdf_summerDat.geometry.to_crs("EPSG:32118")
+
+# Merged -- concatenate the two raw DataFrames, then geolocate the combined set once
+df_SpringSummerDat = pd.concat([df_springDat, df_summerDat], ignore_index=True)  # was gdf_summerDat: mixing the GeoDataFrame in carried a stray geometry column for summer rows only
+gdf_SpringSummerDat = gp.GeoDataFrame(
+    df_SpringSummerDat, geometry=gp.points_from_xy(df_SpringSummerDat.x_coord_as_numeric, df_SpringSummerDat.y_coord_as_numeric), crs="EPSG:2263")
+gdf_SpringSummerDat.geometry = gdf_SpringSummerDat.geometry.to_crs("EPSG:32118")
+
+# %% Save temperature and salinity data -- combined spring+summer observations to CSV for downstream use
+gdf_SpringSummerDat.to_csv('C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/TempSal/Spring_Summer.csv')
+
+# %% Plot Salinity Time series -- surface-water samples in the two creek source areas, colored by salinity
+fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 8))
+plotMaskStation = (gdf_tdat.SourceArea == 'Newtown Creek') | (gdf_tdat.SourceArea == 'East Branch of Newtown Creek')
+plotMaskWater = (gdf_tdat.SampleMedium == 'Surface Water')
+plotMask = plotMaskStation & plotMaskWater
+
+pltDat = axes.scatter(gdf_tdat.loc[plotMask, 'date'],
+                      gdf_tdat.loc[plotMask, 'DepthM'], 10,
+                      gdf_tdat.loc[plotMask, 'NumericResult'])  # color = salinity value
+axes.set_ylim(10, 0)  # inverted axis: depth increases downward
+axes.set_ylabel('Depth below water surface [m]')
+axes.set_title('Surface Water Salinity Samples')
+axes.set_xlabel('Date')
+
+fmt_half_year = mdates.MonthLocator(interval=12)  # yearly ticks despite the variable name
+axes.xaxis.set_major_locator(fmt_half_year)
+axes.xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
+
+cbar = fig.colorbar(pltDat, ax=axes, shrink=0.95)
+cbar.set_label('Salinity [PSU]')
+pltDat.set_clim([5, 40])
+
+fig.show()
+# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/Salinity.png',
+#             bbox_inches='tight', dpi=300)
+
+
+
+# %% Plot Temperature Time series -- one panel per half-mile band of distance from the creek mouth
+## Additional salinity obs are here!
+fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(6, 8))
+fig.tight_layout(pad=2)
+
+for i, miles in enumerate(np.arange(0, 3, 0.5)):
+    plotMaskDistance = (gdf_SpringSummerDat.miles_from_NC_mouth > miles) & (
+        gdf_SpringSummerDat.miles_from_NC_mouth < miles + 0.5)  # half-mile distance band for this panel
+    plotMaskStation = (gdf_SpringSummerDat.subfacility_code == 'Newtown Creek')
+    plotMaskVar = (gdf_SpringSummerDat.chemical_name == 'Temperature (field)')# Temperature (field)'
+
+    plotMask = plotMaskDistance & plotMaskStation & plotMaskVar
+
+
+    pltDat = axes[i].scatter(gdf_SpringSummerDat.loc[plotMask, 'location_start_date'],
+                             gdf_SpringSummerDat.loc[plotMask, 'elev']*0.3048, 10,
+                             gdf_SpringSummerDat.loc[plotMask, 'result_value'])  # elev ft -> m; color = temperature
+
+    axes[i].set_ylim(-10, 0)
+    axes[i].set_ylabel('Elevation [m]')
+
+    cbar = fig.colorbar(pltDat, ax=axes[i], shrink=0.95)
+    cbar.set_label('Temp [degC]')
+    pltDat.set_clim([5, 30])
+
+    fmt_half_year = mdates.MonthLocator(interval=1)
+    axes[i].xaxis.set_major_locator(fmt_half_year)
+    axes[i].xaxis.set_major_formatter(mdates.DateFormatter('%m-%Y'))
+
+axes[0].set_title('Surface Water Temperature Samples')
+axes[i].set_xlabel('Date')  # i leaks from the loop above: labels the bottom panel
+
+fig.show()
+
+
+# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/Temperature.png',
+#             bbox_inches='tight', dpi=300)
+
+# %% Load in ADV data -- 2014 moored ADV deployments D1-D6; file names encode station ([-9:-6]) and deployment ([-6:-4])
+
+# Read in locations
+df_adv_locs = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/05 Currents/Velocity_Data_Compiled/Phase2/Locations/AQ_LocationsSO_ADCP_ADV_overview_Coords_20150126_AJMR.xlsx',
+        sheet_name='ADV2')
+gdf_adv_locs = gp.GeoDataFrame(df_adv_locs,
+        geometry=gp.points_from_xy(df_adv_locs.X_NYSPLI, df_adv_locs.Y_NYSPLI), crs="EPSG:2263")
+gdf_adv_locs = gdf_adv_locs.to_crs("EPSG:32118")
+
+
+adv_data_path = '//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/05 Currents/Velocity_Data_Compiled/Phase2/Raw'
+
+adv_paths = ['NCP2_ADV_D1-D2/ADV_D1_070914_080214', 'NCP2_ADV_D1-D2/ADV_D2_080214_090814',
+        'NCP2_ADV_D3-D4/ADV_D3_090914_100614', 'NCP2_ADV_D3-D4/ADV_D4_100714_110214',
+        'NCP2_ADV_D5-D6/ADV_D5_110414_120214', 'NCP2_ADV_D5-D6/ADV_D6_120414_010615']
+
+adv_gdfs = dict()  # station -> deployment -> location row (GeoSeries)
+adv_dfs = dict()   # station -> deployment -> time-indexed data
+
+for depIDX, adv_path in enumerate(adv_paths):
+    adv_files = os.listdir(adv_data_path + '/' + adv_path) # returns list of files in adv folder
+
+    for adv_file in adv_files:
+        if '.txt' in adv_file and 'Readme' not in adv_file:
+            df_in = pd.read_csv(adv_data_path + '/' + adv_path + '/' + adv_file, delim_whitespace=True, skipinitialspace=True)
+
+            for stationIDX, station in enumerate(gdf_adv_locs['parent_loc_code']):
+                if adv_file[-9:-6] in station:  # 3-char station code immediately before the 'Dn.txt' suffix
+                    # adv_geo_x = np.ones([len(df_in), 1]) * df_adv_locs.X_NYSPLI[stationIDX]
+                    # adv_geo_y = np.ones([len(df_in), 1]) * df_adv_locs.Y_NYSPLI[stationIDX]
+
+
+                    df_in['date'] = pd.to_datetime(df_in['Year'].astype(str) + '-' + df_in['Month'].astype(
+                        str).str.zfill(2) + '-' + df_in['Day'].astype(
+                        str).str.zfill(2) + ' ' + df_in['Hour'].astype(
+                        str).str.zfill(2) + ':' + df_in['Minute'].astype(
+                        str).str.zfill(2) + ':' + df_in['Second'].astype(str).str.zfill(2))  # assemble timestamp from component columns
+                    df_in.set_index('date', inplace=True)
+
+                    df_in.drop(columns=['Year', 'Month', 'Day', 'Hour', 'Minute', 'Second'], inplace=True)
+                    df_in.dropna(how='all', axis=1, inplace=True)  # drop fully-empty columns from ragged text files
+
+                    colName = df_in.columns
+                    df_in[colName] = df_in[colName].astype('float32')  # downcast to halve memory for these long records
+
+                    # Location
+                    if adv_file[-9:-6] not in adv_gdfs:
+                        adv_gdfs[adv_file[-9:-6]] = dict()
+                        adv_dfs[adv_file[-9:-6]] = dict()
+                    # D1-6
+                    if adv_file[-6:-4] not in adv_gdfs[adv_file[-9:-6]]:
+                        adv_gdfs[adv_file[-9:-6]][adv_file[-6:-4]] = gdf_adv_locs.iloc[stationIDX]
+                        adv_dfs[adv_file[-9:-6]][adv_file[-6:-4]] = df_in
+                    else:
+                        adv_gdfs[adv_file[-9:-6]][adv_file[-6:-4]] = gdf_adv_locs.iloc[stationIDX]
+                        adv_dfs[adv_file[-9:-6]][adv_file[-6:-4]].loc[:, colName] = df_in  # later files for the same station/deployment merge their columns into the existing frame
+
+            print('ADV:' + adv_file[-9:-6] +
+                  '; ' + adv_file[-6:-4] +
+                  '; var:' + adv_file[3:6])
+
+# with open('ADV.pickle', 'wb') as f:
+#     pickle.dump(adv_dfs, f)
+# %% Save ADV to NetCDF -- sanitize column names, then export each station/deployment frame
+for stationIDX, stat in enumerate(adv_dfs):
+    for depIDX, depdat in enumerate(adv_dfs[stat]):
+        ncDat = adv_dfs[stat][depdat]
+        ncDat.columns = ncDat.columns.str.replace(r"[()]", "_", regex=True)  # regex=True made explicit: pandas >= 2.0 defaults to literal replace, which would leave these character classes unmatched
+        ncDat.columns = ncDat.columns.str.replace(r"[/]", "_", regex=True)   # strip '/' so names are valid NetCDF variable names
+
+        ncDat.to_xarray().to_netcdf(
+            'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADV/' + stat + '_' + depdat + '.nc')
+
+gdf_adv_locs.to_csv('C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADV/ADVlocations.csv')
+
+# %% Plot ADV Data -- 1-second-resampled velocity time series, one panel per station
+fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(9, 8))
+fig.tight_layout(pad=2)
+
+for stationIDX, stat in enumerate(adv_dfs):
+    for depIDX, depdat in enumerate(adv_dfs[stat]):
+
+        # adv_dfs[stat][depdat]['vel'].iloc[::60, :].plot(ax=axes[stationIDX])
+        if 'Velocity' in adv_dfs[stat][depdat].columns:  # column name differs across source files (pre/post NetCDF sanitization)
+            plotingDat = adv_dfs[stat][depdat].loc[:, 'Velocity'].resample('1s').mean()
+        else:
+            plotingDat = adv_dfs[stat][depdat].loc[:, 'Velocity_m_s_'].resample('1s').mean()
+
+        axes[stationIDX].plot(plotingDat.index, plotingDat)
+
+        # axes[stationIDX].set_ylim(0, 7)
+        axes[stationIDX].set_xlim(pd.to_datetime("2014-07-01"), pd.to_datetime('2015-02-01'))
+        # #axes[stationIDX, depIDX].format_xdata = mdates.DateFormatter('%Y-%m')
+        fmt_half_year = mdates.MonthLocator(interval=1)
+        axes[stationIDX].xaxis.set_major_locator(fmt_half_year)
+        axes[stationIDX].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
+
+
+        axes[stationIDX].set_title(str(stat))
+        #
+        axes[stationIDX].set_ylabel('ADV Velocity [m/s]')
+
+        # cbar.set_label('Velocity Magnitude [m/s]')
+        # pltDat.set_clim([0, 0.2])
+
+plt.show()
+
+
+fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADV_raw.png',
+            bbox_inches='tight', dpi=300)
+
+# %% Plot Map -- overview map of every instrument/sample location; also accumulates a name/x/y table for the grid join below
+mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'  # NOTE(review): API access token hard-coded in source -- consider moving to an environment variable
+
+fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(8, 8))
+
+locTableNames = []  # accumulated station labels...
+locTableX = []      # ...and coordinates, consumed by the grid-cell join cell below
+locTableY = []
+
+axes.set_xlim(303500, 306500)
+axes.set_ylim(61000, 63750)
+gdf.plot(ax=axes, markersize=10, color='blue', label='Mobile ADCP')
+for x, y, label in zip(gdf.drop_duplicates(subset="station", keep='last').geometry.x,
+                       gdf.drop_duplicates(subset="station", keep='last').geometry.y,
+                       gdf.drop_duplicates(subset="station", keep='last').station):
+    axes.annotate(label, xy=(x, y), xytext=(20, 3), textcoords="offset points", color='blue',
+                  bbox=dict(boxstyle="square,pad=0.3", fc="white", ec="k", lw=0))
+
+
+gdf_SpringSummerDat.plot(ax=axes, markersize=12, color='magenta', label='Temperature & Salinity')
+for x, y, label in zip(gdf_SpringSummerDat.drop_duplicates(subset="loc_name", keep='last').geometry.x,
+                       gdf_SpringSummerDat.drop_duplicates(subset="loc_name", keep='last').geometry.y,
+                       gdf_SpringSummerDat.drop_duplicates(subset="loc_name", keep='last').loc_name):
+    locTableNames.append(label)  # recorded for the location table only; not annotated on the map
+    locTableX.append(x)
+    locTableY.append(y)
+
+gdf_moored_loc.plot(ax=axes, markersize=20, color='red', label='Moored ADCP 2012')
+for x, y, label in zip(gdf_moored_loc.drop_duplicates(subset="depCurr", keep='last').geometry.x,
+                       gdf_moored_loc.drop_duplicates(subset="depCurr", keep='last').geometry.y,
+                       gdf_moored_loc.drop_duplicates(subset="depCurr", keep='last').depCurr):
+    locTableNames.append(label)
+    locTableX.append(x)
+    locTableY.append(y)
+
+gdf_adcp2_locs.plot(ax=axes, markersize=20, color='orange', label='Moored ADCP 2014')
+for x, y, label in zip(gdf_adcp2_locs.drop_duplicates(subset="parent_loc_code", keep='last').geometry.x,
+                       gdf_adcp2_locs.drop_duplicates(subset="parent_loc_code", keep='last').geometry.y,
+                       gdf_adcp2_locs.drop_duplicates(subset="parent_loc_code", keep='last').parent_loc_code):
+    axes.annotate(label, xy=(x, y), xytext=(-65, 3), textcoords="offset points", color='orange',
+                  bbox=dict(boxstyle="square,pad=0.3", fc="white", ec="k", lw=0))
+    locTableNames.append(label)
+    locTableX.append(x)
+    locTableY.append(y)
+
+gdf_adv_locs.plot(ax=axes, markersize=20, color='green', label='Moored ADV 2014')
+for x, y, label in zip(gdf_adv_locs.geometry.x,
+                       gdf_adv_locs.geometry.y,
+                       gdf_adv_locs.parent_loc_code):
+    axes.annotate(label, xy=(x, y), xytext=(-30, -30), textcoords="offset points", color='green',
+                  bbox=dict(boxstyle="square,pad=0.3", fc="white", ec="k", lw=0))
+    locTableNames.append(label)
+    locTableX.append(x)
+    locTableY.append(y)
+
+gdf_gaugeLocUSSP.loc[2:3, 'geometry'].plot(ax=axes, markersize=20, color='yellow', label='Water Level Gauge')
+for x, y, label in zip(gdf_gaugeLocUSSP.loc[2:3, 'geometry'].x,
+                       gdf_gaugeLocUSSP.loc[2:3, 'geometry'].y,
+                       gdf_gaugeLocUSSP.loc[2:3, 'Name']):
+    locTableNames.append(label)
+    locTableX.append(x)
+    locTableY.append(y)
+
+for x, y, label in zip(gdf.geometry.x,
+                       gdf.geometry.y,
+                       gdf.station + '_' + gdf.transect.astype(str) + gdf['min'].astype(str) + gdf.second.astype(str)):  # synthesize a unique label per mobile-ADCP ping
+    locTableNames.append(label)
+    locTableX.append(x)
+    locTableY.append(y)
+
+
+
+ctx.add_basemap(axes, source=mapbox, crs='EPSG:32118')
+axes.set_xlabel('New York State Plane Easting [m]')
+axes.set_ylabel('New York State Plane Northing [m]')
+axes.legend()
+
+# axes[1].set_xlim(303500, 306500)
+# axes[1].set_ylim(61000, 63750)
+# gdf_SpringSummerDat.plot(ax=axes[1], markersize=12, color='magenta', label='Temperature & Salinity')
+# axes[1].set_xlabel('New York State Plane Easting [m]')
+# axes[1].legend()
+# ctx.add_basemap(axes[1], source=mapbox, crs='EPSG:32118')
+
+fig.show()
+# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/DataMap_ADV.png',
+#             bbox_inches='tight', dpi=300)
+
+# %% Import grid shapefile and find cells -- spatially join the accumulated observation points onto the D3D-FM grid cells
+delftGrid = gp.read_file('C:/Users/arey/files/Projects/Newtown/Topology data of 2D network.shp')
+delftGrid = delftGrid.set_crs("EPSG:32118")
+delftGrid['centroid'] = delftGrid.geometry.centroid
+
+obsPts = gp.GeoDataFrame(locTableNames, geometry=gp.points_from_xy(locTableX, locTableY), crs="EPSG:32118")  # column 0 holds the station labels
+
+joinPTS = gp.sjoin(obsPts, delftGrid, op='within')  # NOTE(review): 'op' was renamed 'predicate' in geopandas 0.10 and removed in 1.0 -- update on upgrade
+
+uniqueJoinPTS = joinPTS.index_right.unique()
+groupdObsLabels = joinPTS.groupby(by='index_right').agg({0: lambda x: list(x)})  # all station labels that fall inside each grid cell
+
+uniqueDelftGrid = delftGrid.iloc[uniqueJoinPTS, :].copy()  # .copy(): the column assignment below would otherwise write to a slice-view (SettingWithCopyWarning / possible silent no-op)
+uniqueDelftGrid['Station Names'] = groupdObsLabels
+
+fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(8, 8))
+axes.set_xlim(303500, 306500)
+axes.set_ylim(61000, 63750)
+delftGrid.plot(ax=axes, markersize=10, color='gray', label='Mobile ADCP')
+delftGrid.loc[uniqueJoinPTS, 'geometry'].plot(ax=axes, markersize=10, color='blue', label='Mobile ADCP')
+ctx.add_basemap(axes, source=mapbox, crs='EPSG:32118')
+axes.set_xlabel('New York State Plane Easting [m]')
+axes.set_ylabel('New York State Plane Northing [m]')
+fig.show()
+
+uniqueDelftGrid.to_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/06_Models/06_Delft3DFM/RectConnect2_Obs_PTS.xlsx')
diff --git a/NTC_DFM/NTC_SedTestPlot.py b/NTC_DFM/NTC_SedTestPlot.py
new file mode 100644
index 0000000..2cf232c
--- /dev/null
+++ b/NTC_DFM/NTC_SedTestPlot.py
@@ -0,0 +1,427 @@
+"""
+@author: AJMR
+
+Plotting and animation script for NTC sediment tests
+
+April 17, 2023
+"""
+
+import os
+import pandas as pd
+import geopandas as gpd
+import netCDF4 as nc
+import math
+import numpy as np
+
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+from scipy.interpolate import LinearNDInterpolator, interp1d
+import matplotlib.animation as animation
+
+import dfm_tools as dfmt
+
+import xarray as xr
+
+import cartopy.crs as ccrs
+import contextily as ctx
+from dfm_tools.regulargrid import scatter_to_regulargrid
+
+from pathlib import Path
+
+# Colormaps
+import cm_xml_to_matplotlib as cm
+
+#%% Read Model Log -- spreadsheet indexed by run number; 'Run Location' column gives each run's root folder
+
+runLog = pd.read_excel('C:/Users/arey/files/Projects/Newtown/Model Log NTC.xlsx', 'ModelLog')
+
+dataPath = "FlowFM/dflowfm/output/FlowFM_map.nc"   # map output, relative to a run's root folder
+histPath = "FlowFM/dflowfm/output/FlowFM_his.nc"   # history (station) output, relative to a run's root folder
+
+#%% Import Model results for static images -- lists are indexed by run number, so unplotted slots stay None
+
+# Define which models to import
+modelPlot = [120, 121, 129, 130, 131, 132, 133, 134]
+
+d3dfm_DataArray = [None] * (max(modelPlot)+1)
+sedMass = [None] * (max(modelPlot)+1) # Available Mass of Sediment
+erodeSed = [None] * (max(modelPlot)+1) # Erosion and deposition
+initialBed = [None] * (max(modelPlot)+1) # Erosion and deposition
+modelCRS = [None] * (max(modelPlot)+1) # Model CRS
+
+for i in modelPlot:
+    file_nc_map = Path(runLog['Run Location'][i]) / dataPath
+
+    # Open Map file as xarrray Dataset
+    d3dfm_DataArray[i] = dfmt.open_partitioned_dataset(file_nc_map.as_posix())
+
+    # Get Var info
+    vars_pd = dfmt.get_ncvarproperties(d3dfm_DataArray[i])
+
+    # Get last timestep
+    tStep = d3dfm_DataArray[i].time[-1].values
+
+    # Import sand sediment class
+    sedMass[i] = d3dfm_DataArray[i].mesh2d_bodsed[-1, :, 2].compute()  # sediment-fraction index 2 -- presumably the sand class; confirm against the mor file
+    initialBed[i]= d3dfm_DataArray[i].mesh2d_s1[0, :] - d3dfm_DataArray[i].mesh2d_waterdepth[0, :]  # bed elevation = water level - water depth
+    finalBed = d3dfm_DataArray[i].mesh2d_s1[-1, :] - d3dfm_DataArray[i].mesh2d_waterdepth[-1, :]
+
+    erodeSed[i] = finalBed - initialBed[i]  # positive = deposition, negative = erosion
+    erodeSed[i] = erodeSed[i].compute()
+    initialBed[i] = initialBed[i].compute()
+
+    modelCRS[i] = vars_pd['EPSG_code']['projected_coordinate_system']
+
+#%% Define axis limits -- four parallel lists; index selects one of the four subplot views below
+axLim_Xmin = []
+axLim_Xmax = []
+
+axLim_Ymin = []
+axLim_Ymax = []
+
+# Set axis limits
+# Full domain
+axLim_Xmin.append(298500)
+axLim_Xmax.append(306800)
+
+axLim_Ymin.append(58500)
+axLim_Ymax.append(68000)
+
+# Zoom North River
+axLim_Xmin.append(303500)
+axLim_Xmax.append(305500)
+
+axLim_Ymin.append(66000)
+axLim_Ymax.append(68000)
+
+# Zoom Tribs
+axLim_Xmin.append(305500)
+axLim_Xmax.append(306800)
+
+axLim_Ymin.append(60000)
+axLim_Ymax.append(62200)
+
+# Zoom far trib
+axLim_Xmin.append(305800)
+axLim_Xmax.append(305900)
+
+axLim_Ymin.append(60150)
+axLim_Ymax.append(60400)
+
+
+#%% Plot using DFM functions -- 4-panel figure per run (one panel per zoom level); swap the commented plot/savefig pairs to switch variable
+
+for i in modelPlot:
+    # Create figure
+    fig, axesFig = plt.subplots(1, 4, figsize=(8, 2))
+
+    axes = axesFig.flatten()
+
+    # Loop through axis and plot with different limits
+    for subIDX in range(0, 4):
+
+        # # Plot Sediment Mass
+        # smp = sedMass[i].ugrid.plot(ax=axes[subIDX], edgecolors='face', cmap='turbo',
+        #                             vmin=0, vmax=2000, add_colorbar=False)
+
+        # # Plot Erosion
+        # smp = erodeSed[i].ugrid.plot(ax=axes[subIDX], edgecolors='face', cmap='coolwarm',
+        #                              vmin=-1, vmax=1, add_colorbar=False)
+
+        # Plot Bed
+        smp = initialBed[i].ugrid.plot(ax=axes[subIDX], edgecolors='face', cmap='nipy_spectral',
+                                       vmin=-25, vmax=0, add_colorbar=False)
+
+
+        # Set axis labels
+        axes[subIDX].set_xlabel('')
+        axes[subIDX].set_ylabel('')
+        axes[subIDX].set_title('')
+        axes[subIDX].set_xticks([])
+        axes[subIDX].set_yticks([])
+
+        axes[subIDX].set_xlim(axLim_Xmin[subIDX], axLim_Xmax[subIDX])
+        axes[subIDX].set_ylim(axLim_Ymin[subIDX], axLim_Ymax[subIDX])
+
+        # Add basemap
+        # ctx.add_basemap(ax=axes, source=ctx.providers.Esri.WorldImagery, crs=modelCRS[i], attribution=False)
+        mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
+        ctx.add_basemap(axes[subIDX], source=mapbox, crs=modelCRS[i])
+
+
+    # cbar = plt.colorbar(smp, ax=axesFig.ravel().tolist(), label='Sand Mass [kg/m2]')
+    # cbar = plt.colorbar(smp, ax=axesFig.ravel().tolist(), label='Erosion/ deposition [m]')
+    cbar = plt.colorbar(smp, ax=axesFig.ravel().tolist(), label='Initial Bed [mNAVD88]')  # smp from the last panel; all panels share vmin/vmax so one colorbar is valid
+
+    plt.show()
+
+    # fig.savefig('C:/Users/arey/files/Projects/Newtown/SedFigs2/SedMass_' + runLog['Run'][i] + '.png',
+    #             bbox_inches='tight', dpi=300)
+
+    # fig.savefig('C:/Users/arey/files/Projects/Newtown/SedFigs2/ErodeDep_' + runLog['Run'][i] + '.png',
+    #             bbox_inches='tight', dpi=300)
+
+    fig.savefig('C:/Users/arey/files/Projects/Newtown/SedFigs2/InitialBed_' + runLog['Run'][i] + '.png',
+                bbox_inches='tight', dpi=300)
+
+#%% Plot using DFM functions -- max shear stress mapped onto sediment-class bands via a custom colormap
+#Cmap Path
+cmap_path = 'C:/Users/arey/Repo/MATLAB_Q/Downloads/KeyColormaps/'
+
+for i in modelPlot:
+    fig, axes = plt.subplots(nrows=1, ncols=1, subplot_kw={'projection': ccrs.epsg(32118)}, figsize=(6, 6))
+    fig.patch.set_facecolor('white')
+
+    fig.tight_layout(pad=3.0)
+
+    # Load cmaps
+    cmap = 'AJMR_Sed5_RevE.xml'
+    plotcmap = cm.make_cmap(cmap_path + cmap)
+
+    pc = plot_netmapdata(ugrid_all[i].verts, values=modelMaxShear[i][0, :],
+                         ax=axes, linewidth=0.5, cmap=plotcmap)  # NOTE(review): plot_netmapdata / ugrid_all / modelMaxShear are not defined in this file -- presumably from the old dfm_tools API; confirm before running this cell
+
+    axes.set_xlim(305900, 306400)
+    axes.set_ylim(61500, 62200)
+
+    # axes.quiver(modelX[i], modelX[i], U[i], V[i], color='w', scale=1.00)
+
+    mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
+    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32118')
+
+    extent = axes.get_extent()
+    axes.set_xticks(np.linspace(extent[0], extent[1], 4))
+    axes.set_yticks(np.linspace(extent[2], extent[3], 4))
+
+    pc.set_clim([0, 0.145])
+    cbarTicks = [0, 0.0378, 0.063, 0.0826, 0.11, 0.145]  # critical-shear thresholds bounding each sediment class band
+    cbar = fig.colorbar(pc, ax=axes, shrink=0.95,
+                        ticks=cbarTicks)
+    cbar.set_label('Total Max Shear Stress [N/m^2]')
+
+    # Add label on colorbar
+    cbar.ax.text(0.08, (cbarTicks[0]+cbarTicks[1])/2, 'CLAY', ha='center', va='center',
+                 rotation=90, fontweight='bold')
+    cbar.ax.text(0.08, (cbarTicks[1]+cbarTicks[2])/2, 'FSILT', ha='center', va='center',
+                 rotation=90, fontweight='bold')
+    cbar.ax.text(0.08, (cbarTicks[2]+cbarTicks[3])/2, 'MSILT', ha='center', va='center',
+                 rotation=90, fontweight='bold')
+    cbar.ax.text(0.08, (cbarTicks[3]+cbarTicks[4])/2, 'CSILT', ha='center', va='center',
+                 rotation=90, fontweight='bold')
+    cbar.ax.text(0.08, (cbarTicks[4]+cbarTicks[5])/2, 'FSAND', ha='center', va='center',
+                 rotation=90, fontweight='bold')
+
+    plt.show()
+
+    fig.savefig('C:/Users/arey/files/Projects/Newtown/SedFigs/Shear_Class_' + runLog['Run'][i] + '.png',
+                bbox_inches='tight', dpi=300)
+
+#%% Import Model results for animations -- NOTE(review): get_ncvardimlist / get_timesfromnc / get_netdata / get_ncmodeldata are bare names from the legacy dfm_tools API and are never imported here (only `import dfm_tools as dfmt` appears above); this cell would raise NameError as written -- confirm the intended import
+
+# modelPlot = [20, 22]
+# modelPlot = [18, 20, 21, 22]
+# modelPlot = [18, 21]
+modelPlot = [25, 26]  # rebinds modelPlot for the animation section (run-number indexed, as above)
+
+modelSedConc = [None] * (max(modelPlot)+1)
+modelMaxShear_animate = [None] * (max(modelPlot)+1)
+ugrid_all = [None] * (max(modelPlot)+1)
+tSteps = [None] * (max(modelPlot)+1)
+
+for i in modelPlot:
+    file_nc_map = Path(runLog['Run Location'][i]) / dataPath
+
+    # Get Var info
+    vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc_map.as_posix())
+
+    # Import
+    tSteps[i] = get_timesfromnc(file_nc=file_nc_map.as_posix(), varname='time')
+
+    ugrid_all[i] = get_netdata(file_nc=file_nc_map.as_posix())
+    modelSedConc[i] = get_ncmodeldata(file_nc=file_nc_map.as_posix(), varname='mesh2d_sedfrac_concentration',
+                                      timestep='all', station=0, layer=0) #timestep='all' OR timestep=range(0, 30)
+    modelMaxShear_animate[i] = get_ncmodeldata(file_nc=file_nc_map.as_posix(), varname='mesh2d_tausmax',
+                                               timestep='all')
+
+
+# %% Import water levels from hist -- NOTE(review): same legacy dfm_tools bare-name issue as the cell above (get_ncvardimlist etc. never imported)
+modelHistWL = [None] * (max(modelPlot)+1)
+modelHistTime = [None] * (max(modelPlot)+1)
+# tSteps = [None] * (max(modelPlot)+1)
+
+for i in modelPlot:
+    file_nc_hist = Path(runLog['Run Location'][i]) / histPath
+
+    vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc_hist.as_posix())
+
+    # Get station names
+    histStations = get_hisstationlist(file_nc=file_nc_hist.as_posix(), varname='waterlevel')
+
+    # Import
+    modelHistTime[i] = get_timesfromnc(file_nc=file_nc_hist.as_posix(), varname='time')
+    modelHistWL[i] = get_ncmodeldata(file_nc=file_nc_hist.as_posix(), varname='waterlevel',
+                                     timestep='all', station=1)  # station index 1 only -- presumably the gauge used for the inset WL plot; confirm against histStations
+
+
+#%% Sediment animations
+plt.rcParams['animation.ffmpeg_path'] = \
+ 'C:/Users/arey/Local/ffmpeg-2022-02-14-git-59c647bcf3-full_build/bin/ffmpeg.exe'
+
+mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckex9vtri0o6619p55sl5qiyv/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
+x, y, arrow_length = 0.93, 0.95, 0.12
+# fontprops = fm.FontProperties(size=12)
+
+# Set model to plot
+# modelPlot = [20, 22]
+# modelPlot = [18, 20, 21, 22]
+modelPlot = [25, 26]
+cmap_path = 'C:/Users/arey/Repo/MATLAB_Q/Downloads/KeyColormaps/'
+
+for m in modelPlot:
+ vmax = 0.75
+ vmin = 0
+ # vmax = 0.145
+ # vmin = 0
+
+ cbarTicks = [0, 0.0378, 0.063, 0.0826, 0.11, 0.145]
+
+ cmapName = 'turbo'
+ cmap = mpl.cm.turbo
+ # cmapName = 'AJMR_Sed5_RevE.xml'
+ # cmap = cm.make_cmap(cmap_path + cmapName)
+
+
+ # Zoom limits
+ # xLimits = [305900, 306500]
+ # yLimits = [61400, 62200]
+
+ # Wide Limits
+ xLimits = [302719.4, 306934.2]
+ yLimits = [60263.7, 64194.8]
+
+ # Setup Video
+ metadata = dict(title='NTC Sediment Animation', artist='Matplotlib',
+ comment='AJMR June 28, 2022')
+ writer = animation.FFMpegWriter(fps=2, metadata=metadata, codec='h264', bitrate=5000)
+ fig, axes = plt.subplots(figsize=(6, 6))
+
+ writer.setup(fig, '//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/' +
+ '06_Models/04_Delft3D/Figures/SedConc/HQ3_Wide_RevC_Long_Sed_Cond_'
+ + runLog['Run'][m] + '.mp4')
+
+ mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
+
+ for i in np.arange(1, len(modelSedConc[m])-1, 1): #289
+ # Clear axes for video
+ axes.cla()
+
+ # Create new figure for stills
+ # fig, axes = plt.subplots(figsize=(6, 6))
+
+ # Clear inset WL Plot
+ # if i > 1:
+ # axin1.cla()
+
+ # Plot sediment
+ pc = plot_netmapdata(ugrid_all[m].verts, values=modelSedConc[m][i, 0, :, 0],
+ ax=axes, cmap=cmapName,
+ antialiaseds=False)
+
+ # Plot shear
+ # pc = plot_netmapdata(ugrid_all[m].verts, values=modelMaxShear_animate[m][i,:],
+ # ax=axes, cmap=cmap,
+ # antialiaseds=False)
+
+ # Set map limits
+ axes.set_xlim(xLimits[0], xLimits[1])
+ axes.set_ylim(yLimits[0], yLimits[1])
+
+ # Add basemap
+ ctx.add_basemap(axes, source=mapbox, crs='EPSG:32118')
+
+ # Set map ticks
+ axes.set_xticks(np.linspace(xLimits[0], xLimits[1], 4))
+ axes.set_yticks(np.linspace(yLimits[0], yLimits[1], 4))
+
+ # Color Limits
+ pc.set_clim([vmin, vmax])
+
+ # Add title
+ axes.title.set_text('Bottom Sediment Concentration at: ' + tSteps[m][i].strftime("%Y/%m/%d %H:%M"))
+ # axes.title.set_text('Total Max Shear Stress at: ' + str(tSteps[m][i]))
+
+ # Add inset wl plot
+ axin1 = axes.inset_axes([0.1, 0.1, 0.4, 0.15])
+ axin1.plot(modelHistTime[m], modelHistWL[m])
+ # Add current point
+ # Find closest time
+ abs_deltas_from_target_date = np.absolute(modelHistTime[m] - tSteps[m][i])
+ axin1.scatter(modelHistTime[m][np.argmin(abs_deltas_from_target_date)], modelHistWL[m][np.argmin(abs_deltas_from_target_date)], 10, 'r')
+
+ # axin1.set_xticks([modelHistTime[m][0], modelHistTime[m][len(modelHistTime[m])-1]])
+ axin1.set_xticks([])
+ axin1.set_yticks([])
+
+ plt.setp(axin1.get_xticklabels(), backgroundcolor="white")
+ plt.setp(axin1.get_yticklabels(), backgroundcolor="white")
+
+ if i == 1: #i < 1000
+ fig.subplots_adjust(right=0.8)
+ norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
+
+ # Create colorbar axis
+ # cax = plt.axes([0.82, 0.25, 0.04, 0.5])
+ cax = plt.axes([0.85, 0.15, 0.04, 0.7])
+
+ # Add colorbar with designated tick marks
+ # cb1 = mpl.colorbar.ColorbarBase(cax, cmap=cmap,
+ # norm=norm,
+ # orientation='vertical',
+ # ticks=cbarTicks)
+
+ # Add colorbar without designated tick marks
+ cb1 = mpl.colorbar.ColorbarBase(cax, cmap=cmap,
+ norm=norm,
+ orientation='vertical')
+
+ # Colorbar label
+ cb1.set_label('Sediment Concentration [$kg/m^3$]')
+ # cb1.set_label('Total Max Shear Stress [N/m^2]')
+
+ # Add label on colorbar
+ # cb1.ax.text(0.08, (cbarTicks[0] + cbarTicks[1]) / 2, 'CLAY', ha='center', va='center',
+ # rotation=90, fontweight='bold')
+ # cb1.ax.text(0.08, (cbarTicks[1] + cbarTicks[2]) / 2, 'FSILT', ha='center', va='center',
+ # rotation=90, fontweight='bold')
+ # cb1.ax.text(0.08, (cbarTicks[2] + cbarTicks[3]) / 2, 'MSILT', ha='center', va='center',
+ # rotation=90, fontweight='bold')
+ # cb1.ax.text(0.08, (cbarTicks[3] + cbarTicks[4]) / 2, 'CSILT', ha='center', va='center',
+ # rotation=90, fontweight='bold')
+ # cb1.ax.text(0.08, (cbarTicks[4] + cbarTicks[5]) / 2, 'FSAND', ha='center', va='center',
+ # rotation=90, fontweight='bold')
+
+
+ # Change label font size for only the primary axis
+ for item in ([axes.xaxis.label, axes.yaxis.label] +
+ axes.get_xticklabels() + axes.get_yticklabels()):
+ item.set_fontsize(4)
+
+
+ # Save video frame
+ writer.grab_frame()
+ plt.show()
+ print(i)
+
+ # Save stills
+ fig.savefig('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/' +
+ '06_Models/04_Delft3D/Figures/SedConc/Wide_RevC_SedConc_' +
+ runLog['Run'][m] + '_Frame_' + str(i) + '.png',
+ bbox_inches='tight', dpi=300)
+ # fig.savefig('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/' +
+ # '06_Models/04_Delft3D/Figures/ShearStress/Wide_ShearStress_' +
+ # runLog['Run'][m] + '_Frame_' + str(i) + '.png',
+ # bbox_inches='tight', dpi=300)
+
+ # Save video
+ writer.finish()
+
diff --git a/NTC_DFM/Plot_Meas_versus_Model_Temp_Sal_EFDC_Delft3D_TM_2014-2015.py b/NTC_DFM/Plot_Meas_versus_Model_Temp_Sal_EFDC_Delft3D_TM_2014-2015.py
new file mode 100644
index 0000000..030eec0
--- /dev/null
+++ b/NTC_DFM/Plot_Meas_versus_Model_Temp_Sal_EFDC_Delft3D_TM_2014-2015.py
@@ -0,0 +1,411 @@
+#%%
+#Project: 11934.201 Newtown Creek TPP - Privileged and Confidential
+#Confidentiality Note: For internal discussion only.
+#Description: This code creates plots for comparing temperature, salinity between the EFDC model, Delft3DFM, and the measured data.
+#To run, ensure that the model log is saved locally, and change the path in the code.
+
+#%% # -*- coding: utf-8 -*-
+"""
+Created on Wed Feb 8 11:14:28 2023
+
+@author: mrobinson
+"""
+import pandas as pd
+import numpy as np
+import matplotlib.pyplot as plt
+import scipy
+from scipy.interpolate import interp1d
+from scipy import interpolate
+
+from numpy import trapz
+
+from datetime import datetime, timedelta
+from matplotlib import dates as mpl_dates
+from matplotlib.ticker import (MultipleLocator, AutoMinorLocator)
+
+import os
+import pandas as pd
+import numpy as np
+from sklearn.metrics import mean_squared_error
+import geopandas as gp
+import pytz
+
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+import matplotlib.animation as animation
+
+import dfm_tools as dfmt
+
+import cartopy.crs as ccrs
+import contextily as ctx
+
+from shapely.geometry import Point, MultiPoint
+from shapely.ops import nearest_points
+
+import pathlib as pl
+import xarray as xr
+
+from datetime import timedelta
+import datetime as datetime
+
+
+#%% Load in Measured data
+
+dfIN=pd.read_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/21_YSI Data/D1-D9_Combined/Combined_with_2015/All_Combine.csv')
+
+# Filter out invalid data
+df1 = dfIN
+df1.loc[df1['Temp'] < -30, 'Temp'] = np.nan
+df1.loc[df1['Sal'] < 0, 'Sal'] = np.nan
+df1['DateTime'] = pd.to_datetime(df1['Date Time Local'])
+df1.set_index('DateTime', inplace=True)
+
+df1.index = df1.index.tz_localize(pytz.timezone('America/New_York'),
+ ambiguous='NaT', nonexistent='NaT')
+
+df1.index = df1.index.tz_convert(pytz.utc)
+
+# Convert to Dataframe
+YSI_df = pd.DataFrame(df1, columns=['Station', 'Temp', 'Sal', 'Depth'])
+# Set index as DateTime
+
+#Change Station of interest (EB043, EK108, NC310, NC313, NC316, NC318)
+Station=['EB043','EK108','NC310','NC313','NC316','NC318']
+x=4
+i=Station[x]
+
+
+Station_interest=i
+Top=str(Station_interest)+str('A')
+Bot=str(Station_interest)+str('C')
+
+
+#%% Load in EFDC Output
+FILE=r"//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/06_Models/02_EFDC/06_EFDC_Outputs/2014-2015/EFDC_Compiled_2014_2015.xlsx"
+
+EFDC_dat = pd.read_excel(FILE, None)
+
+# Loop through stations and add to dataframe
+Stations=['EB043', 'EK108', 'NC310', 'NC313', 'NC316', 'NC318']
+for Stat in Stations:
+ # Set index as DateTime
+ EFDC_dat[Stat].set_index('date fix', inplace=True)
+ EFDC_dat[Stat].index = pd.to_datetime(EFDC_dat[Stat].index).tz_localize(
+ pytz.timezone('UTC'))
+
+#%% Load in Telemac Output
+
+FILE_T=r"C:/Users/arey/Downloads/Results_Summarized_Telemac.xlsx"
+
+TM_dat = pd.read_excel(FILE_T, None)
+
+# Loop through stations and adjust times
+Stations=['EB043', 'EK108', 'NC310', 'NC313', 'NC316', 'NC318']
+for Stat in Stations:
+ # Set index as DateTime
+ TM_dat[Stat].set_index('Date_Time', inplace=True)
+ TM_dat[Stat].index = pd.to_datetime(TM_dat[Stat].index).tz_localize(
+ pytz.timezone('UTC'))
+
+#%% Load in Water Level Data
+ # Field Gauge-EAST
+FFG_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/FieldFacility.csv")
+FFG_pd['DateTime'] = pd.to_datetime(FFG_pd.Date_time)
+FFG_pd.set_index(FFG_pd['DateTime'], inplace=True)
+# Put in UTC from Eastern Time (with DST!)
+FFG_pd = FFG_pd.tz_localize(
+ pytz.timezone('US/Eastern'), ambiguous='infer').tz_convert(pytz.utc)
+
+# National Grid Gauge-WEST
+NGG1_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid1.csv")
+NGG1_pd['DateTime'] = pd.to_datetime(NGG1_pd.Date_time)
+NGG1_pd.set_index(NGG1_pd['DateTime'], inplace=True)
+# Put in UTC from Eastern Time (with DST!)
+NGG1_pd = NGG1_pd.tz_localize(
+ pytz.timezone('US/Eastern'), ambiguous='NaT').tz_convert(pytz.utc)
+# Drop ambiguous times
+NGG1_pd = NGG1_pd.dropna()
+
+NGG2_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid2.csv")
+NGG2_pd['DateTime'] = pd.to_datetime(NGG2_pd.datetime)
+NGG2_pd.set_index(NGG2_pd['DateTime'], inplace=True)
+# Put in UTC from Eastern Time (with DST!)
+NGG2_pd = NGG2_pd.tz_localize(
+ pytz.timezone('US/Eastern'), ambiguous='NaT').tz_convert(pytz.utc)
+# Drop ambiguous times
+NGG2_pd = NGG2_pd.dropna()
+
+
+#%% Load in Delft Results
+runLog = pd.read_excel('C:/Users/arey/files/Projects/Newtown/Model Log NTC.xlsx', 'ModelLog')
+
+
+dataPath = "FlowFM_map.nc"
+histPath = "FlowFM_his.nc"
+
+#read in time series
+modelPlot = [105]#, 106]
+
+Delft_WL = [None] * (max(modelPlot)+1)
+Delft_T_B = [None] * (max(modelPlot)+1)
+Delft_S_B = [None] * (max(modelPlot)+1)
+Delft_T_T = [None] * (max(modelPlot)+1)
+Delft_S_T = [None] * (max(modelPlot)+1)
+
+
+# Note, this uses the standard netcdf lib + xarray
+for i in modelPlot:
+ file_nc_hist = pl.Path(runLog['Run Location'][i],
+ 'FlowFM', 'dflowfm', 'output', 'FlowFM_his.nc')
+ # file_nc_hist = "C:/Users/mrobinson/OneDrive - W.F. Baird & Associates Coastal Engineers Ltd/Documents/11934.202 NTC/06 Models/06 Delft3DFM/Results/Run129/FlowFM_his.nc"
+
+
+ # Hist file path
+ #file_nc_hist = file_nc_hist.as_posix()
+
+ # Open hist file dataset
+ data_xr = xr.open_mfdataset(file_nc_hist, preprocess=dfmt.preprocess_hisnc)
+
+ # Get stations
+ stations_pd = data_xr['stations'].to_dataframe()
+
+ # Get observations
+ #observations_pd=data_xr['stations'].to_dataframe()
+
+ # Convert to Pandas
+ Delft_WL[i] = data_xr.waterlevel.sel(stations=['NC310', 'NC313','NC316','NC318','EB043','EK108','West_WL']).to_pandas()
+
+ #Temp
+ Delft_T_B[i] = data_xr.temperature.sel(stations=['NC310', 'NC313','NC316','NC318','EB043','EK108'], laydim=0).to_pandas()
+ Delft_T_T[i] = data_xr.temperature.sel(stations=['NC310', 'NC313','NC316','NC318','EB043','EK108'], laydim=9).to_pandas()
+
+ #Salinity
+ Delft_S_B[i] = data_xr.salinity.sel(stations=['NC310', 'NC313','NC316','NC318','EB043','EK108'], laydim=0).to_pandas()
+ Delft_S_T[i] = data_xr.salinity.sel(stations=['NC310', 'NC313','NC316','NC318','EB043','EK108'], laydim=9).to_pandas()
+
+ # Put in UTC if needed
+ if i == 104 or i == 105:
+ Delft_WL[i] = Delft_WL[i].tz_localize(
+ pytz.timezone('EST')).tz_convert(pytz.utc)
+
+ Delft_T_B[i] = Delft_T_B[i].tz_localize(pytz.timezone('EST')).tz_convert(pytz.utc)
+ Delft_T_T[i] = Delft_T_T[i].tz_localize(pytz.timezone('EST')).tz_convert(pytz.utc)
+
+ Delft_S_B[i] = Delft_S_B[i].tz_localize(pytz.timezone('EST')).tz_convert(pytz.utc)
+ Delft_S_T[i] = Delft_S_T[i].tz_localize(pytz.timezone('EST')).tz_convert(pytz.utc)
+ else:
+ Delft_WL[i] = Delft_WL[i].tz_localize(
+ pytz.timezone('UTC'))
+
+
+#%% Plot Water Levels
+i = 105
+
+# Create Figure
+fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(18, 8), sharex=False, sharey=True)
+fig.suptitle('Comparison of Predicted and Measured Water Level at NGG', fontsize=18)
+
+# Plot Observations
+NGG2_pd['water_surface_elevation'].multiply(0.3048).plot(label='FFG', color='k', linewidth=2)
+
+# Plot Delft
+Delft_WL[i]['West_WL'].plot(label='Delft', color='r', linewidth=2)
+
+# Plot YSI
+YSI_df.loc[YSI_df['Station'] == 'NC318A', 'Depth'].add(-1.3).plot(
+ label='YSI', color='b', linewidth=2, ax=ax)
+
+# Plot EFDC
+EFDC_dat['NC318A'].loc[:, 'WSE_m'].plot(label='EFDC', color='m', linewidth=2, ax=ax)
+
+# Plot Telemac
+TM_dat['NC318'].loc[:, 'WSE_m'].plot(label='EFDC', color='m', linewidth=2, ax=ax)
+
+
+# Set Time Bounds
+dateMin = pd.to_datetime(datetime.datetime(2014, 11, 1, 0, 0, 0, 0), utc=True)
+dateMax = pd.to_datetime(datetime.datetime(2014, 11, 13, 0, 0, 0, 0), utc=True)
+
+# Set X Axis
+ax.set_xlim(dateMin, dateMax)
+
+# Set Y Axis
+ax.set_ylim(-1.5, 2.5)
+
+plt.show()
+
+#%% Plot of Temperature AJMR
+def resampleToObs(model, obs, obsMatchFreq):
+ # Resample to observation time
+ modelSampled = model.resample(obsMatchFreq).interpolate().reindex(
+ obs.index)
+ return modelSampled
+
+def syncModelRMSE(model, obs):
+ rmseOut = mean_squared_error(obs.loc[model.notna() & obs.notna()].dropna(
+ ).values, model.loc[model.notna() & obs.notna()].dropna(
+ ).values,squared=False)
+ return rmseOut
+
+degree_sign = u'\N{DEGREE SIGN}'
+
+
+# Create Figure
+fig, ax = plt.subplots(nrows=2, ncols=6, figsize=(16, 7), sharex=True, sharey=True)
+# fig.suptitle('Comparison of Predicted and Measured Temperature at NC316', fontsize=18)
+
+# Set Time Bounds for all axes since linked
+# dateMin = pd.to_datetime(datetime.datetime(2015, 7, 15, 0, 0, 0, 0), utc=True)
+# dateMax = pd.to_datetime(datetime.datetime(2015, 9, 1, 0, 0, 0, 0), utc=True)
+# dateMin = pd.to_datetime(datetime.datetime(2015, 7, 28, 0, 0, 0, 0), utc=True)
+# dateMax = pd.to_datetime(datetime.datetime(2015, 8, 7, 0, 0, 0, 0), utc=True)
+dateMin = pd.to_datetime(datetime.datetime(2014, 7, 1, 0, 0, 0, 0), utc=True)
+dateMax = pd.to_datetime(datetime.datetime(2015, 10, 1, 0, 0, 0, 0), utc=True)
+
+
+# Loop through stations and plot
+for StatIDX, Stat in enumerate(Stations):
+ ax[0, StatIDX].set_title(Stat)
+
+ # Resample to 5 minute data, reindex to observed and calculate RMSE
+ ObsDat = YSI_df.loc[YSI_df['Station'] == Stat + 'A', 'Temp']
+ ObsDat = ObsDat.loc[(ObsDat.index > dateMin) & (ObsDat.index <= dateMax)]
+
+ DelftInterp = resampleToObs(Delft_T_T[i][Stat], ObsDat, '5T')
+ EFDCInterp = resampleToObs(EFDC_dat[Stat].loc[:, 'TEM10'].resample('10T').nearest(), ObsDat, '5T')
+ # TMInterp = resampleToObs(TM_dat[Stat].loc[:, 'TEM10'].resample('15T').nearest(), ObsDat, '5T')
+
+ RMSE_D = syncModelRMSE(DelftInterp, ObsDat)
+ RMSE_E = syncModelRMSE(EFDCInterp, ObsDat)
+ # RMSE_T = syncModelRMSE(TMInterp, ObsDat)
+
+ # Plot Top Layer Temperature
+ ObsDat.plot(label='YSI', color='k', linewidth=2, ax=ax[0, StatIDX])
+ ax[0, StatIDX].plot(Delft_WL[i].index, Delft_T_T[i][Stat], linestyle='-', label='Delft3D: ' + str(round(RMSE_D, 2)) + degree_sign + 'C')
+ EFDC_dat[Stat].loc[:, 'TEM10'].plot(linewidth=1, ax=ax[0, StatIDX], label='EFDC: ' + str(round(RMSE_E, 2)) + degree_sign + 'C')
+ # TM_dat[Stat].loc[:, 'TEM10'].plot(linewidth=1, ax=ax[0, StatIDX], label='Telemac: ' + str(round(RMSE_T, 2)) + degree_sign + 'C')
+
+ ax[0, StatIDX].legend(loc='lower right')
+
+
+ # Plot Bottom Layer Temperature
+ # Resample to 5 minute data, reindex to observed and calculate RMSE
+ ObsDat = YSI_df.loc[YSI_df['Station'] == Stat + 'C', 'Temp']
+ ObsDat = ObsDat.loc[(ObsDat.index > dateMin) & (ObsDat.index <= dateMax)]
+
+ DelftInterp = resampleToObs(Delft_T_B[i][Stat], ObsDat, '5T')
+ EFDCInterp = resampleToObs(EFDC_dat[Stat].loc[:, 'TEM1'].resample('10T').nearest(), ObsDat, '5T')
+ # TMInterp = resampleToObs(TM_dat[Stat].loc[:, 'TEM1'].resample('15T').nearest(), ObsDat, '5T')
+
+ RMSE_D = syncModelRMSE(DelftInterp, ObsDat)
+ RMSE_E = syncModelRMSE(EFDCInterp, ObsDat)
+ # RMSE_T = syncModelRMSE(TMInterp, ObsDat)
+
+ ObsDat.plot(label='YSI', color='k', linewidth=2, ax=ax[1, StatIDX])
+ ax[1, StatIDX].plot(Delft_WL[i].index, Delft_T_B[i][Stat], linestyle='-', label='Delft3D: ' + str(round(RMSE_D, 2)) + degree_sign + 'C')
+ EFDC_dat[Stat].loc[:, 'TEM1'].plot(linewidth=1, ax=ax[1, StatIDX], label='EFDC: ' + str(round(RMSE_E, 2)) + degree_sign + 'C')
+ # TM_dat[Stat].loc[:, 'TEM1'].plot(linewidth=1, ax=ax[1, StatIDX], label='Telemac: ' + str(round(RMSE_T, 2)) + degree_sign + 'C')
+
+ # Add Legend and Set X Axis label
+ ax[1, StatIDX].legend(loc='lower right')
+ ax[1, StatIDX].set_xlabel('UTC Date')
+
+
+# Set X Axis
+ax[0, 0].set_xlim(dateMin, dateMax)
+ax[0, 0].xaxis.set_major_formatter(
+ mpl_dates.ConciseDateFormatter(ax[0, 0].xaxis.get_major_locator()))
+
+# Set Y Axis
+# ax[0, 0].set_ylim(18, 28)
+ax[0, 0].set_ylim(-10, 30)
+ax[0, 0].set_ylabel('Temperature Top (' + degree_sign + 'C)')
+ax[1, 0].set_ylabel('Temperature Bottom (' + degree_sign + 'C)')
+
+# Reduce spacing between subplots
+plt.tight_layout()
+plt.show()
+
+# Save figure
+fig.savefig('C:/Users/arey/files/Projects/Newtown/TempSalFigs/Temperature_All.png',
+ bbox_inches='tight', dpi=300)
+
+
+#%% Plot Salinity AJMR
+Stations=['NC310', 'NC313', 'NC316', 'NC318', 'EB043', 'EK108']
+
+# Create Figure
+fig, ax = plt.subplots(nrows=2, ncols=6, figsize=(16, 7), sharex=True, sharey=True)
+
+
+# Set Time Bounds for all axes since linked
+# dateMin = pd.to_datetime(datetime.datetime(2015, 7, 15, 0, 0, 0, 0), utc=True)
+# dateMax = pd.to_datetime(datetime.datetime(2015, 9, 1, 0, 0, 0, 0), utc=True)
+dateMin = pd.to_datetime(datetime.datetime(2015, 7, 28, 0, 0, 0, 0), utc=True)
+dateMax = pd.to_datetime(datetime.datetime(2015, 8, 7, 0, 0, 0, 0), utc=True)
+
+
+# Loop through stations and plot
+for StatIDX, Stat in enumerate(Stations):
+ ax[0, StatIDX].set_title(Stat)
+
+ # Resample to 5 minute data, reindex to observed and calculate RMSE
+ ObsDat = YSI_df.loc[YSI_df['Station'] == Stat + 'A', 'Sal']
+ ObsDat = ObsDat.loc[(ObsDat.index > dateMin) & (ObsDat.index <= dateMax)]
+
+ DelftInterp = resampleToObs(Delft_S_T[i][Stat], ObsDat, '5T')
+ EFDCInterp = resampleToObs(EFDC_dat[Stat].loc[:, 'SAL10'].resample('10T').nearest(), ObsDat, '5T')
+ # TMInterp = resampleToObs(TM_dat[Stat].loc[:, 'SAL10'].resample('15T').nearest(), ObsDat, '5T')
+
+ RMSE_D = syncModelRMSE(DelftInterp, ObsDat)
+ RMSE_E = syncModelRMSE(EFDCInterp, ObsDat)
+ # RMSE_T = syncModelRMSE(TMInterp, ObsDat)
+
+ # Plot Top Layer Salinity
+ ObsDat.plot(label='YSI', color='k', linewidth=2, ax=ax[0, StatIDX])
+ ax[0, StatIDX].plot(Delft_S_T[i].index, Delft_S_T[i][Stat], linestyle='-', label='Delft3D: ' + str(round(RMSE_D, 2)) + ' psu')
+ EFDC_dat[Stat].loc[:, 'SAL10'].plot(linewidth=1, ax=ax[0, StatIDX], label='EFDC: ' + str(round(RMSE_E, 2)) + ' psu')
+ # TM_dat[Stat].loc[:, 'SAL10'].plot(linewidth=1, ax=ax[0, StatIDX], label='Telemac: ' + str(round(RMSE_T, 2)) + ' psu')
+
+ ax[0, StatIDX].legend(loc='lower right')
+
+ # Resample to 5 minute data, reindex to observed and calculate RMSE
+ ObsDat = YSI_df.loc[YSI_df['Station'] == Stat + 'C', 'Sal']
+ ObsDat = ObsDat.loc[(ObsDat.index > dateMin) & (ObsDat.index <= dateMax)]
+
+ DelftInterp = resampleToObs(Delft_S_B[i][Stat], ObsDat, '5T')
+ EFDCInterp = resampleToObs(EFDC_dat[Stat].loc[:, 'SAL1'].resample('10T').nearest(), ObsDat, '5T')
+ # TMInterp = resampleToObs(TM_dat[Stat].loc[:, 'SAL1'].resample('15T').nearest(), ObsDat, '5T')
+
+ RMSE_D = syncModelRMSE(DelftInterp, ObsDat)
+ RMSE_E = syncModelRMSE(EFDCInterp, ObsDat)
+ # RMSE_T = syncModelRMSE(TMInterp, ObsDat)
+
+ # Plot Bottom Layer Salinity
+ ObsDat.plot(label='YSI', color='k', linewidth=2, ax=ax[1, StatIDX])
+ ax[1, StatIDX].plot(Delft_S_B[i].index, Delft_S_B[i][Stat], linestyle='-', label='Delft3D: ' + str(round(RMSE_D, 2)) + ' psu')
+ EFDC_dat[Stat].loc[:, 'SAL1'].plot(linewidth=1, ax=ax[1, StatIDX], label='EFDC: ' + str(round(RMSE_E, 2)) + ' psu')
+ # TM_dat[Stat].loc[:, 'SAL1'].plot(linewidth=1, ax=ax[1, StatIDX], label='Telemac: ' + str(round(RMSE_T, 2)) + 'psu')
+
+ # Add Legend and Set X Axis label
+ ax[1, StatIDX].legend(loc='lower right')
+ ax[1, StatIDX].set_xlabel('UTC Date')
+
+# Set X Axis
+ax[0, 0].set_xlim(dateMin, dateMax)
+ax[0, 0].xaxis.set_major_formatter(
+ mpl_dates.ConciseDateFormatter(ax[0, 0].xaxis.get_major_locator()))
+
+# Set Y Axis
+ax[0, 0].set_ylim(12, 27)
+ax[0, 0].set_ylabel('Salinity Top (psu)')
+ax[1, 0].set_ylabel('Salinity Bottom (psu)')
+
+# Reduce spacing between subplots
+plt.tight_layout()
+plt.show()
+
+# Save figure
+fig.savefig('C:/Users/arey/files/Projects/Newtown/TempSalFigs/Salinity_2015_Zoom.png',
+ bbox_inches='tight', dpi=300)
+
diff --git a/NTC_DFM/WATER_LEVEL_PLOTS_v2.py b/NTC_DFM/WATER_LEVEL_PLOTS_v2.py
new file mode 100644
index 0000000..19b2122
--- /dev/null
+++ b/NTC_DFM/WATER_LEVEL_PLOTS_v2.py
@@ -0,0 +1,455 @@
+#%%
+#Project: 11934.201 Newtown Creek TPP - Privileged and Confidential
+#Confidentiality Note: For internal discussion only.
+#Description: This code plots the water levels at the Field Facility Gauge and the National Grid gauge.
+#To run, ensure that the model log is saved locally, and change the path in the code.
+
+
+#%% # -*- coding: utf-8 -*-
+"""
+Created on Thu Feb 16 11:54:29 2023
+
+@author: mrobinson
+"""
+
+#%%Packages
+import os
+import pandas as pd
+import numpy as np
+import geopandas as gp
+import pytz
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+import matplotlib.animation as animation
+import dfm_tools as dfmt
+from dfm_tools.get_nc import get_netdata, get_ncmodeldata, plot_netmapdata
+from dfm_tools.get_nc_helpers import get_ncvarproperties, get_stationid_fromstationlist, get_varnamefromattrs
+from dfm_tools.get_nc_helpers import get_timesfromnc
+import contextily as ctx
+from shapely.geometry import Point, MultiPoint
+from shapely.ops import nearest_points
+import pathlib as pl
+import xarray as xr
+from datetime import timedelta
+import datetime as datetime
+from matplotlib import dates as mpl_dates
+from matplotlib.ticker import (MultipleLocator, AutoMinorLocator)
+from sklearn.metrics import mean_squared_error
+
+
+#%% Read Model Log
+
+runLog = pd.read_excel("C:/Users/mrobinson/OneDrive - W.F. Baird & Associates Coastal Engineers Ltd/Documents/11934.202 NTC/Model Log NTC.xlsx", 'ModelLog')
+
+dataPath = "FlowFM_map.nc"
+histPath = "FlowFM_his.nc"
+
+#%% Load in Water Level Data
+ # Field Gauge-EAST
+FFG_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/FieldFacility.csv")
+FFG_pd['DateTime'] = pd.to_datetime(FFG_pd.Date_time)
+FFG_pd.set_index(FFG_pd['DateTime'], inplace=True)
+# Put in UTC from Eastern Time (with DST!)
+FFG_pd = FFG_pd.tz_localize(
+ pytz.timezone('US/Eastern'), ambiguous='infer').tz_convert(pytz.utc)
+
+# National Grid Gauge-WEST
+NGG1_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid1.csv")
+NGG1_pd['DateTime'] = pd.to_datetime(NGG1_pd.Date_time)
+NGG1_pd.set_index(NGG1_pd['DateTime'], inplace=True)
+# Put in UTC from Eastern Time (with DST!)
+NGG1_pd = NGG1_pd.tz_localize(
+ pytz.timezone('US/Eastern'), ambiguous='NaT').tz_convert(pytz.utc)
+# Drop ambiguous times
+NGG1_pd = NGG1_pd.dropna()
+
+NGG2_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP – Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid2.csv")
+NGG2_pd['DateTime'] = pd.to_datetime(NGG2_pd.datetime)
+NGG2_pd.set_index(NGG2_pd['DateTime'], inplace=True)
+# Put in UTC from Eastern Time (with DST!)
+NGG2_pd = NGG2_pd.tz_localize(
+ pytz.timezone('US/Eastern'), ambiguous='NaT').tz_convert(pytz.utc)
+# Drop ambiguous times
+NGG2_pd = NGG2_pd.dropna()
+
+#%% Read in time series
+modelPlot = [105, 106, 107, 108]
+
+Delft_WL = [None] * (max(modelPlot)+1)
+
+# Note, this uses the standard netcdf lib + xarray
+for i in modelPlot:
+ file_nc_hist = pl.Path(runLog['Run Location'][i],
+ 'FlowFM', 'dflowfm', 'output', 'FlowFM_his.nc')
+
+ # Hist file path
+ file_nc_hist = file_nc_hist.as_posix()
+
+ # Open hist file dataset
+ data_xr = xr.open_mfdataset(file_nc_hist, preprocess=dfmt.preprocess_hisnc)
+
+ # Get Variables
+ vars_pd = get_ncvarproperties(data_xr)
+
+ # Get stations
+ stations_pd = data_xr['stations'].to_dataframe()
+
+ # Convert to Pandas
+ Delft_WL[i] = data_xr.waterlevel.sel(stations=['West_WL', 'East_WL']).to_pandas()
+
+ # Put in UTC if needed
+ if i == 104:
+ Delft_WL[i] = Delft_WL[i].tz_localize(
+ pytz.timezone('EST')).tz_convert(pytz.utc)
+ else:
+ Delft_WL[i] = Delft_WL[i].tz_localize(
+ pytz.timezone('UTC'))
+
+#%%
+modelPlot=[107]
+i=107
+
+interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
+ Delft_WL[modelPlot[0]].index[-1],
+ freq="20min")
+
+
+
+df1=Delft_WL[i]
+df1_index=df1.index.shift(periods=1,freq='0H')
+
+df2=pd.DataFrame(index=df1_index)
+df2['West_WL']=df1['West_WL']
+df2['East_WL']=df1['East_WL']
+df2['West_WL']=df2['West_WL'].shift(30)
+df2['East_WL']=df2['East_WL'].shift(30)
+
+x=(np.interp(interpTimes,FFG_pd.index,FFG_pd['Water Surface Elevation (ft)']*0.3048))
+x[x==-1.15332]=np.nan
+x[0:2584]=np.NAN
+
+x2= np.interp(interpTimes,NGG1_pd.index,NGG1_pd['Water Surface Elevation (ft)']*0.3048)
+x2[0:2638]=np.NAN
+
+
+#RMSE Values
+predNGG=np.interp(interpTimes,df2.index, df2['West_WL'])
+rms = mean_squared_error(predNGG[2638:], x2[2638:], squared=False)
+print(rms)
+
+predFG=np.interp(interpTimes,df2.index, df2['East_WL'])
+
+rms2 = mean_squared_error(predFG[2584:], x[2584:], squared=False)
+print(rms2)
+
+#Plot
+
+
+fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(20, 8))
+fig.patch.set_facecolor('white')
+#interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
+ # Delft_WL[modelPlot[0]].index[-1],
+ # freq="20min")
+
+
+axes[0].title.set_text('National Grid Gauge')
+axes[0].scatter(np.interp(interpTimes,df2.index, df2['West_WL']),
+ x2,s=1,
+ label='RMSE =' + str(round(rms,2)))
+
+
+axes[1].title.set_text('Field Facility Gauge')
+axes[1].scatter(np.interp(interpTimes,df2.index, df2['East_WL']), x,s=1,
+label='RMSE =' + str(round(rms2,2)))
+
+
+axes[1].set_xlim(-2, 2)
+axes[1].set_ylim(-2, 2)
+axes[0].set_xlim(-2, 2)
+axes[0].set_ylim(-2, 2)
+
+
+linepts = np.array([-3, 3])
+axes[0].plot(linepts, linepts, color='k')
+axes[1].plot(linepts, linepts, color='k')
+
+axes[0].set_xlabel('Modelled water level (m)')
+axes[0].set_ylabel('Measured water level (m)')
+
+axes[1].set_xlabel('Modelled water level (m)')
+axes[1].set_ylabel('Measured water level (m)')
+
+axes[1].grid()
+axes[0].grid()
+
+fig.suptitle(runLog['Plotting Legend Entry'][i])
+
+axes[1].legend(loc='lower left')
+axes[0].legend(loc='lower left')
+
+#%%
+modelPlot=[106]
+i=106
+
+#interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
+ # Delft_WL[modelPlot[0]].index[-1],
+ # freq="20min")
+
+#Note For 106 Run, period is from 2012-03-01 to 2012-10-30
+
+interpTimes = pd.date_range("2012-03-01", "2012-10-30", freq="20min")
+
+
+df1=Delft_WL[i]
+df1_index=df1.index.shift(periods=1,freq='0H')
+
+df2=pd.DataFrame(index=df1_index)
+df2['West_WL']=df1['West_WL']
+df2['East_WL']=df1['East_WL']
+df2['West_WL']=df2['West_WL'].shift(30)
+df2['East_WL']=df2['East_WL'].shift(30)
+
+x=(np.interp(interpTimes,FFG_pd.index,FFG_pd['Water Surface Elevation (ft)']*0.3048))
+x[x==-1.15332]=np.nan
+x[0:2584]=np.NAN
+
+x2= np.interp(interpTimes,NGG1_pd.index,NGG1_pd['Water Surface Elevation (ft)']*0.3048)
+x2[0:2638]=np.NAN
+
+
+#RMSE Values
+predNGG=np.interp(interpTimes,df2.index, df2['West_WL'])
+rms = mean_squared_error(predNGG[2638:], x2[2638:], squared=False)
+print(rms.round(3))
+
+predFG=np.interp(interpTimes,df2.index, df2['East_WL'])
+
+rms2 = mean_squared_error(predFG[2584:], x[2584:], squared=False)
+print(rms2.round(3))
+
+#Plot
+
+
+fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(18, 8))
+fig.patch.set_facecolor('white')
+plt.rcParams['font.size'] = 16
+#interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
+ # Delft_WL[modelPlot[0]].index[-1],
+ # freq="20min")
+
+
+axes[0].title.set_text('National Grid Gauge')
+axes[0].scatter(np.interp(interpTimes,df2.index, df2['West_WL']),
+ x2,s=1,
+ label='RMSE =' + str(round(rms,3)))
+
+
+axes[1].title.set_text('Field Facility Gauge')
+axes[1].scatter(np.interp(interpTimes,df2.index, df2['East_WL']), x,s=1,
+label='RMSE =' + str(round(rms2,3)))
+
+
+axes[1].set_xlim(-2, 2)
+axes[1].set_ylim(-2, 2)
+axes[0].set_xlim(-2, 2)
+axes[0].set_ylim(-2, 2)
+
+
+linepts = np.array([-3, 3])
+axes[0].plot(linepts, linepts, color='k')
+axes[1].plot(linepts, linepts, color='k')
+
+axes[0].set_xlabel('Modelled water level (m)')
+axes[0].set_ylabel('Measured water level (m)')
+
+axes[1].set_xlabel('Modelled water level (m)')
+axes[1].set_ylabel('Measured water level (m)')
+
+axes[1].grid()
+axes[0].grid()
+
+axes[0].set_aspect('equal')
+axes[1].set_aspect('equal')
+
+fig.suptitle(runLog['Plotting Legend Entry'][i])
+
+axes[1].legend(loc='lower left')
+axes[0].legend(loc='lower left')
+
+
+#%%
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+#%%
+
+fig, axes = plt.subplots(nrows=1,ncols=2,figsize=(20,10))
+
+axes[0].plot(FFG_pd.index,FFG_pd['Water Surface Elevation (ft)']*0.3048,color='red')
+axes[0].plot(df2.index,df2['East_WL'])
+
+
+axes[1].plot(NGG1_pd['DateTime'],NGG1_pd['Water Surface Elevation (ft)']*0.3048,color='red')
+axes[1].plot(df2.index,df2['West_WL'])
+
+axes[0].set_xlim([np.datetime64('2012-01-01'), np.datetime64('2013-01-01')])
+
+
+#%%
+modelPlot=[105]
+i=105
+
+
+
+interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
+ Delft_WL[modelPlot[0]].index[-1],
+ freq="20min")
+
+df1=Delft_WL[i]
+df1_index=df1.index.shift(periods=1,freq='0H')
+
+df2=pd.DataFrame(index=df1_index)
+df2['West_WL']=df1['West_WL']
+df2['East_WL']=df1['East_WL']
+df2['West_WL']=df2['West_WL'].shift(30)
+df2['East_WL']=df2['East_WL'].shift(30)
+
+x=(np.interp(interpTimes,FFG_pd.index,FFG_pd['Water Surface Elevation (ft)']*0.3048))
+x2=np.interp(interpTimes,df2.index, df2['West_WL'])
+
+pred=np.interp(interpTimes,df2.index, df2['West_WL'])
+meas=np.interp(interpTimes,NGG2_pd.index,NGG2_pd['water_surface_elevation']*0.3048)
+
+#RMSE Values
+rms3=mean_squared_error(meas[15:],pred[15:], squared=False)
+print(rms3)
+"""
+predNGG=np.interp(interpTimes,df2.index, df2['West_WL'])
+rms = mean_squared_error(predNGG[2638:], x2[2638:], squared=False)
+print(rms)
+
+predFG=np.interp(interpTimes,df2.index, df2['East_WL'])
+
+rms2 = mean_squared_error(predFG[2584:], x[2584:], squared=False)
+print(rms2)
+"""
+
+# Plot
+
+fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(10, 10))
+fig.patch.set_facecolor('white')
+interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
+ Delft_WL[modelPlot[0]].index[-1],
+ freq="20min")
+
+
+axes.title.set_text('National Grid Gauge')
+axes.scatter(np.interp(interpTimes,df2.index, df2['West_WL']),
+ np.interp(interpTimes,NGG2_pd.index,NGG2_pd['water_surface_elevation']*0.3048),s=1,
+ label='RMSE =' + str(round(rms3,2)))
+
+
+
+axes.set_xlim(-2, 2)
+axes.set_ylim(-2, 2)
+linepts = np.array([-3, 3])
+axes.plot(linepts, linepts, color='k')
+axes.set_xlabel('Modelled water level (m)')
+axes.set_ylabel('Measured water level (m)')
+axes.grid()
+
+
+
+
+
+
+
+fig.suptitle(runLog['Plotting Legend Entry'][i])
+
+
+#%%
+modelPlot=[105]
+i=105
+
+interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
+ Delft_WL[modelPlot[0]].index[-1],
+ freq="20min")
+df1=Delft_WL[i]
+df1_index=df1.index.shift(periods=1,freq='0H')
+
+df2=pd.DataFrame(index=df1_index)
+df2['West_WL']=df1['West_WL']
+df2['East_WL']=df1['East_WL']
+df2['West_WL']=df2['West_WL'].shift(30)
+df2['East_WL']=df2['East_WL'].shift(30)
+
+x=(np.interp(interpTimes,FFG_pd.index,FFG_pd['Water Surface Elevation (ft)']*0.3048))
+x2=np.interp(interpTimes,df2.index, df2['West_WL'])
+
+pred=np.interp(interpTimes,df2.index, df2['West_WL'])
+meas=np.interp(interpTimes,NGG2_pd.index,NGG2_pd['water_surface_elevation']*0.3048)
+
+#RMSE Values
+rms3=mean_squared_error(meas[15:],pred[15:], squared=False)
+print(rms3)
+
+# Plot
+fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(10, 10))
+fig.patch.set_facecolor('white')
+interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
+ Delft_WL[modelPlot[0]].index[-1],
+ freq="20min")
+
+axes.title.set_text('National Grid Gauge')
+axes.scatter(np.interp(interpTimes,df2.index, df2['West_WL']),
+ np.interp(interpTimes,NGG2_pd.index,NGG2_pd['water_surface_elevation']*0.3048),s=1,
+ label='RMSE =' + str(round(rms3,3)))
+
+axes.set_xlim(-2, 2)
+axes.set_ylim(-2, 2)
+linepts = np.array([-3, 3])
+axes.plot(linepts, linepts, color='k')
+axes.set_xlabel('Modelled water level (m)')
+axes.set_ylabel('Measured water level (m)')
+axes.grid()
+axes.legend(loc='lower left')
+
+axes.set_aspect('equal')
+fig.suptitle(runLog['Plotting Legend Entry'][i])
+
+
+
+
+
diff --git a/NTC_DFM/dfm_tools_23.txt b/NTC_DFM/dfm_tools_23.txt
new file mode 100644
index 0000000..cb48807
--- /dev/null
+++ b/NTC_DFM/dfm_tools_23.txt
@@ -0,0 +1,205 @@
+# This file may be used to create an environment using:
+# $ conda create --name <env> --file <this file>
+# platform: win-64
+@EXPLICIT
+https://conda.anaconda.org/conda-forge/win-64/git-2.39.1-h57928b3_0.conda
+https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2022.12.7-h5b45459_0.conda
+https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2023.0.0-h57928b3_25922.conda
+https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.11-hd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/python_abi-3.9-3_cp39.conda
+https://conda.anaconda.org/conda-forge/noarch/tzdata-2022g-h191b570_0.conda
+https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.34.31931-h4c5c07a_10.conda
+https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/vc-14.3-hb6edc58_10.conda
+https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h8ffe710_4.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/expat-2.5.0-h1537add_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/geos-3.11.1-h1537add_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/icu-70.1-h0e60522_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/jpeg-9e-h8ffe710_2.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libaec-1.0.6-h63175ca_1.conda
+https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.0.9-hcfcfb64_8.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.17-hcfcfb64_0.conda
+https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libiconv-1.17-h8ffe710_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libjpeg-turbo-2.1.4-hcfcfb64_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libspatialindex-1.9.3-h39d44d4_4.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.40.0-hcfcfb64_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.2.4-h8ffe710_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libzlib-1.2.13-hcfcfb64_4.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.9.4-hcfcfb64_0.conda
+https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/openssl-3.0.7-hcfcfb64_2.conda
+https://conda.anaconda.org/conda-forge/win-64/pixman-0.40.0-h8ffe710_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/pthreads-win32-2.9.1-hfa6e2cd_3.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/snappy-1.1.9-hfb803bf_2.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/tk-8.6.12-h8ffe710_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.4-h63175ca_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/freexl-1.0.6-h67ca5e6_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/gettext-0.21.1-h5728263_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/krb5-1.20.1-heb0366b_0.conda
+https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.0.9-hcfcfb64_8.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.0.9-hcfcfb64_8.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.39-h19919ed_0.conda
+https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-he22b5cd_12.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libssh2-1.10.0-h9a1e1f7_3.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libxml2-2.10.3-hc3477c8_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libzip-1.9.2-h519de47_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/pcre2-10.40-h17e33f8_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/python-3.9.15-h4de0772_0_cpython.conda
+https://conda.anaconda.org/conda-forge/win-64/sqlite-3.40.0-hcfcfb64_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/zlib-1.2.13-hcfcfb64_4.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.2-h12be248_6.conda
+https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda
+https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.3-hdccc3a2_0.conda
+https://conda.anaconda.org/conda-forge/win-64/boost-cpp-1.78.0-h9f4b32c_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.0.9-hcfcfb64_8.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/docopt-0.6.2-py_1.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/freetype-2.12.1-h546665d_1.conda
+https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.1.0-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/noarch/geographiclib-1.52-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/hdf4-4.2.15-h1b1b6ef_5.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-py_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.4-py39h1f6ef14_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libcurl-7.87.0-h68f0423_0.conda
+https://conda.anaconda.org/conda-forge/win-64/libglib-2.74.1-he8f3873_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libhwloc-2.8.0-h039e092_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libpq-15.1-ha9684e8_3.conda
+https://conda.anaconda.org/conda-forge/win-64/libtiff-4.5.0-hf8721a0_2.conda
+https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.39.1-py39hd28a505_1.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/lz4-4.2.0-py39hf617134_0.conda
+https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.2-py39ha55989b_0.conda
+https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.0.4-py39h1f6ef14_1.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/networkx-3.0-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/noarch/packaging-23.0-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/psutil-5.9.4-py39ha55989b_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-hcd874cb_1001.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7.1-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0-py39ha55989b_5.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/rtree-1.0.1-py39h09fdee3_1.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/setuptools-66.1.1-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.1.0-pyh8a188c0_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/tornado-6.2-py39ha55989b_1.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/unicodedata2-15.0.0-py39ha55989b_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/webob-1.8.7-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyhd8ed1ab_6.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.9-hcd874cb_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.3-hcd874cb_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/xyzservices-2022.9.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/brotli-1.0.9-hcfcfb64_8.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/cffi-1.15.1-py39h68f70e3_3.conda
+https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.2.0-h9ebe7e4_0.conda
+https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-win_pyhd8ed1ab_2.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/curl-7.87.0-h68f0423_0.conda
+https://conda.anaconda.org/conda-forge/win-64/cytoolz-0.12.0-py39ha55989b_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/fontconfig-2.14.2-hbde0cde_0.conda
+https://conda.anaconda.org/conda-forge/noarch/geopy-2.3.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/hdf5-1.12.2-nompi_h57737ce_101.conda
+https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/joblib-1.2.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/lcms2-2.14-ha5c8aab_1.conda
+https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-hf2ab4e4_1015.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libxcb-1.13-hcd874cb_1004.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/munch-2.5.0-py_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.0-ha2aaf27_2.conda
+https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.1.0-py39ha55989b_0.conda
+https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/pip-23.0-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/postgresql-15.1-hd87cd2b_3.conda
+https://conda.anaconda.org/conda-forge/win-64/proj-9.1.0-heca977f_1.conda
+https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/tbb-2021.7.0-h91493d7_1.conda
+https://conda.anaconda.org/conda-forge/noarch/tqdm-4.64.1-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/zict-2.2.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/branca-0.6.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/brotlipy-0.7.0-py39ha55989b_1005.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/cairo-1.16.0-hd694305_1014.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/cryptography-39.0.0-py39hb6bd5e6_0.conda
+https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.1.1-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/fonttools-4.38.0-py39ha55989b_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.1-h720ab47_5.conda
+https://conda.anaconda.org/conda-forge/win-64/kealib-1.5.0-h61be68b_0.conda
+https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.8.1-nompi_h8c042bf_106.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.0.1-h07bf483_22.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/mkl-2022.1.0-h6a75c08_874.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/pillow-9.4.0-py39h9767c21_0.conda
+https://conda.anaconda.org/conda-forge/win-64/pyproj-3.4.1-py39h9727d73_0.conda
+https://conda.anaconda.org/conda-forge/win-64/tiledb-2.13.2-h3132609_0.conda
+https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-16_win64_mkl.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/poppler-22.12.0-h183ae7b_1.conda
+https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-16_win64_mkl.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/libgdal-3.6.2-h060c9ed_3.conda
+https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-16_win64_mkl.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.14-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/noarch/distributed-2023.1.1-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/numpy-1.23.5-py39hbccbffa_0.conda
+https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/noarch/bokeh-2.4.3-pyhd8ed1ab_3.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/bottleneck-1.3.6-py39hc266a54_0.conda
+https://conda.anaconda.org/conda-forge/noarch/cdsapi-0.5.1-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/cftime-1.6.2-py39hc266a54_1.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/contourpy-1.0.7-py39h1f6ef14_0.conda
+https://conda.anaconda.org/conda-forge/noarch/folium-0.14.0-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/gdal-3.6.2-py39h3be0312_3.conda
+https://conda.anaconda.org/conda-forge/win-64/numba-0.56.4-py39h99ae161_0.conda
+https://conda.anaconda.org/conda-forge/win-64/pandas-1.5.3-py39h2ba5b7c_0.conda
+https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/pydap-3.3.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/pyepsg-0.4.0-py_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.1-py39h7c5f289_0.conda
+https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-py_0.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/dask-2023.1.1-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/fiona-1.9.0-py39hb5a1417_0.conda
+https://conda.anaconda.org/conda-forge/noarch/geopandas-base-0.12.2-pyha770c72_0.conda
+https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.6.3-py39haf65ace_0.conda
+https://conda.anaconda.org/conda-forge/win-64/netcdf4-1.6.2-nompi_py39h34fa13a_100.tar.bz2
+https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.4-py39hce277b7_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/scipy-1.10.0-py39hfbf2dce_0.conda
+https://conda.anaconda.org/conda-forge/noarch/xarray-2023.1.0-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/win-64/cartopy-0.21.1-py39h25ee47b_0.conda
+https://conda.anaconda.org/conda-forge/noarch/contextily-1.2.0-pyhd8ed1ab_0.tar.bz2
+https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.2.1-py39h6fe01c0_0.conda
+https://conda.anaconda.org/conda-forge/noarch/xugrid-0.2.0-pyhd8ed1ab_0.conda
+https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.5.0-pyhd8ed1ab_1.conda
+https://conda.anaconda.org/conda-forge/noarch/geopandas-0.12.2-pyhd8ed1ab_0.conda
diff --git a/NTC_DFM/dfm_tools_23.yml b/NTC_DFM/dfm_tools_23.yml
new file mode 100644
index 0000000..6ceff63
--- /dev/null
+++ b/NTC_DFM/dfm_tools_23.yml
@@ -0,0 +1,212 @@
+name: dfm_tools_23
+channels:
+ - conda-forge
+dependencies:
+ - affine=2.4.0=pyhd8ed1ab_0
+ - appdirs=1.4.4=pyh9f0ad1d_0
+ - attrs=22.2.0=pyh71513ae_0
+ - beautifulsoup4=4.11.1=pyha770c72_0
+ - blosc=1.21.3=hdccc3a2_0
+ - bokeh=2.4.3=pyhd8ed1ab_3
+ - boost-cpp=1.78.0=h9f4b32c_1
+ - bottleneck=1.3.6=py39hc266a54_0
+ - branca=0.6.0=pyhd8ed1ab_0
+ - brotli=1.0.9=hcfcfb64_8
+ - brotli-bin=1.0.9=hcfcfb64_8
+ - brotlipy=0.7.0=py39ha55989b_1005
+ - bzip2=1.0.8=h8ffe710_4
+ - ca-certificates=2022.12.7=h5b45459_0
+ - cairo=1.16.0=hd694305_1014
+ - cartopy=0.21.1=py39h25ee47b_0
+ - cdsapi=0.5.1=pyhd8ed1ab_0
+ - certifi=2022.12.7=pyhd8ed1ab_0
+ - cffi=1.15.1=py39h68f70e3_3
+ - cfitsio=4.2.0=h9ebe7e4_0
+ - cftime=1.6.2=py39hc266a54_1
+ - charset-normalizer=2.1.1=pyhd8ed1ab_0
+ - click=8.1.3=win_pyhd8ed1ab_2
+ - click-plugins=1.1.1=py_0
+ - cligj=0.7.2=pyhd8ed1ab_1
+ - cloudpickle=2.2.1=pyhd8ed1ab_0
+ - colorama=0.4.6=pyhd8ed1ab_0
+ - contextily=1.2.0=pyhd8ed1ab_0
+ - contourpy=1.0.7=py39h1f6ef14_0
+ - cryptography=39.0.0=py39hb6bd5e6_0
+ - curl=7.87.0=h68f0423_0
+ - cycler=0.11.0=pyhd8ed1ab_0
+ - cytoolz=0.12.0=py39ha55989b_1
+ - dask=2023.1.1=pyhd8ed1ab_0
+ - dask-core=2023.1.1=pyhd8ed1ab_0
+ - distributed=2023.1.1=pyhd8ed1ab_0
+ - docopt=0.6.2=py_1
+ - et_xmlfile=1.1.0=pyhd8ed1ab_0
+ - expat=2.5.0=h1537add_0
+ - fiona=1.9.0=py39hb5a1417_0
+ - folium=0.14.0=pyhd8ed1ab_0
+ - font-ttf-dejavu-sans-mono=2.37=hab24e00_0
+ - font-ttf-inconsolata=3.000=h77eed37_0
+ - font-ttf-source-code-pro=2.038=h77eed37_0
+ - font-ttf-ubuntu=0.83=hab24e00_0
+ - fontconfig=2.14.2=hbde0cde_0
+ - fonts-conda-ecosystem=1=0
+ - fonts-conda-forge=1=0
+ - fonttools=4.38.0=py39ha55989b_1
+ - freetype=2.12.1=h546665d_1
+ - freexl=1.0.6=h67ca5e6_1
+ - fsspec=2023.1.0=pyhd8ed1ab_0
+ - gdal=3.6.2=py39h3be0312_3
+ - geographiclib=1.52=pyhd8ed1ab_0
+ - geopandas=0.12.2=pyhd8ed1ab_0
+ - geopandas-base=0.12.2=pyha770c72_0
+ - geopy=2.3.0=pyhd8ed1ab_0
+ - geos=3.11.1=h1537add_0
+ - geotiff=1.7.1=h720ab47_5
+ - gettext=0.21.1=h5728263_0
+ - git=2.39.1=h57928b3_0
+ - hdf4=4.2.15=h1b1b6ef_5
+ - hdf5=1.12.2=nompi_h57737ce_101
+ - heapdict=1.0.1=py_0
+ - icu=70.1=h0e60522_0
+ - idna=3.4=pyhd8ed1ab_0
+ - intel-openmp=2023.0.0=h57928b3_25922
+ - jinja2=3.1.2=pyhd8ed1ab_1
+ - joblib=1.2.0=pyhd8ed1ab_0
+ - jpeg=9e=h8ffe710_2
+ - kealib=1.5.0=h61be68b_0
+ - kiwisolver=1.4.4=py39h1f6ef14_1
+ - krb5=1.20.1=heb0366b_0
+ - lcms2=2.14=ha5c8aab_1
+ - lerc=4.0.0=h63175ca_0
+ - libaec=1.0.6=h63175ca_1
+ - libblas=3.9.0=16_win64_mkl
+ - libbrotlicommon=1.0.9=hcfcfb64_8
+ - libbrotlidec=1.0.9=hcfcfb64_8
+ - libbrotlienc=1.0.9=hcfcfb64_8
+ - libcblas=3.9.0=16_win64_mkl
+ - libcurl=7.87.0=h68f0423_0
+ - libdeflate=1.17=hcfcfb64_0
+ - libffi=3.4.2=h8ffe710_5
+ - libgdal=3.6.2=h060c9ed_3
+ - libglib=2.74.1=he8f3873_1
+ - libhwloc=2.8.0=h039e092_1
+ - libiconv=1.17=h8ffe710_0
+ - libjpeg-turbo=2.1.4=hcfcfb64_0
+ - libkml=1.3.0=hf2ab4e4_1015
+ - liblapack=3.9.0=16_win64_mkl
+ - libnetcdf=4.8.1=nompi_h8c042bf_106
+ - libpng=1.6.39=h19919ed_0
+ - libpq=15.1=ha9684e8_3
+ - librttopo=1.1.0=he22b5cd_12
+ - libspatialindex=1.9.3=h39d44d4_4
+ - libspatialite=5.0.1=h07bf483_22
+ - libsqlite=3.40.0=hcfcfb64_0
+ - libssh2=1.10.0=h9a1e1f7_3
+ - libtiff=4.5.0=hf8721a0_2
+ - libwebp-base=1.2.4=h8ffe710_0
+ - libxcb=1.13=hcd874cb_1004
+ - libxml2=2.10.3=hc3477c8_0
+ - libzip=1.9.2=h519de47_1
+ - libzlib=1.2.13=hcfcfb64_4
+ - llvmlite=0.39.1=py39hd28a505_1
+ - locket=1.0.0=pyhd8ed1ab_0
+ - lz4=4.2.0=py39hf617134_0
+ - lz4-c=1.9.4=hcfcfb64_0
+ - m2w64-gcc-libgfortran=5.3.0=6
+ - m2w64-gcc-libs=5.3.0=7
+ - m2w64-gcc-libs-core=5.3.0=7
+ - m2w64-gmp=6.1.0=2
+ - m2w64-libwinpthread-git=5.0.0.4634.697f757=2
+ - mapclassify=2.5.0=pyhd8ed1ab_1
+ - markupsafe=2.1.2=py39ha55989b_0
+ - matplotlib-base=3.6.3=py39haf65ace_0
+ - mercantile=1.2.1=pyhd8ed1ab_0
+ - mkl=2022.1.0=h6a75c08_874
+ - msgpack-python=1.0.4=py39h1f6ef14_1
+ - msys2-conda-epoch=20160418=1
+ - munch=2.5.0=py_0
+ - munkres=1.1.4=pyh9f0ad1d_0
+ - netcdf4=1.6.2=nompi_py39h34fa13a_100
+ - networkx=3.0=pyhd8ed1ab_0
+ - numba=0.56.4=py39h99ae161_0
+ - numba_celltree=0.1.6=pyhd8ed1ab_0
+ - numpy=1.23.5=py39hbccbffa_0
+ - openjpeg=2.5.0=ha2aaf27_2
+ - openpyxl=3.1.0=py39ha55989b_0
+ - openssl=3.0.7=hcfcfb64_2
+ - packaging=23.0=pyhd8ed1ab_0
+ - pandas=1.5.3=py39h2ba5b7c_0
+ - partd=1.3.0=pyhd8ed1ab_0
+ - pcre2=10.40=h17e33f8_0
+ - pillow=9.4.0=py39h9767c21_0
+ - pip=23.0=pyhd8ed1ab_0
+ - pixman=0.40.0=h8ffe710_0
+ - pooch=1.6.0=pyhd8ed1ab_0
+ - poppler=22.12.0=h183ae7b_1
+ - poppler-data=0.4.11=hd8ed1ab_0
+ - postgresql=15.1=hd87cd2b_3
+ - proj=9.1.0=heca977f_1
+ - psutil=5.9.4=py39ha55989b_0
+ - pthread-stubs=0.4=hcd874cb_1001
+ - pthreads-win32=2.9.1=hfa6e2cd_3
+ - pycparser=2.21=pyhd8ed1ab_0
+ - pydap=3.3.0=pyhd8ed1ab_0
+ - pyepsg=0.4.0=py_0
+ - pyopenssl=23.0.0=pyhd8ed1ab_0
+ - pyparsing=3.0.9=pyhd8ed1ab_0
+ - pyproj=3.4.1=py39h9727d73_0
+ - pyshp=2.3.1=pyhd8ed1ab_0
+ - pysocks=1.7.1=pyh0701188_6
+ - python=3.9.15=h4de0772_0_cpython
+ - python-dateutil=2.8.2=pyhd8ed1ab_0
+ - python_abi=3.9=3_cp39
+ - pytz=2022.7.1=pyhd8ed1ab_0
+ - pyyaml=6.0=py39ha55989b_5
+ - rasterio=1.3.4=py39hce277b7_0
+ - requests=2.28.2=pyhd8ed1ab_0
+ - rtree=1.0.1=py39h09fdee3_1
+ - scikit-learn=1.2.1=py39h6fe01c0_0
+ - scipy=1.10.0=py39hfbf2dce_0
+ - setuptools=66.1.1=pyhd8ed1ab_0
+ - shapely=2.0.1=py39h7c5f289_0
+ - six=1.16.0=pyh6c4a22f_0
+ - snappy=1.1.9=hfb803bf_2
+ - snuggs=1.4.7=py_0
+ - sortedcontainers=2.4.0=pyhd8ed1ab_0
+ - soupsieve=2.3.2.post1=pyhd8ed1ab_0
+ - sqlite=3.40.0=hcfcfb64_0
+ - tbb=2021.7.0=h91493d7_1
+ - tblib=1.7.0=pyhd8ed1ab_0
+ - threadpoolctl=3.1.0=pyh8a188c0_0
+ - tiledb=2.13.2=h3132609_0
+ - tk=8.6.12=h8ffe710_0
+ - toolz=0.12.0=pyhd8ed1ab_0
+ - tornado=6.2=py39ha55989b_1
+ - tqdm=4.64.1=pyhd8ed1ab_0
+ - typing_extensions=4.4.0=pyha770c72_0
+ - tzdata=2022g=h191b570_0
+ - ucrt=10.0.22621.0=h57928b3_0
+ - unicodedata2=15.0.0=py39ha55989b_0
+ - urllib3=1.26.14=pyhd8ed1ab_0
+ - vc=14.3=hb6edc58_10
+ - vs2015_runtime=14.34.31931=h4c5c07a_10
+ - webob=1.8.7=pyhd8ed1ab_0
+ - wheel=0.38.4=pyhd8ed1ab_0
+ - win_inet_pton=1.1.0=pyhd8ed1ab_6
+ - xarray=2023.1.0=pyhd8ed1ab_0
+ - xerces-c=3.2.4=h63175ca_1
+ - xorg-libxau=1.0.9=hcd874cb_0
+ - xorg-libxdmcp=1.1.3=hcd874cb_0
+ - xugrid=0.2.0=pyhd8ed1ab_0
+ - xyzservices=2022.9.0=pyhd8ed1ab_0
+ - xz=5.2.6=h8d14728_0
+ - yaml=0.2.5=h8ffe710_2
+ - zict=2.2.0=pyhd8ed1ab_0
+ - zlib=1.2.13=hcfcfb64_4
+ - zstd=1.5.2=h12be248_6
+ - pip:
+ - dfm-tools==0.10.3
+ - hydrolib-core==0.4.2
+ - lxml==4.9.2
+ - meshkernel==2.0.0
+ - pydantic==1.10.4
+prefix: C:\Users\arey\Anaconda3\envs\dfm_tools_23
diff --git a/pyextremes/.idea/.gitignore b/pyextremes/.idea/.gitignore
new file mode 100644
index 0000000..13566b8
--- /dev/null
+++ b/pyextremes/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
diff --git a/pyextremes/.idea/inspectionProfiles/Project_Default.xml b/pyextremes/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 0000000..7c85937
--- /dev/null
+++ b/pyextremes/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,13 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pyextremes/.idea/inspectionProfiles/profiles_settings.xml b/pyextremes/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 0000000..105ce2d
--- /dev/null
+++ b/pyextremes/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pyextremes/.idea/misc.xml b/pyextremes/.idea/misc.xml
new file mode 100644
index 0000000..92f081e
--- /dev/null
+++ b/pyextremes/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/pyextremes/.idea/modules.xml b/pyextremes/.idea/modules.xml
new file mode 100644
index 0000000..36a29e5
--- /dev/null
+++ b/pyextremes/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pyextremes/.idea/other.xml b/pyextremes/.idea/other.xml
new file mode 100644
index 0000000..640fd80
--- /dev/null
+++ b/pyextremes/.idea/other.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pyextremes/.idea/pyextremes.iml b/pyextremes/.idea/pyextremes.iml
new file mode 100644
index 0000000..a027b29
--- /dev/null
+++ b/pyextremes/.idea/pyextremes.iml
@@ -0,0 +1,11 @@
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pyextremes/.idea/vcs.xml b/pyextremes/.idea/vcs.xml
new file mode 100644
index 0000000..6c0b863
--- /dev/null
+++ b/pyextremes/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pyextremes/WL_Extremes.py b/pyextremes/WL_Extremes.py
new file mode 100644
index 0000000..a10c215
--- /dev/null
+++ b/pyextremes/WL_Extremes.py
@@ -0,0 +1,144 @@
+# -*- coding: utf-8 -*-
+"""
+Created on Mon Feb 27 10:12:19 2023
+
+@author: usaied
+"""
+
+
+import datetime
+#import calendar
+#import math
+#import re
+import pandas as pd
+#import seaborn as sns
+import matplotlib.dates as mdates
+#import dateutil
+import numpy as np
+import matplotlib.pyplot as plt
+#import statsmodels.api as sm
+#from statsmodels.tsa.statespace.sarimax import SARIMAX
+#from statsmodels.tsa.seasonal import STL
+#import sklearn
+import scipy
+#pip install pyextremes
+
+if __name__ == '__main__':
+ from pyextremes import get_extremes, get_return_periods
+ from pyextremes import EVA
+from pyextremes.plotting import plot_extremes
+
+
+# Read Data and Convert Datetime string to Datetime object
+d_parser = lambda x: datetime.datetime.strptime(x,'%Y-%m-%d %H:%M')
+WL_hourly = pd.read_csv("//srv-oak3.baird.com/Projects/13471.101 Ontario Place Therme/05_Analyses/37_EVA PyExtremes/input_WL.csv", skiprows = 16, parse_dates = ['Datetime'], date_parser = d_parser)
+
+
+#%% Fix for multiprocessing pool with PyCharm
+__file__ = 'WL_Extremes.py'
+
+
+#%% =========Data Processing and preparation================
+# Delete rows with no water level values
+WL_hourly = WL_hourly.drop(WL_hourly[(WL_hourly.WL < -900)].index)
+WL_hourly.dropna() # drop nan values
+# Reset index to Datetime
+WL_hourly.set_index('Datetime', inplace=True)
+# Hourly resample with linear interpolation
+WL_hourly = WL_hourly.asfreq('h') # This will generate missing values
+WL_hourly = WL_hourly.interpolate(method = 'linear') #Linear interpolation
+# Drop rows from extended WL dataframe
+StartDate = WL_hourly.index.min()
+EndDate = WL_hourly.index.max()
+Length_of_record = (EndDate-StartDate).days/365.2425 # in years
+#WL_hr_POR = WL_hourly.drop(WL_hourly[(WL_hourly.index < StartDate) | (WL_hourly.index > EndDate)].index)
+#df1['WL'] = WL_hr_POR['WL']
+
+# Resample for Monthly average water levels
+# Resample with monthly freq.
+WaterLevels_MM = WL_hourly.resample('M').mean() # Monthly resample
+WaterLevels_MM = WaterLevels_MM.interpolate(method='linear')
+WaterLevels_MM.rename(columns = {'WL':'WL_MM'}, inplace = True)
+
+##### Gaussian Kernel - Develop a continuous variable for EVA ###
+Length = 30 # 30 Days average
+x = np.array(WL_hourly['WL'])
+y_static = scipy.ndimage.gaussian_filter(x, sigma=Length*6, order=0) # Note the Gaussian kernel is applied over 4 sigma by default - Therefore multiplying Length (days) by 24hr/4
+
+WL_hourly['Static_WL'] = y_static
+WL_hourly['Surge'] = WL_hourly['WL'] - WL_hourly['Static_WL']
+
+Max_Surge = WL_hourly['Surge'].max()
+
+
+###### Peak Over Threshold - Surge Analysis #####
+Threshold = 0.15 # Surge threshold
+Storm_Duration = "48H" # Hours
+if __name__ == '__main__':
+ Surge = get_extremes(WL_hourly.Surge, "POT", threshold = 0.14, r="48H")
+ model = EVA(WL_hourly.Surge)
+ model.get_extremes(method = "POT", threshold = 0.14, r="48H")
+ model.plot_extremes()
+
+#Fit Distribution: By default the distribution is selected automatically as best between
+#'genextreme' (GEV) and 'gumbel_r' for 'BM' extremes and
+#'genpareto' and 'expon' for 'POT' extremes.
+#Best distribution is selected using the AIC metric.
+
+#model.fit_model(model='MLE', distribution='genpareto')
+if __name__ == '__main__':
+ model.fit_model()
+ RP_Surge = model.get_summary(return_period=[2, 5, 10, 25, 50, 100, 200, 500, 1000], alpha=0.95, n_samples=500)
+ fig, ax = model.plot_diagnostic(alpha=0.95)
+ # plt.savefig('Surge_EVA.png')
+
+plt.show()
+
+#%%
+
+######## Annual Maxima EVA (BM) Smoothed Static WL Signal ####
+Static_WL = get_extremes(WL_hourly.Static_WL, "BM", block_size="365.2425D")
+model = EVA(WL_hourly.Static_WL)
+model.get_extremes(method = "BM", block_size="365.2425D")
+model.plot_extremes()
+model.fit_model()
+RP_Static = model.get_summary(return_period=[2, 5, 10, 25, 50, 100, 200, 500, 1000], alpha=0.95, n_samples=1000)
+fig, ax = model.plot_diagnostic(alpha=0.95)
+plt.savefig('Static_EVA.png')
+
+######## Annual Maxima EVA (BM) Hourly WL Signal ####
+Combined_WL = get_extremes(WL_hourly.WL, "BM", block_size="365.2425D")
+model = EVA(WL_hourly.WL)
+model.get_extremes(method = "BM", block_size="365.2425D")
+model.plot_extremes()
+model.fit_model()
+RP_Combined = model.get_summary(return_period=[2, 5, 10, 25, 50, 100, 200, 500, 1000], alpha=0.95, n_samples=1000)
+fig, ax = model.plot_diagnostic(alpha=0.95)
+plt.savefig('Combined_EVA.png')
+
+
+
+# Time Series Plot
+fig, ax = plt.subplots(2, figsize=(30,10), dpi=400)
+ax[0].plot(WL_hourly.index, WL_hourly['WL'], linewidth=0.5, label='Water Level (m IGLD85)')
+ax[0].plot(WL_hourly.index, WL_hourly['Static_WL'], linewidth=1.5, color='black', label='Static Water Level (m IGLD85)')
+ax[0].legend()
+ax[0].xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
+ax[0].xaxis.set_major_locator(mdates.YearLocator(1, month=1, day=1))
+ax[0].set_xlim([StartDate, EndDate])
+ax[0].set_ylabel("Water Level (m IGLD85)", fontsize=10)
+ax[0].grid(color = 'grey', linestyle = '--', linewidth = 0.5)
+
+ax[1].plot(WL_hourly.index, WL_hourly['Surge'], linewidth=0.5, label='Surge (m)')
+ax[1].legend()
+ax[1].xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
+ax[1].xaxis.set_major_locator(mdates.YearLocator(1, month=1, day=1))
+ax[1].set_xlim([StartDate, EndDate])
+ax[1].set_ylabel("Surge (m)", fontsize=10)
+ax[1].grid(color = 'grey', linestyle = '--', linewidth = 0.5)
+
+
+plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
+plt.gca().xaxis.set_major_locator(mdates.YearLocator(1, month=1, day=1))
+plt.tight_layout()
+plt.savefig('Water_Level_TS.png')
diff --git a/rasterProcess2/NWS_Alert_Read_Process_RevB.py b/rasterProcess2/NWS_Alert_Read_Process_RevB.py
index da310c3..d5f8780 100644
--- a/rasterProcess2/NWS_Alert_Read_Process_RevB.py
+++ b/rasterProcess2/NWS_Alert_Read_Process_RevB.py
@@ -16,6 +16,8 @@ import shapely.wkt
import re
+import urllib.parse
+
#%% Setup NWS ID
nws = nwswx.WxAPI('api@alexanderrey.ca')
@@ -154,13 +156,16 @@ for alertIDX in nws_alert_gdf.index:
else:
alertEndTime = nws_alert_gdf.loc[alertIDX, 'ends']
+ alertURL = 'https://alerts-v2.weather.gov/#/?id=' + \
+ urllib.parse.quote(nws_alert_gdf.loc[alertIDX, 'id'])
+
alertString = alertString + '[' + nws_alert_gdf.loc[alertIDX, 'headline'] + '}' \
'{' + alertDescription + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, 'areaDesc'] + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, 'onset'] + '}' + \
'{' + alertEndTime + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, 'severity'] + '}' + \
- '{' + nws_alert_gdf.loc[alertIDX, '@id'] + ']'
+ '{' + alertURL + ']'
alertData_1D[alertPoint] = alertString
diff --git a/rasterProcess2/enviorment.yml b/rasterProcess2/enviorment.yml
new file mode 100644
index 0000000..3282346
--- /dev/null
+++ b/rasterProcess2/enviorment.yml
@@ -0,0 +1,186 @@
+name: rasterProcess2
+channels:
+ - conda-forge
+dependencies:
+ - appdirs=1.4.4=pyh9f0ad1d_0
+ - attrs=22.2.0=pyh71513ae_0
+ - blosc=1.21.3=hdccc3a2_0
+ - bokeh=2.4.3=pyhd8ed1ab_3
+ - boost-cpp=1.78.0=h9f4b32c_1
+ - bottleneck=1.3.5=py38hbaf524b_1
+ - branca=0.6.0=pyhd8ed1ab_0
+ - brotli=1.0.9=hcfcfb64_8
+ - brotli-bin=1.0.9=hcfcfb64_8
+ - brotlipy=0.7.0=py38h91455d4_1005
+ - bzip2=1.0.8=h8ffe710_4
+ - ca-certificates=2022.12.7=h5b45459_0
+ - cairo=1.16.0=hd694305_1014
+ - cffi=1.15.1=py38h57701bc_3
+ - cfitsio=4.2.0=h9ebe7e4_0
+ - cftime=1.6.2=py38hbaf524b_1
+ - charset-normalizer=2.1.1=pyhd8ed1ab_0
+ - click=8.1.3=win_pyhd8ed1ab_2
+ - click-plugins=1.1.1=py_0
+ - cligj=0.7.2=pyhd8ed1ab_1
+ - cloudpickle=2.2.0=pyhd8ed1ab_0
+ - colorama=0.4.6=pyhd8ed1ab_0
+ - contourpy=1.0.7=py38hb1fd069_0
+ - cryptography=39.0.0=py38h95f5157_0
+ - curl=7.87.0=h68f0423_0
+ - cycler=0.11.0=pyhd8ed1ab_0
+ - cytoolz=0.12.0=py38h91455d4_1
+ - dask=2023.1.0=pyhd8ed1ab_0
+ - dask-core=2023.1.0=pyhd8ed1ab_0
+ - distributed=2023.1.0=pyhd8ed1ab_0
+ - expat=2.5.0=h1537add_0
+ - fiona=1.8.22=py38hc97cbf3_5
+ - folium=0.14.0=pyhd8ed1ab_0
+ - font-ttf-dejavu-sans-mono=2.37=hab24e00_0
+ - font-ttf-inconsolata=3.000=h77eed37_0
+ - font-ttf-source-code-pro=2.038=h77eed37_0
+ - font-ttf-ubuntu=0.83=hab24e00_0
+ - fontconfig=2.14.1=hbde0cde_0
+ - fonts-conda-ecosystem=1=0
+ - fonts-conda-forge=1=0
+ - fonttools=4.38.0=py38h91455d4_1
+ - freetype=2.12.1=h546665d_1
+ - freexl=1.0.6=h67ca5e6_1
+ - fsspec=2022.11.0=pyhd8ed1ab_0
+ - gdal=3.6.2=py38h5a6f081_3
+ - geopandas=0.12.2=pyhd8ed1ab_0
+ - geopandas-base=0.12.2=pyha770c72_0
+ - geos=3.11.1=h1537add_0
+ - geotiff=1.7.1=h720ab47_5
+ - gettext=0.21.1=h5728263_0
+ - hdf4=4.2.15=h1b1b6ef_5
+ - hdf5=1.12.2=nompi_h57737ce_101
+ - heapdict=1.0.1=py_0
+ - icu=70.1=h0e60522_0
+ - idna=3.4=pyhd8ed1ab_0
+ - intel-openmp=2023.0.0=h57928b3_25922
+ - jinja2=3.1.2=pyhd8ed1ab_1
+ - joblib=1.2.0=pyhd8ed1ab_0
+ - jpeg=9e=h8ffe710_2
+ - kealib=1.5.0=h61be68b_0
+ - kiwisolver=1.4.4=py38hb1fd069_1
+ - krb5=1.20.1=heb0366b_0
+ - lcms2=2.14=ha5c8aab_1
+ - lerc=4.0.0=h63175ca_0
+ - libaec=1.0.6=h63175ca_1
+ - libblas=3.9.0=16_win64_mkl
+ - libbrotlicommon=1.0.9=hcfcfb64_8
+  - libbrotlidec=1.0.9=hcfcfb64_8
+ - libbrotlienc=1.0.9=hcfcfb64_8
+ - libcblas=3.9.0=16_win64_mkl
+ - libcurl=7.87.0=h68f0423_0
+ - libdeflate=1.17=hcfcfb64_0
+ - libffi=3.4.2=h8ffe710_5
+ - libgdal=3.6.2=h060c9ed_3
+ - libglib=2.74.1=he8f3873_1
+ - libhwloc=2.8.0=h039e092_1
+ - libiconv=1.17=h8ffe710_0
+ - libjpeg-turbo=2.1.4=hcfcfb64_0
+ - libkml=1.3.0=hf2ab4e4_1015
+ - liblapack=3.9.0=16_win64_mkl
+ - libnetcdf=4.8.1=nompi_h8c042bf_106
+ - libpng=1.6.39=h19919ed_0
+ - libpq=15.1=ha9684e8_3
+ - librttopo=1.1.0=he22b5cd_12
+ - libspatialindex=1.9.3=h39d44d4_4
+ - libspatialite=5.0.1=h07bf483_22
+ - libsqlite=3.40.0=hcfcfb64_0
+ - libssh2=1.10.0=h9a1e1f7_3
+ - libtiff=4.5.0=hf8721a0_2
+ - libwebp-base=1.2.4=h8ffe710_0
+ - libxcb=1.13=hcd874cb_1004
+ - libxml2=2.10.3=hc3477c8_1
+ - libzip=1.9.2=h519de47_1
+ - libzlib=1.2.13=hcfcfb64_4
+ - locket=1.0.0=pyhd8ed1ab_0
+ - lz4=4.2.0=py38hb6d8784_0
+ - lz4-c=1.9.3=h8ffe710_1
+ - m2w64-gcc-libgfortran=5.3.0=6
+ - m2w64-gcc-libs=5.3.0=7
+ - m2w64-gcc-libs-core=5.3.0=7
+ - m2w64-gmp=6.1.0=2
+ - m2w64-libwinpthread-git=5.0.0.4634.697f757=2
+ - mapclassify=2.5.0=pyhd8ed1ab_1
+ - markupsafe=2.1.1=py38h91455d4_2
+ - matplotlib-base=3.6.2=py38h528a6c7_0
+ - mkl=2022.1.0=h6a75c08_874
+ - msgpack-python=1.0.4=py38hb1fd069_1
+ - msys2-conda-epoch=20160418=1
+ - munch=2.5.0=py_0
+ - munkres=1.1.4=pyh9f0ad1d_0
+ - netcdf4=1.6.2=nompi_py38h78680c8_100
+ - networkx=3.0=pyhd8ed1ab_0
+ - numpy=1.24.1=py38h90ce339_0
+ - openjpeg=2.5.0=ha2aaf27_2
+ - openssl=3.0.7=hcfcfb64_1
+ - packaging=23.0=pyhd8ed1ab_0
+ - pandas=1.5.2=py38h5846ac1_2
+ - partd=1.3.0=pyhd8ed1ab_0
+ - pcre2=10.40=h17e33f8_0
+ - pillow=9.4.0=py38h409c3de_0
+ - pip=22.3.1=pyhd8ed1ab_0
+ - pixman=0.40.0=h8ffe710_0
+ - pooch=1.6.0=pyhd8ed1ab_0
+ - poppler=22.12.0=h183ae7b_1
+ - poppler-data=0.4.11=hd8ed1ab_0
+ - postgresql=15.1=hd87cd2b_3
+ - proj=9.1.0=heca977f_1
+ - psutil=5.9.4=py38h91455d4_0
+ - pthread-stubs=0.4=hcd874cb_1001
+ - pthreads-win32=2.9.1=hfa6e2cd_3
+ - pycparser=2.21=pyhd8ed1ab_0
+ - pyopenssl=23.0.0=pyhd8ed1ab_0
+ - pyparsing=3.0.9=pyhd8ed1ab_0
+ - pyproj=3.4.1=py38hac5b721_0
+ - pysocks=1.7.1=pyh0701188_6
+ - python=3.8.15=h4de0772_0_cpython
+ - python-dateutil=2.8.2=pyhd8ed1ab_0
+ - python_abi=3.8=3_cp38
+ - pytz=2022.7.1=pyhd8ed1ab_0
+ - pyyaml=6.0=py38h91455d4_5
+ - requests=2.28.2=pyhd8ed1ab_0
+ - rtree=1.0.1=py38h8b54edf_1
+ - scikit-learn=1.2.0=py38h69724d7_0
+ - scipy=1.10.0=py38h0f6ee2a_0
+ - setuptools=66.0.0=pyhd8ed1ab_0
+ - shapely=2.0.0=py38h9c0aba1_0
+ - six=1.16.0=pyh6c4a22f_0
+ - snappy=1.1.9=hfb803bf_2
+ - sortedcontainers=2.4.0=pyhd8ed1ab_0
+ - sqlite=3.40.0=hcfcfb64_0
+ - tbb=2021.7.0=h91493d7_1
+ - tblib=1.7.0=pyhd8ed1ab_0
+ - threadpoolctl=3.1.0=pyh8a188c0_0
+ - tiledb=2.13.2=h3132609_0
+ - tk=8.6.12=h8ffe710_0
+ - toolz=0.12.0=pyhd8ed1ab_0
+ - tornado=6.2=py38h91455d4_1
+ - typing_extensions=4.4.0=pyha770c72_0
+ - ucrt=10.0.22621.0=h57928b3_0
+ - unicodedata2=15.0.0=py38h91455d4_0
+ - urllib3=1.26.14=pyhd8ed1ab_0
+ - vc=14.3=hb6edc58_10
+ - vs2015_runtime=14.34.31931=h4c5c07a_10
+ - wheel=0.38.4=pyhd8ed1ab_0
+ - win_inet_pton=1.1.0=pyhd8ed1ab_6
+ - xarray=2022.12.0=pyhd8ed1ab_0
+ - xerces-c=3.2.4=h63175ca_1
+ - xorg-libxau=1.0.9=hcd874cb_0
+ - xorg-libxdmcp=1.1.3=hcd874cb_0
+ - xyzservices=2022.9.0=pyhd8ed1ab_0
+ - xz=5.2.6=h8d14728_0
+ - yaml=0.2.5=h8ffe710_2
+ - zict=2.2.0=pyhd8ed1ab_0
+ - zlib=1.2.13=hcfcfb64_4
+ - zstd=1.5.2=h7755175_4
+ - pip:
+ - capparselib==0.6.6
+ - certifi==2022.12.7
+ - h3==4.0.0b2
+ - lxml==4.6.3
+ - nwswx==0.1.1
+prefix: C:\Users\arey\Anaconda3\envs\rasterProcess2