May 8 commit

This commit is contained in:
Alexander Rey 2023-05-08 10:57:46 -04:00
parent 15c7820c1c
commit f6061d40c4
31 changed files with 5214 additions and 71 deletions

511
EWR_Data/EWR_DataProc.py Normal file
View File

@ -0,0 +1,511 @@
#%% Plotting EWR Flume Tests
# Alexander Rey, 2022
#%% Import
import pandas as pd
import numpy as np
import math
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib.colors as mcolors
import geopandas as gp
gp.options.use_pygeos = True
from shapely import geometry, ops
# Map Plotting
import cartopy.crs as ccrs
import contextily as ctx
# Interpolation
import scipy as sp
from scipy.interpolate import griddata
from scipy.interpolate import LinearNDInterpolator, interp1d
# Lowess interpolation
import statsmodels.api as sm
import pathlib as pl
import datetime
#%% Read in centerline shapefile
# Build one continuous centerline from the modelling shapefile and derive a
# cumulative along-channel distance per vertex. NOTE(review): despite the name,
# 'RiverKM' is in metres (distances are computed in UTM 15N and later plotted
# against an x-axis labelled 'Distance Along River [m]').
river_centerline = gp.read_file('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/03_Data/02_Physical/16_Waterline/Centerline_for_Modelling_UTMZ15.shp')
# Explode multi-part geometries into one row per sub-line
river_centerlineExploded = river_centerline.explode(ignore_index=True)
river_centerlineExploded.reset_index(inplace=True)
# Hand-picked row order that chains the sub-lines end to end; column 4 is
# taken positionally — presumably the geometry column. TODO confirm both the
# ordering and the column position against the shapefile's actual layout.
tempMulti = river_centerlineExploded.iloc[[5,0,1,2,3,4,6,7,9], 4]
# Put the sub-line coordinates into a list of sublists
outcoords = [list(i.coords) for i in tempMulti]
# Flatten the list of sublists and use it to make a new line
river_centerline_merge = geometry.LineString([i for sublist in outcoords for i in sublist])
river_centerline_merge_gpd = gp.GeoSeries(river_centerline_merge)
# One point per centerline vertex, CRS UTM 15N so .distance() yields metres
river_centerline_merge_gpd2 =\
gp.GeoDataFrame(geometry=gp.points_from_xy(
river_centerline_merge.xy[0], river_centerline_merge.xy[1], crs="EPSG:32615"))
# Add distance along centerline
river_centerline_merge_gpd2['DistanceFromPrevious'] = river_centerline_merge_gpd2.distance(river_centerline_merge_gpd2.shift(1))
river_centerline_merge_gpd2['RiverKM'] = river_centerline_merge_gpd2['DistanceFromPrevious'].cumsum()
# First vertex has no predecessor (shift() produced NaN); zero both derived
# columns by position (1 = DistanceFromPrevious, 2 = RiverKM).
river_centerline_merge_gpd2.iloc[0, 1] = 0
river_centerline_merge_gpd2.iloc[0, 2] = 0
#%% Import Observations from database
# Read in combined dataset
obs_IN = pd.read_csv("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/output/combined dataset-SGJ.csv")
# Parse sample dates and extract the month for seasonal grouping later
obs_IN['DateTime'] = pd.to_datetime(obs_IN.Sampledate_x)
obs_IN['Month'] = obs_IN['DateTime'].dt.month
# Convert to geodataframe (WGS84 lon/lat reprojected to UTM 15N)
obs_gdf = gp.GeoDataFrame(obs_IN, geometry=gp.points_from_xy(
    obs_IN.loc[:, 'Longitude'], obs_IN.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
# Harmonize reported units: sediment concentrations to ng/g, water to ng/L.
# Each (unit label, multiplier) pair mirrors one of the original .loc lines.
for unit_label, factor in (('NG/G', 1), ('ng/g', 1), ('UG/G', 1000), ('ug/g', 1000),
                           ('MG/KG', 1000), ('mg/kg', 1000)):
    obs_gdf.loc[obs_gdf.Unit == unit_label, 'Sample_NG/G'] = obs_gdf.Samplevalue * factor
for unit_label, factor in (('NG/L', 1), ('ng/L', 1), ('UG/L', 1000), ('ug/L', 1000),
                           ('MG/L', 1000000), ('mg/L', 1000000)):
    obs_gdf.loc[obs_gdf.Unit == unit_label, 'Sample_NG/L'] = obs_gdf.Samplevalue * factor
# Attach the nearest centerline vertex (bringing in RiverKM) within 250 m,
# then reset the index
obs_gdf = gp.sjoin_nearest(river_centerline_merge_gpd2, obs_gdf, how='right', max_distance=250).reset_index()
# Sediment Hg GeoDataFrame where media is Sediment
# Total-mercury parameter spellings and sediment media codes used below
hg_param_names = ['Mercury', 'Mercury (Hg) Total', 'Total Mercury', 'Mercury (Hg)']
sed_media_codes = ['SED', 'SOIL']
obsMask = (obs_gdf.Media_x.isin(sed_media_codes) &
           obs_gdf.Parameter.isin(hg_param_names) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())
obs_HgSed = obs_gdf.loc[obsMask, :]
# Sediment Hg GeoDataFrame for surface sediment: additionally restrict to
# samples whose top depth is < 5 (or unknown)
obsMask = (obs_gdf.Media_x.isin(sed_media_codes) &
           obs_gdf.Parameter.isin(hg_param_names) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna() &
           ((obs_gdf.TopDepth_x < 5) | (obs_gdf.TopDepth_x.isna())))
obs_HgSurfaceSed = obs_gdf.loc[obsMask, :]
# obs_HgSurfaceSed.to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_HgSurfaceSed.geojson', driver="GeoJSON")
obs_HgSurfaceSed.loc[:, ['Sample_NG/G', 'Samplenumber_x', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_HgSurfaceSed2.shp')
# Group sediment core data to take average/min/max per site and sampling time
surface_agg = {'Sample_NG/G': ['mean', 'min', 'max'],
               'RiverKM': ['mean'],
               'Longitude': ['mean'],
               'Latitude': ['mean'],
               'DateTime': ['mean']}
obs_HgSurfaceSed_Avg = (obs_HgSurfaceSed
                        .groupby(['Sitecode', 'DateTime'])
                        .agg(surface_agg)
                        .reset_index())
# Flatten the (column, statistic) MultiIndex into underscore-joined names
obs_HgSurfaceSed_Avg.columns = ["_".join(pair) for pair in obs_HgSurfaceSed_Avg.columns.to_flat_index()]
# Rename to short names that fit shapefile field-length limits
obs_HgSurfaceSed_Avg.rename(columns={"Sample_NG/G_mean": "Mean_NG/G",
                                     "Sample_NG/G_max": "Max_NG/G",
                                     "Sample_NG/G_min": "Min_NG/G"}, inplace=True)
# Convert to geodataframe (lon/lat -> UTM 15N) and save
obs_HgSurfaceSed_Avg_gdf = gp.GeoDataFrame(obs_HgSurfaceSed_Avg, geometry=gp.points_from_xy(
    obs_HgSurfaceSed_Avg.loc[:, 'Longitude_mean'], obs_HgSurfaceSed_Avg.loc[:, 'Latitude_mean'],
    crs="EPSG:4326")).to_crs(crs="EPSG:32615")
obs_HgSurfaceSed_Avg_gdf.loc[:, ['Mean_NG/G', 'Max_NG/G', 'Min_NG/G', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_HgSurfaceSed_Avg.shp')
# Group sediment core data to take average/min/max per site and sampling time
obs_HgSed_Avg = (obs_HgSed
                 .groupby(['Sitecode', 'DateTime'])
                 .agg({'Sample_NG/G': ['mean', 'min', 'max'],
                       'RiverKM': ['mean'],
                       'Longitude': ['mean'],
                       'Latitude': ['mean'],
                       'DateTime': ['mean']})
                 .reset_index())
# Merge the (column, statistic) MultiIndex with '|' separators, then strip the
# trailing '|' left on stat-less columns, e.g. ('Sitecode','') -> 'Sitecode'
obs_HgSed_Avg.columns = obs_HgSed_Avg.columns.map('|'.join).str.strip('|')
# Restore plain names for the mean-only columns
obs_HgSed_Avg = obs_HgSed_Avg.rename(columns={"RiverKM|mean": "RiverKM", "Longitude|mean": "Longitude",
                                              "Latitude|mean": "Latitude", "DateTime|mean": "DateTime"})
# Create new GeoDataFrame (lon/lat -> UTM 15N)
obs_HgSed_Avg = gp.GeoDataFrame(obs_HgSed_Avg, geometry=gp.points_from_xy(
    obs_HgSed_Avg.loc[:, 'Longitude'], obs_HgSed_Avg.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
# Water Hg GeoDataFrame where media is SurfaceWater (SW).
# Note the water list uses 'Mercury (Hg)-Total' (hyphenated), unlike sediment.
water_hg_params = ['Mercury', 'Mercury (Hg)-Total', 'Total Mercury', 'Mercury (Hg)']
mehg_params = ['Methylmercury (as MeHg)', 'Methyl mercury']
post2000 = datetime.datetime(2000, 1, 1)
obs_HgWater = obs_gdf.loc[(obs_gdf.Media_x == 'SW') &
                          obs_gdf.Parameter.isin(water_hg_params) &
                          (obs_gdf.DateTime > post2000) & obs_gdf.RiverKM.notna(), :]
# obs_HgWater.loc[:, ['Sample_NG/L', 'Samplenumber_x', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_HgWater.shp')
# Sediment surface MeHg GeoDataFrame (top depth < 5 or unknown)
obs_MeHgSed = obs_gdf.loc[obs_gdf.Media_x.isin(['SED', 'SOIL']) &
                          obs_gdf.Parameter.isin(mehg_params) &
                          (obs_gdf.DateTime > post2000) & obs_gdf.RiverKM.notna() &
                          ((obs_gdf.TopDepth_x < 5) | (obs_gdf.TopDepth_x.isna())), :]
# obs_MeHgSed.loc[:, ['Sample_NG/G', 'Samplenumber_x', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_MeHgSed.shp')
# Group sediment core data to take average/min/max per site and sampling time
obs_MeHgSed_Avg = (obs_MeHgSed
                   .groupby(['Sitecode', 'DateTime'])
                   .agg({'Sample_NG/G': ['mean', 'min', 'max'],
                         'RiverKM': ['mean'],
                         'Longitude': ['mean'],
                         'Latitude': ['mean'],
                         'DateTime': ['mean']})
                   .reset_index())
# Flatten the MultiIndex into underscore-joined names
obs_MeHgSed_Avg.columns = ["_".join(pair) for pair in obs_MeHgSed_Avg.columns.to_flat_index()]
# Rename to short names that fit shapefile field-length limits
obs_MeHgSed_Avg.rename(columns={"Sample_NG/G_mean": "Mean_NG/G",
                                "Sample_NG/G_max": "Max_NG/G",
                                "Sample_NG/G_min": "Min_NG/G"}, inplace=True)
# Create new GeoDataFrame (lon/lat -> UTM 15N) and save
obs_MeHgSed_Avg_gdf = gp.GeoDataFrame(obs_MeHgSed_Avg, geometry=gp.points_from_xy(
    obs_MeHgSed_Avg.loc[:, 'Longitude_mean'], obs_MeHgSed_Avg.loc[:, 'Latitude_mean'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
obs_MeHgSed_Avg_gdf.loc[:, ['Mean_NG/G', 'Max_NG/G', 'Min_NG/G', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_MeHgSed_Avg.shp')
# MeHg in Water
obs_MeHgWater = obs_gdf.loc[(obs_gdf.Media_x == 'SW') &
                            obs_gdf.Parameter.isin(mehg_params) &
                            (obs_gdf.DateTime > post2000) & obs_gdf.RiverKM.notna(), :]
# Save
obs_MeHgWater.loc[:, ['Sample_NG/L', 'Samplenumber_x', 'geometry']].to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/obs_MeHgWater.shp')
#%% Plot Observations
# Quick map check: site-averaged sediment Hg over a Mapbox basemap.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(30, 15))
axes.set_xlim(494649, 513647)
axes.set_ylim(5514653, 5525256)
# Add basemap
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# NOTE(review): basemap is requested in EPSG:26915 (NAD83 / UTM 15N) while the
# data are EPSG:32615 (WGS84 / UTM 15N); the datum offset is small here, but
# confirm this mismatch is intentional.
ctx.add_basemap(axes, source=mapbox, crs='EPSG:26915')
# Fix: GeoDataFrame.plot takes the target axes via `ax=`, not `axes=`. The
# original `axes=axes` kwarg was forwarded on to matplotlib, so the point
# layer was not drawn on the basemap axes.
obs_HgSed_Avg.plot(column='Sample_NG/G|mean', ax=axes, vmin=0, vmax=10000)
plt.show()
#%% Plot River Profile
# Discrete qualitative palette for the per-axes property cycle
cmap = cm.get_cmap("Set1")
colors = cmap.colors
# Build an HSV colormap shifted by 180 degrees: sample 256 colours from the
# stock 'hsv' map and roll them halfway so that red lands in the middle
# (used to colour points by month, making mid-year months stand out).
hsv = plt.get_cmap('hsv')
shifted_hsv = mcolors.LinearSegmentedColormap.from_list(
    'shifted_hsv', np.roll(hsv(np.linspace(0.0, 1.0, 256)), 128, axis=0))
# Common x grid (0 to 100 km, in metres) for all lowess evaluations below
interp_xvals = np.linspace(0, 100000, num=1000)
# Bin averaging function
def average_in_bins(df, bin_width):
    """Average the columns of *df* over fixed-width bins of its numeric index.

    Parameters
    ----------
    df : pandas.DataFrame
        Frame whose index is numeric (here, distance along the river in m).
    bin_width : int
        Width of each half-open bin [k*bin_width, (k+1)*bin_width).

    Returns
    -------
    pandas.DataFrame of per-bin column means, indexed by bin interval.
    """
    # Extend the edges one full bin past the index maximum. With right=False
    # the bins are left-closed/right-open, so the original stop of
    # int(max + bin_width) silently dropped any row whose index fell exactly
    # on the final edge (e.g. max an exact multiple of bin_width).
    stop = (int(df.index.max() // bin_width) + 2) * bin_width
    binned_df = pd.cut(df.index, bins=range(0, stop, bin_width), right=False)
    return df.groupby(binned_df).mean()
# Four stacked panels sharing the along-river x axis: surface sediment Hg,
# surface sediment MeHg, water Hg, water MeHg — each with per-season lowess
# trend overlays.
SeasonMonths = [[12, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10, 11]]
SeasonNames = ['Winter', 'Spring', 'Summer', 'Fall']
SeasonColors = ['tab:blue', 'tab:green', 'darkred', 'tab:orange']

def _plot_seasonal_lowess(ax, df, ycol, frac):
    """Overlay one lowess trend line (ycol vs RiverKM) per season on *ax*.

    Winter (season index 0) is skipped, as in the original per-panel loops.
    `frac` is the lowess smoothing span; trends are evaluated on the shared
    `interp_xvals` grid.
    """
    for seasonIDX, season in enumerate(SeasonMonths):
        if seasonIDX == 0:
            # Skip Winter
            continue
        seasonMask = df['DateTime'].dt.month.isin(season)
        trend = sm.nonparametric.lowess(df.loc[seasonMask, ycol],
                                        df.loc[seasonMask, 'RiverKM'],
                                        frac=frac, xvals=interp_xvals, it=2)
        ax.plot(interp_xvals, trend, label='Lowess:' + SeasonNames[seasonIDX],
                linewidth=4, color=SeasonColors[seasonIDX])

fig, axes = plt.subplots(nrows=4, ncols=1, figsize=(8, 12), sharex=True)
for panel in axes:
    panel.set_prop_cycle(color=colors)
# Panel 0: surface sediment total Hg, points coloured by sampling month
obs_HgSurfaceSed.plot.scatter('RiverKM', 'Sample_NG/G', ax=axes[0], c='Month',
                              label='All Samples', vmin=1, vmax=12, cmap=shifted_hsv)
_plot_seasonal_lowess(axes[0], obs_HgSurfaceSed, 'Sample_NG/G', 0.5)
axes[0].set_ylabel('Surface Sediment Total Hg [ng/g]')
axes[0].set_ylim([0, 20000])
axes[0].set_title('Surface Sediment Total Hg')
axes[0].legend()
# Panel 1: surface sediment MeHg
obs_MeHgSed.plot.scatter('RiverKM', 'Sample_NG/G', ax=axes[1], c='Month',
                         label='All Samples', vmin=1, vmax=12, cmap=shifted_hsv)
axes[1].set_ylabel('Surface Sediment Total MeHg [ng/g]')
_plot_seasonal_lowess(axes[1], obs_MeHgSed, 'Sample_NG/G', 0.5)
axes[1].set_title('Surface Sediment Total MeHg')
axes[1].legend()
# Panel 2: water total Hg
obs_HgWater.plot.scatter('RiverKM', 'Sample_NG/L', ax=axes[2], c='Month',
                         label='All Samples', vmin=1, vmax=12,
                         cmap=shifted_hsv)
_plot_seasonal_lowess(axes[2], obs_HgWater, 'Sample_NG/L', 0.4)
axes[2].set_ylabel('Water Total Hg [ng/L]')
axes[2].set_ylim([0, 50])
axes[2].set_title('Water Total Hg')
axes[2].legend()
# Panel 3: water MeHg
obs_MeHgWater.plot.scatter('RiverKM', 'Sample_NG/L', ax=axes[3], c='Month',
                           label='All Samples', vmin=1, vmax=12, cmap=shifted_hsv)
_plot_seasonal_lowess(axes[3], obs_MeHgWater, 'Sample_NG/L', 0.25)
axes[3].set_ylabel('Water Total MeHg [ng/L]')
axes[3].set_ylim([0, 5])
axes[3].set_title('Water Total MeHg')
axes[3].set_xlim([0, 100000])
axes[3].set_xlabel('Distance Along River [m]')
axes[3].legend()
plt.show()
fig.savefig("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/Figures/RiverDataProfiles_Lowess_Months_RevB.png",
            bbox_inches='tight', dpi=200)
#%% Plot Profiles
# Depth profile of all sediment Hg samples along the river, coloured by
# concentration (capped at 20,000 ng/g).
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(7, 7))
obs_HgSed.plot.scatter('RiverKM', 'TopDepth_x', s=12, c='Sample_NG/G', ax=axes, vmax=20000)
# Depth increases downward on the page
axes.invert_yaxis()
axes.set_xlabel('Distance Along River [m]')
axes.set_ylabel('Sample Depth [m]')
axes.set_xlim([0, 100000])
plt.show()
fig.savefig("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/Figures/HgDepthMonth.png",
            bbox_inches='tight', dpi=200)
#%% Interpolate lowess to grid for hg
# Interpolate surface sediment Hg onto the hydraulic model grid three ways
# (nearest, linear, lowess-smoothed) and export each as an xyz file.
# Read in model points and roughness
bathy_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Bed_Level.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)
rough_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Roughness.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)
# Nearest-neighbour roughness at the bathymetry points; a roughness of 0.05
# is used below as the wetland marker. Computed once, reused for all exports.
RoughInterp = sp.interpolate.griddata(np.transpose(np.array([rough_xyz.x, rough_xyz.y])), rough_xyz.z,
                                      np.transpose(np.array([bathy_xyz.x, bathy_xyz.y])),
                                      method='nearest')
# Sample and target coordinate arrays shared by every interpolation below
obs_xy = np.array([obs_HgSurfaceSed.geometry.x, obs_HgSurfaceSed.geometry.y]).T
grid_xy = np.array([bathy_xyz.x, bathy_xyz.y]).T

def _export_grid(values, out_path):
    """Stack (x, y, value) columns, zero wetland cells, save as xyz text."""
    out = np.transpose(np.vstack((bathy_xyz.x, bathy_xyz.y, values)))
    # Set Wetlands to zero (roughness == 0.05 marks wetland cells)
    out[RoughInterp == 0.05, 2] = 0
    np.savetxt(out_path, out, delimiter=" ")

# Nearest-neighbour interpolation of the raw samples
_export_grid(griddata(obs_xy, obs_HgSurfaceSed['Sample_NG/G'], grid_xy, method='nearest'),
             'C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Hg_Nearest.xyz')
# Linear interpolation
_export_grid(griddata(obs_xy, obs_HgSurfaceSed['Sample_NG/G'], grid_xy, method='linear'),
             'C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Hg_Linear.xyz')
# Lowess-smoothed field.
# Fix: the original used the default return_sorted=True and took
# lowess[:, 1], which is sorted by RiverKM — misaligned with the sample
# (x, y) order in obs_xy unless the frame happened to be pre-sorted.
# return_sorted=False returns fitted values in the original row order.
lowess_fit = sm.nonparametric.lowess(obs_HgSurfaceSed['Sample_NG/G'], obs_HgSurfaceSed['RiverKM'],
                                     frac=0.3, it=2, return_sorted=False)
_export_grid(griddata(obs_xy, lowess_fit, grid_xy, method='nearest'),
             'C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Hg_Lowess.xyz')
#%% Interpolate Hg to grid
# Export each observation dataset (plus one synthetic profile) onto the model
# grid as nearest-neighbour xyz files, zeroing wetland cells.
# Read in model points and roughness
bathy_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Bed_Level.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)
rough_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Roughness.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)
# The roughness interpolation is identical for every dataset, so compute it
# once here (the original recomputed it inside each of the five loop passes).
RoughInterp = sp.interpolate.griddata(np.transpose(np.array([rough_xyz.x, rough_xyz.y])), rough_xyz.z,
                                      np.transpose(np.array([bathy_xyz.x, bathy_xyz.y])),
                                      method='nearest')
# Synthetic from Reed's Sheet: exponential decay with distance downstream
reed_synth = ((10 - 1) * np.exp(-0.08 * np.array(river_centerline_merge_gpd2.loc[:, 'RiverKM']) / 1000) + 1) * 1000
# (values, source geometry, output file stem) for each export.
# Fix: the MeHg sediment entry must use obs_MeHgSed_Avg_gdf — the plain
# obs_MeHgSed_Avg frame has underscore-joined columns renamed to 'Mean_NG/G'
# (no 'Sample_NG/G|mean' column) and carries no geometry, so the original
# i == 1 branch would have raised a KeyError.
datasets = [
    (obs_HgSed_Avg['Sample_NG/G|mean'], obs_HgSed_Avg.geometry, 'HgSed_meanDB'),
    (obs_MeHgSed_Avg_gdf['Mean_NG/G'], obs_MeHgSed_Avg_gdf.geometry, 'MeHgSed_meanDB'),
    (obs_HgWater['Sample_NG/L'], obs_HgWater.geometry, 'HgWater_allDB'),
    (obs_MeHgWater['Sample_NG/L'], obs_MeHgWater.geometry, 'MeHgWater_allDB'),
    (reed_synth, river_centerline_merge_gpd2.geometry, 'ReedHgSed'),
]
for dataOUT, geom, dateFileName in datasets:
    gridInterp = griddata(np.array([geom.x, geom.y]).T, dataOUT,
                          np.array([bathy_xyz.x, bathy_xyz.y]).T, method='nearest')
    gridInterpOut = np.transpose(np.vstack((bathy_xyz.x, bathy_xyz.y, gridInterp)))
    # Set Wetlands to zero (roughness == 0.05 marks wetland cells)
    gridInterpOut[RoughInterp == 0.05, 2] = 0
    np.savetxt('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/' + dateFileName + '.xyz',
               gridInterpOut, delimiter=" ")

1437
EWR_Data/waves.py Normal file

File diff suppressed because it is too large Load Diff

8
MEDS/.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

8
MEDS/.idea/MEDS.iml Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@ -0,0 +1,13 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="bool.*" />
<option value="geopandas.io.file" />
</list>
</option>
</inspection_tool>
</profile>
</component>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

4
MEDS/.idea/misc.xml Normal file
View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (geopandas_forge_mustique)" project-jdk-type="Python SDK" />
</project>

8
MEDS/.idea/modules.xml Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/MEDS.iml" filepath="$PROJECT_DIR$/.idea/MEDS.iml" />
</modules>
</component>
</project>

6
MEDS/.idea/vcs.xml Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

View File

@ -76,7 +76,12 @@ importPaths = ['//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Resear
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221115 WQ sampling/Great House', '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221115 WQ sampling/Great House',
None, None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221115 WQ sampling/Old Queens Fort', '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221115 WQ sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221127 WQ Sampling/Crane'] '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221127 WQ Sampling/Crane',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Great House',
None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Crane',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20230212 WQ Sampling/Caribee']
siteNames = ['Great House', siteNames = ['Great House',
'Greensleeves', 'Greensleeves',
@ -111,7 +116,12 @@ siteNames = ['Great House',
'Great House', 'Great House',
None, None,
'Old Queens Fort', 'Old Queens Fort',
'Crane'] 'Crane',
'Great House',
None,
'Old Queens Fort',
'Crane',
'Caribee']
timeLabels= ['Before Construction', timeLabels= ['Before Construction',
'Before Construction', 'Before Construction',
@ -146,7 +156,12 @@ timeLabels= ['Before Construction',
'November', 'November',
None, None,
'November', 'November',
'November'] 'November',
'February',
None,
'February',
'February',
'February']
wave_bts_file = [ wave_bts_file = [
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts', 'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
@ -182,13 +197,18 @@ wave_bts_file = [
None, None,
None, None,
None, None,
None,
None,
None,
None,
None,
None] None]
# %% Read in site shapefile # %% Read in site shapefile
siteShp = gp.read_file('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/SitePolygons_Crane.shp') siteShp = gp.read_file('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/SitePolygons_Crane_Caribee.shp')
siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621") siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")
for s in [33]: #range(15, 27): for s in [34, 36, 37, 38]: #range(15, 27):
## Define master import path ## Define master import path
importPath = importPaths[s] importPath = importPaths[s]
siteName = siteNames[s] siteName = siteNames[s]
@ -217,7 +237,7 @@ for s in [33]: #range(15, 27):
Obs_dat =Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()] Obs_dat =Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()]
# Set Time Zone for Sensor. Sometimes it is set to UTC, sometimes it is set to EST # Set Time Zone for Sensor. Sometimes it is set to UTC, sometimes it is set to EST
if s < 15 or s == 30 or s == 32 or s == 33: if s < 15 or s >= 30:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados').dt.tz_convert('UTC') Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados').dt.tz_convert('UTC')
else: else:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('UTC') Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('UTC')
@ -233,7 +253,7 @@ for s in [33]: #range(15, 27):
#convert GPS data to geodataframe #convert GPS data to geodataframe
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326")) GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))
if s == 30 or s == 32 or s == 33: if s >= 30:
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date1'].astype(str) + ' ' + GPS_gdf['Time1'].astype(str)) GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date1'].astype(str) + ' ' + GPS_gdf['Time1'].astype(str))
else: else:
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str)) GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))
@ -295,7 +315,12 @@ for s in [33]: #range(15, 27):
GFS_Lon = -59.442 GFS_Lon = -59.442
GFS_Lat = 13.1075 GFS_Lat = 13.1075
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[3, 1]) Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[3, 1])
elif siteName == 'Caribee':
axXlim = (217929, 218345)
axYlim = (1446528, 1446818)
GFS_Lon = -59.442
GFS_Lat = 13.1075
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[4, 1])
# Set min and max times using conductivity # Set min and max times using conductivity
if Obs_geo['inArea'].any(): if Obs_geo['inArea'].any():
@ -457,8 +482,8 @@ for s in [33]: #range(15, 27):
parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae] parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae]
# paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0] # paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0]
# paramMax = [1.0, 36.0, 34.0, 31.0, 20.0, 1.0] # paramMax = [1.0, 36.0, 34.0, 31.0, 20.0, 1.0]
paramMin = [0.0, 32.0, 100, 26.0, 0, 0] paramMin = [0.0, 32.0, 100, 24.0, 0, 0]
paramMax = [6, 36.0, 130, 34.0, 150.0, 12000] paramMax = [6, 36.5, 130, 34.0, 150.0, 12000]
fig.patch.set_facecolor('white') fig.patch.set_facecolor('white')
@ -496,8 +521,9 @@ for s in [33]: #range(15, 27):
# Find the local maximums for Turbidity # Find the local maximums for Turbidity
if param == 'CH6:Turbidity(NTU)': if param == 'CH6:Turbidity(NTU)':
##### Adjust "order" to control the number of turbidity points that are selected
ilocs_max.append(argrelextrema(OBS_smoothed.values, ilocs_max.append(argrelextrema(OBS_smoothed.values,
np.greater_equal, order=8, mode='wrap')[0]) np.greater_equal, order=12, mode='wrap')[0])
# Add start and end points? # Add start and end points?
# ilocs_max = np.insert(ilocs_max, 0, 10) # ilocs_max = np.insert(ilocs_max, 0, 10)
@ -676,7 +702,11 @@ plotIDXsLoop = []
for i in range(0, 1): for i in range(0, 1):
summTable = None summTable = None
# plotIDXs = plotIDXsLoop[i] # plotIDXs = plotIDXsLoop[i]
plotIDXs = np.arange(i, 25, 3) # plotIDXs = np.arange(i, 25, 3)
#plotIDXs = [0, 3, 6, 12, 15, 18, 21, 24, 27, 30, 34] # Great House
# plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32, 36] # Old Queen's Fort
plotIDXs = [1, 4, 7, 10, 13, 16, 19] # Greensleevs
for s, plotIDX in enumerate(plotIDXs): for s, plotIDX in enumerate(plotIDXs):
## Define master import path ## Define master import path
@ -712,7 +742,9 @@ for i in range(2, 3):
summTable = None summTable = None
# plotIDXs = np.arange(i, 27, 3) # plotIDXs = np.arange(i, 27, 3)
#plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32] #plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32]
plotIDXs = [0, 3, 6, 12, 15, 18, 21, 24, 27, 30] # plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32, 36] # Old Queen's Fort
plotIDXs = [1, 4, 7, 10, 13, 16, 19] # Greensleevs
plotDates = [] plotDates = []
plotTable = np.empty([10, len(plotIDXs)]) plotTable = np.empty([10, len(plotIDXs)])

View File

@ -1 +1 @@
EFDC_compare.ipynb NTC_PlottingD3D_2023.py

View File

@ -2,7 +2,7 @@
<module type="PYTHON_MODULE" version="4"> <module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager"> <component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" /> <content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.8 (D3DFM)" jdkType="Python SDK" /> <orderEntry type="jdk" jdkName="Python 3.9 (dfm_tools_23)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />
</component> </component>
<component name="PyDocumentationSettings"> <component name="PyDocumentationSettings">

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<project version="4"> <project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (D3DFM)" project-jdk-type="Python SDK" /> <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (dfm_tools_23)" project-jdk-type="Python SDK" />
</project> </project>

View File

@ -12,16 +12,21 @@ import matplotlib.pyplot as plt
import matplotlib.dates as mdates import matplotlib.dates as mdates
import numpy as np import numpy as np
import geopandas as gp import geopandas as gp
gp.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw' # gp.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'
import scipy as sp import scipy as sp
import scipy.ndimage import scipy.ndimage
#from astropy.convolution import Gaussian2DKernel #from astropy.convolution import Gaussian2DKernel
import contextily as ctx import contextily as ctx
import os import os
from shapely.geometry import Point from shapely.geometry import Point, MultiPoint
from shapely.ops import nearest_points
import datetime as dt
import pytz
import pickle import pickle
# %% Read in data # %% Read in data
dataPath = '//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/05_Analyses/07 ADCP/NC_CurrentMeter_All_Phase1_all_data_2012_05_20/' dataPath = '//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/05_Analyses/07 ADCP/NC_CurrentMeter_All_Phase1_all_data_2012_05_20/'
@ -134,67 +139,71 @@ gdf.loc[:, df.columns != 'geometry'].to_xarray().to_netcdf(
'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/Transects.nc') 'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/Transects.nc')
# %% Load in moored data # %% Load in moored data
df_moored_data = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_moored_2013_05_20.xlsx', gdf_moored = dict()
sheet_name='mag_all_moored')
# Shift col names to put second back in # Loop through directions
colIN = list(df_moored_data.columns) for direction in ['u_all_moored', 'u_all_moored', 'mag_all_moored' , 'dir_all_moored']:
colOUT = colIN[0:6] + ['second'] + colIN[6:-1] df_moored_data = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_moored_2013_05_20.xlsx',
df_moored_data.columns = colOUT sheet_name=direction)
### Moored current meter locations from here: # Shift col names to put second back in
### file://srv-ott3/Projects/11934.201%20Newtown%20Creek%20TPP%20%E2%80%93%20Privileged%20and%20Confidential/03_Data/01_BkgrdReports/NC_DRAFT_DSR_Submittal_No_3_2013-07-03.pdf colIN = list(df_moored_data.columns)
### Table 3-1, PDF page 65 colOUT = colIN[0:6] + ['second'] + colIN[6:-1]
### Locations change between deployment 1-9 and 10/11 df_moored_data.columns = colOUT
### YSI from 2021 FRMR report Pg 385 ### Moored current meter locations from here:
### file://srv-ott3/Projects/11934.201%20Newtown%20Creek%20TPP%20%E2%80%93%20Privileged%20and%20Confidential/03_Data/01_BkgrdReports/NC_DRAFT_DSR_Submittal_No_3_2013-07-03.pdf
### Table 3-1, PDF page 65
### Locations change between deployment 1-9 and 10/11
df_moored = pd.DataFrame( ### YSI from 2021 FRMR report Pg 385
{'depOrig': ['NC083CM', 'NC081CM', 'NC082CM', 'NC086CM', 'EK023CM',
'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
'depCurr': ['NC086CM', 'NC087CM', 'NC088CM', 'NC089CM', 'EK023CM',
'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
'Northing': [208519.95, 206083.24, 203835.10, 201381.55, 200827.33,
208809.66, 205547.85, 203870.2, 201684.6, 200860.91, 200336.12],
'Easting': [996198.97, 1000959.45, 1004387.42, 1005283.07, 1004644.90,
996586.22, 1001028.85, 1004501.4, 1005027.4, 1004516.53, 1005535.44],
'bedElev': [-16, -17, -20, -18, -20, 0, 0, 0, 0, 0, 0]})
gdf_moored_loc = gp.GeoDataFrame(
df_moored, geometry=gp.points_from_xy(df_moored.Easting, df_moored.Northing), crs="EPSG:2263")
gdf_moored_loc.geometry = gdf_moored_loc.geometry.to_crs("EPSG:32118")
# Loop through Station IDs for deployments 1-9 and assign locations df_moored = pd.DataFrame(
# for d in range(1,10): {'depOrig': ['NC083CM', 'NC081CM', 'NC082CM', 'NC086CM', 'EK023CM',
# depMask = df_moored_data['deployment'] == ('dep' + str(d)) 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
# for stationIDX, station in enumerate(df_moored['depOrig']): 'depCurr': ['NC086CM', 'NC087CM', 'NC088CM', 'NC089CM', 'EK023CM',
# stationMask = (df_moored_data['station'] == station) & depMask 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
# 'Northing': [208519.95, 206083.24, 203835.10, 201381.55, 200827.33,
# # Assign geography to station plus deployment 208809.66, 205547.85, 203870.2, 201684.6, 200860.91, 200336.12],
# df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing'] 'Easting': [996198.97, 1000959.45, 1004387.42, 1005283.07, 1004644.90,
# df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting'] 996586.22, 1001028.85, 1004501.4, 1005027.4, 1004516.53, 1005535.44],
'bedElev': [-16, -17, -20, -18, -20, 0, 0, 0, 0, 0, 0]})
gdf_moored_loc = gp.GeoDataFrame(
df_moored, geometry=gp.points_from_xy(df_moored.Easting, df_moored.Northing), crs="EPSG:2263")
gdf_moored_loc.geometry = gdf_moored_loc.geometry.to_crs("EPSG:32118")
# Loop through Station IDs for deployments 10-11 and assign locations # Loop through Station IDs for deployments 1-9 and assign locations
for d in range(1,12): for d in range(1, 11):
depMask = df_moored_data['deployment'] == 'dep' + str(d) depMask = df_moored_data['deployment'] == ('dep' + str(d))
for stationIDX, station in enumerate(df_moored['depCurr']): for stationIDX, station in enumerate(df_moored['depOrig']):
stationMask = (df_moored_data['station'] == station) & depMask stationMask = (df_moored_data['station'] == station) & depMask
# Assign geography to station plus deployment # Assign geography to station plus deployment
df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing'] df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting'] df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
# Create geodataframe and convert to USSP # Loop through Station IDs for deployments 10-11 and assign locations
gdf_moored = gp.GeoDataFrame( for d in range(10,12):
df_moored_data, geometry=gp.points_from_xy(df_moored_data.Easting, df_moored_data.Northing), crs="EPSG:2263") depMask = df_moored_data['deployment'] == 'dep' + str(d)
for stationIDX, station in enumerate(df_moored['depCurr']):
stationMask = (df_moored_data['station'] == station) & depMask
#convert data to CRS of D3D # Assign geography to station plus deployment
gdf_moored.geometry = gdf_moored.geometry.to_crs("EPSG:32118") df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
gdf_moored['date'] = pd.to_datetime(gdf_moored['year'].astype(str) + '-' + # Create geodataframe and convert to USSP
gdf_moored['month'].astype(str).str.zfill(2) + '-' + gdf_moored[direction] = gp.GeoDataFrame(
gdf_moored['day'].astype(str).str.zfill(2) + ' ' + df_moored_data, geometry=gp.points_from_xy(df_moored_data.Easting, df_moored_data.Northing), crs="EPSG:2263")
gdf_moored['hour'].astype(str).str.zfill(2) + ':' +
gdf_moored['minute'].astype(str).str.zfill(2)) #convert data to CRS of D3D
gdf_moored[direction].geometry = gdf_moored[direction].geometry.to_crs("EPSG:32118")
gdf_moored[direction]['date'] = pd.to_datetime(gdf_moored[direction]['year'].astype(str) + '-' +
gdf_moored[direction]['month'].astype(str).str.zfill(2) + '-' +
gdf_moored[direction]['day'].astype(str).str.zfill(2) + ' ' +
gdf_moored[direction]['hour'].astype(str).str.zfill(2) + ':' +
gdf_moored[direction]['minute'].astype(str).str.zfill(2))
# %% ADCP1 # %% ADCP1
@ -214,7 +223,7 @@ fig, axes = plt.subplots(nrows=5, ncols=1, figsize=(16, 8))
fig.tight_layout(pad=2) fig.tight_layout(pad=2)
# Loop through Station IDs for deployments 1-9 and assign locations # Loop through Station IDs for deployments 1-9 and assign locations
for d in range(1,12): for d in range(11):
depMask = gdf_moored['deployment'] == ('dep' + str(d)) depMask = gdf_moored['deployment'] == ('dep' + str(d))
for stationIDX, station in enumerate(df_moored['depCurr']): for stationIDX, station in enumerate(df_moored['depCurr']):
stationMask = (gdf_moored['station'] == station) & depMask stationMask = (gdf_moored['station'] == station) & depMask
@ -244,6 +253,112 @@ plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADCP_2012.png', # fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADCP_2012.png',
# bbox_inches='tight', dpi=300) # bbox_inches='tight', dpi=300)
#%% Read in report obs
reportObs = pd.read_csv('C:/Users/arey/files/Projects/Newtown/NTC_Obs_NC_086_Report.csv',
header=None, parse_dates=[0], index_col=0)
# Avergae duplicate dates
reportObs = reportObs.groupby(reportObs.index).mean()
#%% Read in report EFDC
reportEFDC = pd.read_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/06_Models/02_EFDC/04_Model_Comparisons/NC086_DAV_Report_2016_EFDC.csv',
header=None, parse_dates=[0], index_col=0)
#%% Read in EFDC data
efdc7_df = pd.read_csv(
'//ott-athena.baird.com/D/11934.201_Newtown_Creek_TPP/EFDC_Results/RevisedEFDC/2012_Mon7/READ_FROM_EFDC_GRAPHICS_OUT_BIN.CSV')
# Read in EFDC grid
efdc_grid_df = pd.read_csv('//ott-athena.baird.com/F/2019_FMRM_Deliverable/INPUT_FILES/GRID_FILES/NC_ER_lxly_20140129.inp',
delim_whitespace=True, skiprows=4,
names=['I', 'J', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
efdc_grid_gdf = gp.GeoDataFrame(
efdc_grid_df, geometry=gp.points_from_xy(efdc_grid_df.X, efdc_grid_df.Y), crs="EPSG:32118")
efdc7_merged = pd.merge(efdc7_df, efdc_grid_df, how='left', left_on=['I_MOD','J_MOD'],
right_on = ['I','J']).drop(columns=[
' DUMPID', 'END_hr', 'I_MOD', 'J_MOD', 'I_MOD', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
efdc7_merged['date'] = pd.to_datetime([dt.datetime(2012, 7, 1, 00, 00, 00) +
dt.timedelta(hours=h) for h in efdc7_merged['ST_hr']])
efdc7_merged.set_index('date', inplace=True)
efdc7_merged.index = efdc7_merged.index.tz_localize(
pytz.timezone('EST')) #Convert to UTC
del efdc7_df
efdc_merged_gdf = gp.GeoDataFrame(
efdc7_merged, geometry=efdc7_merged['geometry'], crs="EPSG:32118")
del efdc7_merged
#%% Find nearest grid point to EFDC ADCP Station
station = 'NC086CM'
stationLoc = gdf_moored_loc.loc[0, 'geometry']
nearest_geoms = nearest_points(stationLoc, MultiPoint(efdc_grid_gdf.geometry))
station_gdf = efdc_merged_gdf.loc[efdc_merged_gdf['geometry']==nearest_geoms[1]]
#%% Plot single deplyment for comparision
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(12, 8), sharex=True)
station = 'NC086CM'
stationMask = (gdf_moored['mag_all_moored']['station'] == station)
# axes.plot(gdf_moored.loc[stationMask, 'date'],
# gdf_moored.iloc[:, 8:50].loc[stationMask].mean(axis=1).multiply(3.28084), 'k')
# Plot vector component in arbitrary direction
for plotDir in [250]:
ADCPtime = pd.to_datetime(gdf_moored['mag_all_moored'].loc[stationMask, 'date'].values).tz_localize(
pytz.timezone('EST'))
axes[0].plot(ADCPtime, gdf_moored['mag_all_moored'].iloc[:, 8:50].loc[stationMask].mean(
axis=1).multiply(3.28084).multiply(
np.cos(np.deg2rad(gdf_moored['dir_all_moored'].iloc[:, 8:50].loc[
stationMask].mean(axis=1)) - np.deg2rad(plotDir))), label =
'ADCP Obs at ' + str(plotDir) + ' deg')
# axes.plot(gdf_moored.loc[stationMask, 'date'],
# gdf_moored.iloc[:, 8:50].loc[stationMask].mean(axis=1).multiply(3.28084), 'k')
# Plot EFDC
axes[1].plot(station_gdf.index,
np.sqrt(station_gdf.loc[:, ['U_VEL1', 'U_VEL5', 'U_VEL10']].mean(axis=1)**2+
station_gdf.loc[:, ['V_VEL1', 'V_VEL5', 'V_VEL10 ']].mean(axis=1)**2)*3.28084 *
np.cos(np.arctan2(station_gdf.loc[:,
['U_VEL1', 'U_VEL5', 'U_VEL10']].mean(axis=1),
station_gdf.loc[:,
['V_VEL1', 'V_VEL5', 'V_VEL10 ']].mean(axis=1)) - np.deg2rad(plotDir)),
'r', label='EFDC Model at ' + str(plotDir) + ' deg')
axes[1].plot(station_gdf.index,
np.sqrt(station_gdf.loc[:, ['U_VEL5']].mean(axis=1)**2+
station_gdf.loc[:, ['V_VEL5']].mean(axis=1)**2)*3.28084 *
np.cos(np.arctan2(station_gdf.loc[:,
['U_VEL5']].mean(axis=1),
station_gdf.loc[:,
['V_VEL5']].mean(axis=1)) - np.deg2rad(plotDir)),
label='EFDC Model Layer 5 at ' + str(plotDir) + ' deg')
# Plot report obs
axes[0].plot(reportObs.index.tz_localize(
pytz.timezone('EST')), reportObs[1], 'k', label='Report Obs')
# plot report efdc
axes[1].plot(reportEFDC.index.tz_localize(
pytz.timezone('EST')) - dt.timedelta(hours=0), reportEFDC[1], 'k', label='Report EFDC')
# Add legend
axes[0].legend()
axes[1].legend()
axes[1].set_xlim([dt.datetime(2012, 7, 17, 12, 0, 0), dt.datetime(2012, 7, 24, 12, 0, 0)])
axes[0].set_ylim([-1, 1])
axes[1].set_ylim([-1, 1])
plt.show()
# %% Load in ADCP2 data # %% Load in ADCP2 data

View File

@ -0,0 +1,887 @@
# -*- coding: utf-8 -*-
"""
@author: aforsythe
Copied from "P:/11934.201 Newtown Creek TPP Privileged and Confidential/05_Analyses/07 ADCP/NC_CurrentMeter_All_Phase1_all_data_2012_05_20/ADCP_Plot_v4.py"
_AJMR
"""
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
import geopandas as gp
# gp.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'
import scipy as sp
import scipy.ndimage
#from astropy.convolution import Gaussian2DKernel
import contextily as ctx
import os
from shapely.geometry import Point, MultiPoint
from shapely.ops import nearest_points
import datetime as dt
import pytz
import pickle
# %% Read in data
# Vessel-mounted (mobile) ADCP transects, Phase 1: magnitude sheet only.
dataPath = '//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/05_Analyses/07 ADCP/NC_CurrentMeter_All_Phase1_all_data_2012_05_20/'
df = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_mobile_2013_05_02.xlsx',
                   sheet_name='mag_all_mobile')
# Convert to geodataframe (positions are recorded as lon/lat, WGS84)
gdf = gp.GeoDataFrame(df, geometry=gp.points_from_xy(df.Longitude, df.Latitude, crs="EPSG:4326"))
# convert data to CRS of D3D
gdf.geometry = gdf.geometry.to_crs("EPSG:32118")
# Add new cols to geodataframe (filled per-transect in the loop below)
gdf['Distance'] = np.zeros([len(gdf), 1])
gdf['DistanceSmth'] = np.zeros([len(gdf), 1])
gdf['dist'] = np.zeros([len(gdf), 1])
gdf['date'] = np.zeros([len(gdf), 1])
# Set Date as a 'YYYY-MM-DD' string from the component columns
gdf['date'] = gdf['year'].astype(str) + '-' + gdf['mon'].astype(str).str.zfill(2) + '-' + gdf['day'].astype(
    str).str.zfill(2)
# Column names to distances data for plotting
depths1 = gdf.columns[11:43]  # ADCP sensor depth reading column names to list
# Depth labels like '0.5m' become negative metres below the surface for plotting
depths = np.array([float(d.replace('m', '')) for d in depths1]) * -1
# loop through all transects
# Plot each vessel transect as a distance-vs-depth pcolormesh of current
# magnitude. Ping spacings are smoothed first because many pings are logged
# at duplicate positions (zero spacing).
transectCount = 0
transects = gdf['transect'].unique()
# BUG FIX: the original created `kernel = Gaussian2DKernel(x_stddev=0.25)`
# here, but the astropy import above is commented out, so this line raised
# NameError at runtime. The kernel was only used by the (also commented)
# astropy convolution below, so it is disabled along with it.
# kernel = Gaussian2DKernel(x_stddev=0.25)
for transect_id in range(1, 2):  # transects:
    # Select a given transect
    tMask = (gdf['transect']==transect_id)
    # Remove rows without locations
    tMask[pd.isna(gdf['Latitude'])] = False
    # Find distance between points in m
    gdf.loc[tMask, 'Distance'] = gdf[tMask].distance(gdf[tMask].shift())
    # Many of the points are recorded as being at the same location...
    # Where the distance is zero, set it to half of the following distance
    # Find zeros and iloc after zeros
    zeroDist = (gdf['Distance'] == 0) & tMask
    zeroDistShift = zeroDist.shift()
    zeroDistShift.iloc[0] = False  # shift leaves NaN at row 0; force a boolean
    distShift = gdf['Distance'].shift(-1)
    distShift.iloc[-1] = False
    gdf.loc[tMask, 'DistanceSmth'] = gdf['Distance'][tMask]
    # Set zeros to half of following
    gdf.loc[zeroDist, 'DistanceSmth'] = distShift[zeroDist]/2
    # Set following to half
    gdf.loc[zeroDistShift, 'DistanceSmth'] = gdf['Distance']/2
    # Set initial to zero
    gdf.loc[gdf[tMask].index[0], 'DistanceSmth'] = 0
    # If final location is duplicate, set to previous value
    if gdf.loc[gdf[tMask].index[-1], 'Distance']==0:
        gdf.loc[gdf[tMask].index[-1], 'DistanceSmth'] = gdf.loc[gdf[tMask].index[-2], 'DistanceSmth']
    # Get cumulative sum of distances
    gdf.loc[tMask, 'dist'] = gdf.loc[tMask, 'DistanceSmth'].cumsum()
    # get velocity data all rows columns 11-43
    V = gdf.values[tMask, 11:43].astype(float)
    # %% Plotting
    if transectCount == 0:
        fig, axes = plt.subplots(nrows=5, ncols=4, figsize=(16, 8))
        fig.tight_layout(pad=2.5)
        ax = axes.flat
    colormap = 'jet'
    vmin = 0
    vmax = 0.5
    # `scipy.ndimage.filters` is a deprecated alias; call gaussian_filter directly.
    VS = sp.ndimage.gaussian_filter(V, [1, 1], mode='nearest')
    # vDatEnd = (~np.isnan(V)).cumsum(1).argmax(1).astype(int) + 1
    # VS = convolve(V, kernel)
    # for i in range(0, len(VS)):
    #     VS[i, vDatEnd[i]:-1] = np.nan
    pltDat = ax[transectCount].pcolormesh(gdf.loc[tMask, 'dist'], depths, np.transpose(VS),
                                          shading='auto', vmin=vmin, vmax=vmax, cmap=colormap)
    cbar = fig.colorbar(pltDat, ax=ax[transectCount],
                        shrink=0.95, aspect=5)
    cbar.set_label('Magnitude [m/s]')
    ax[transectCount].set_xlabel('Distance Along Transect [m]')
    ax[transectCount].set_ylabel('Depth below WSL [m]')
    # Title each panel with the station of the first ping in the transect
    stationStart = next((i for i, j in enumerate(tMask) if j), None)
    ax[transectCount].set_title(gdf.loc[stationStart, 'station'])
    ax[transectCount].set_ylim(-6, 0)
    transectCount = transectCount + 1
plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/Transects221-241.png',
#             bbox_inches='tight', dpi=300)
# %% Save Transects
# Export all transect rows (minus the non-serializable geometry column) to NetCDF.
gdf.loc[:, df.columns != 'geometry'].to_xarray().to_netcdf(
    'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/Transects.nc')
# %% Load in moored data
# Build one GeoDataFrame per worksheet of the moored current-meter workbook
# (u/v velocity components, magnitude, direction), keyed by sheet name.
gdf_moored = dict()

### Moored current meter locations from here:
### file://srv-ott3/Projects/11934.201%20Newtown%20Creek%20TPP%20%E2%80%93%20Privileged%20and%20Confidential/03_Data/01_BkgrdReports/NC_DRAFT_DSR_Submittal_No_3_2013-07-03.pdf
### Table 3-1, PDF page 65
### Locations change between deployment 1-9 and 10/11
### YSI from 2021 FRMR report Pg 385
# Station metadata is identical for every sheet, so it is built once here
# (the original rebuilt it on every loop pass).
df_moored = pd.DataFrame(
    {'depOrig': ['NC083CM', 'NC081CM', 'NC082CM', 'NC086CM', 'EK023CM',
                 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
     'depCurr': ['NC086CM', 'NC087CM', 'NC088CM', 'NC089CM', 'EK023CM',
                 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
     'Northing': [208519.95, 206083.24, 203835.10, 201381.55, 200827.33,
                  208809.66, 205547.85, 203870.2, 201684.6, 200860.91, 200336.12],
     'Easting': [996198.97, 1000959.45, 1004387.42, 1005283.07, 1004644.90,
                 996586.22, 1001028.85, 1004501.4, 1005027.4, 1004516.53, 1005535.44],
     'bedElev': [-16, -17, -20, -18, -20, 0, 0, 0, 0, 0, 0]})
gdf_moored_loc = gp.GeoDataFrame(
    df_moored, geometry=gp.points_from_xy(df_moored.Easting, df_moored.Northing), crs="EPSG:2263")
gdf_moored_loc.geometry = gdf_moored_loc.geometry.to_crs("EPSG:32118")

# Loop through directions
# BUG FIX: the original list contained 'u_all_moored' twice and never read the
# v component; the duplicate is replaced with 'v_all_moored' (assumed to
# mirror the u/mag/dir sheet naming — confirm against the workbook).
for direction in ['u_all_moored', 'v_all_moored', 'mag_all_moored', 'dir_all_moored']:
    df_moored_data = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_moored_2013_05_20.xlsx',
                                   sheet_name=direction)
    # Shift col names to put second back in
    colIN = list(df_moored_data.columns)
    colOUT = colIN[0:6] + ['second'] + colIN[6:-1]
    df_moored_data.columns = colOUT
    # Loop through Station IDs for deployments 1-9 and assign locations
    # NOTE(review): range(1, 11) also stamps dep10 with the original (depOrig)
    # coordinates; dep10 is overwritten with depCurr in the next loop, so the
    # final result still reflects the 10/11 relocation — confirm intended.
    for d in range(1, 11):
        depMask = df_moored_data['deployment'] == ('dep' + str(d))
        for stationIDX, station in enumerate(df_moored['depOrig']):
            stationMask = (df_moored_data['station'] == station) & depMask
            # Assign geography to station plus deployment
            df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
            df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
    # Loop through Station IDs for deployments 10-11 and assign locations
    for d in range(10, 12):
        depMask = df_moored_data['deployment'] == 'dep' + str(d)
        for stationIDX, station in enumerate(df_moored['depCurr']):
            stationMask = (df_moored_data['station'] == station) & depMask
            # Assign geography to station plus deployment
            df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
            df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
    # Create geodataframe and convert to USSP
    gdf_moored[direction] = gp.GeoDataFrame(
        df_moored_data, geometry=gp.points_from_xy(df_moored_data.Easting, df_moored_data.Northing), crs="EPSG:2263")
    # convert data to CRS of D3D
    gdf_moored[direction].geometry = gdf_moored[direction].geometry.to_crs("EPSG:32118")
    # Assemble a proper datetime column from the per-component columns
    gdf_moored[direction]['date'] = pd.to_datetime(
        gdf_moored[direction]['year'].astype(str) + '-' +
        gdf_moored[direction]['month'].astype(str).str.zfill(2) + '-' +
        gdf_moored[direction]['day'].astype(str).str.zfill(2) + ' ' +
        gdf_moored[direction]['hour'].astype(str).str.zfill(2) + ':' +
        gdf_moored[direction]['minute'].astype(str).str.zfill(2))
# %% ADCP1
# Export the moored magnitude data to NetCDF.
# BUG FIX: after the dict refactor above, `gdf_moored` is a dict keyed by
# sheet name, so the original `gdf_moored.iloc[...]` / `gdf_moored[...]`
# column accesses raised at runtime. This cell now reads the magnitude
# sheet explicitly (consistent with the comparison plot below).
gdf_moored['mag_all_moored'].iloc[:, 1:-2].to_xarray().to_netcdf(
    'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADCP1.nc')
# %% Plot moored data
gdf_moored_mag = gdf_moored['mag_all_moored']
# Column names to distances data for plotting
depths1 = gdf_moored_mag.columns[8:51]  # ADCP sensor depth reading column names to list
depths_moored = np.array([float(d.replace('m', '')) for d in depths1])
colormap = 'jet'
vmin = 0
vmax = 0.2
# NOTE(review): 5 axes but df_moored lists 11 stations; assumes only the
# first five appear in the moored sheets — confirm.
fig, axes = plt.subplots(nrows=5, ncols=1, figsize=(16, 8))
fig.tight_layout(pad=2)
# Loop through deployments and station IDs, plotting each station's record.
# BUG FIX: deployments are labelled 'dep1'..'dep11'; `range(11)` scanned the
# absent 'dep0' and skipped 'dep11' — restored to range(1, 12).
for d in range(1, 12):
    depMask = gdf_moored_mag['deployment'] == ('dep' + str(d))
    for stationIDX, station in enumerate(df_moored['depCurr']):
        stationMask = (gdf_moored_mag['station'] == station) & depMask
        V = gdf_moored_mag.values[stationMask, 8:51].astype(float)
        if len(gdf_moored_mag.loc[stationMask, 'date']) != 0:
            pltDat = axes[stationIDX].pcolormesh(gdf_moored_mag.loc[stationMask, 'date'],
                                                 depths_moored, np.transpose(V),
                                                 shading='auto', vmin=vmin, vmax=vmax, cmap=colormap)
            axes[stationIDX].set_ylim(0, 7)
            fmt_half_year = mdates.MonthLocator(interval=1)
            axes[stationIDX].xaxis.set_major_locator(fmt_half_year)
            axes[stationIDX].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
            if d == 1:
                # Label/decorate each panel only once, on the first deployment
                axes[stationIDX].set_ylabel('Elevation [m]')
                axes[stationIDX].set_title(station)
                cbar = fig.colorbar(pltDat, ax=axes[stationIDX],
                                    shrink=1.1, aspect=3)
                cbar.set_label('Magnitude [m/s]')
            pltDat.set_clim([0, 0.2])
plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADCP_2012.png',
#             bbox_inches='tight', dpi=300)
#%% Read in report obs
# Observed series for station NC086 (presumably digitized from the 2016
# report figure — confirm provenance); column 0 is the timestamp index.
reportObs = pd.read_csv('C:/Users/arey/files/Projects/Newtown/NTC_Obs_NC_086_Report.csv',
                        header=None, parse_dates=[0], index_col=0)
# Average duplicate dates (the series repeats some timestamps)
reportObs = reportObs.groupby(reportObs.index).mean()
#%% Read in report EFDC
# Matching EFDC model series from the report, used in the comparison plot below.
reportEFDC = pd.read_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/06_Models/02_EFDC/04_Model_Comparisons/NC086_DAV_Report_2016_EFDC.csv',
                         header=None, parse_dates=[0], index_col=0)
#%% Read in EFDC data
# July-2012 EFDC model output for the model/observation comparison below.
efdc7_df = pd.read_csv(
    '//ott-athena.baird.com/D/11934.201_Newtown_Creek_TPP/EFDC_Results/RevisedEFDC/2012_Mon7/READ_FROM_EFDC_GRAPHICS_OUT_BIN.CSV')
# Read in EFDC grid
efdc_grid_df = pd.read_csv('//ott-athena.baird.com/F/2019_FMRM_Deliverable/INPUT_FILES/GRID_FILES/NC_ER_lxly_20140129.inp',
                           delim_whitespace=True, skiprows=4,
                           names=['I', 'J', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
efdc_grid_gdf = gp.GeoDataFrame(
    efdc_grid_df, geometry=gp.points_from_xy(efdc_grid_df.X, efdc_grid_df.Y), crs="EPSG:32118")
# Join model output onto grid coordinates by (I, J) cell index, then drop the
# bookkeeping columns. (' DUMPID' keeps its leading space to match the CSV
# header verbatim; 'I_MOD' is listed twice but pandas drops it once.)
efdc7_merged = pd.merge(efdc7_df, efdc_grid_df, how='left', left_on=['I_MOD','J_MOD'],
                        right_on = ['I','J']).drop(columns=[
    ' DUMPID', 'END_hr', 'I_MOD', 'J_MOD', 'I_MOD', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
# Model time axis: hours since 2012-07-01 00:00
efdc7_merged['date'] = pd.to_datetime([dt.datetime(2012, 7, 1, 00, 00, 00) +
                                       dt.timedelta(hours=h) for h in efdc7_merged['ST_hr']])
efdc7_merged.set_index('date', inplace=True)
# Tag the index with fixed-offset EST. NOTE(review): the original comment
# said "Convert to UTC", but tz_localize only labels the times as EST — it
# does not convert to UTC; confirm the intended timezone handling.
efdc7_merged.index = efdc7_merged.index.tz_localize(
    pytz.timezone('EST'))
del efdc7_df
efdc_merged_gdf = gp.GeoDataFrame(
    efdc7_merged, geometry=efdc7_merged['geometry'], crs="EPSG:32118")
del efdc7_merged
#%% Find nearest grid point to EFDC ADCP Station
station = 'NC086CM'
# NOTE(review): the location is taken from row 0 of gdf_moored_loc (depOrig
# 'NC083CM' / depCurr 'NC086CM'), not looked up by `station` — confirm row 0
# is the intended NC086CM position.
stationLoc = gdf_moored_loc.loc[0, 'geometry']
# Snap the station to the closest EFDC grid-cell point, then keep only the
# model time series at that cell.
nearest_geoms = nearest_points(stationLoc, MultiPoint(efdc_grid_gdf.geometry))
station_gdf = efdc_merged_gdf.loc[efdc_merged_gdf['geometry']==nearest_geoms[1]]
#%% Plot single deployment for comparison: ADCP obs (top) vs EFDC model (bottom)
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(12, 8), sharex=True)
station = 'NC086CM'
stationMask = (gdf_moored['mag_all_moored']['station'] == station)
# axes.plot(gdf_moored.loc[stationMask, 'date'],
#           gdf_moored.iloc[:, 8:50].loc[stationMask].mean(axis=1).multiply(3.28084), 'k')
# Plot vector component in arbitrary direction: project the depth-averaged
# magnitude/direction onto the plotDir axis. The 3.28084 factor is the m->ft
# conversion — NOTE(review): axis labels elsewhere say m/s; confirm units.
for plotDir in [250]:
    ADCPtime = pd.to_datetime(gdf_moored['mag_all_moored'].loc[stationMask, 'date'].values).tz_localize(
        pytz.timezone('EST'))
    axes[0].plot(ADCPtime, gdf_moored['mag_all_moored'].iloc[:, 8:50].loc[stationMask].mean(
        axis=1).multiply(3.28084).multiply(
        np.cos(np.deg2rad(gdf_moored['dir_all_moored'].iloc[:, 8:50].loc[
            stationMask].mean(axis=1)) - np.deg2rad(plotDir))), label=
        'ADCP Obs at ' + str(plotDir) + ' deg')
    # axes.plot(gdf_moored.loc[stationMask, 'date'],
    #           gdf_moored.iloc[:, 8:50].loc[stationMask].mean(axis=1).multiply(3.28084), 'k')
    # Plot EFDC: depth average of layers 1/5/10 projected onto plotDir.
    # ('V_VEL10 ' keeps its trailing space — it matches the CSV header verbatim.)
    axes[1].plot(station_gdf.index,
                 np.sqrt(station_gdf.loc[:, ['U_VEL1', 'U_VEL5', 'U_VEL10']].mean(axis=1)**2+
                         station_gdf.loc[:, ['V_VEL1', 'V_VEL5', 'V_VEL10 ']].mean(axis=1)**2)*3.28084 *
                 np.cos(np.arctan2(station_gdf.loc[:,
                        ['U_VEL1', 'U_VEL5', 'U_VEL10']].mean(axis=1),
                        station_gdf.loc[:,
                        ['V_VEL1', 'V_VEL5', 'V_VEL10 ']].mean(axis=1)) - np.deg2rad(plotDir)),
                 'r', label='EFDC Model at ' + str(plotDir) + ' deg')
    # Same projection using layer 5 only
    axes[1].plot(station_gdf.index,
                 np.sqrt(station_gdf.loc[:, ['U_VEL5']].mean(axis=1)**2+
                         station_gdf.loc[:, ['V_VEL5']].mean(axis=1)**2)*3.28084 *
                 np.cos(np.arctan2(station_gdf.loc[:,
                        ['U_VEL5']].mean(axis=1),
                        station_gdf.loc[:,
                        ['V_VEL5']].mean(axis=1)) - np.deg2rad(plotDir)),
                 label='EFDC Model Layer 5 at ' + str(plotDir) + ' deg')
# Plot report obs
axes[0].plot(reportObs.index.tz_localize(
    pytz.timezone('EST')), reportObs[1], 'k', label='Report Obs')
# plot report efdc
axes[1].plot(reportEFDC.index.tz_localize(
    pytz.timezone('EST')) - dt.timedelta(hours=0), reportEFDC[1], 'k', label='Report EFDC')
# Add legend
axes[0].legend()
axes[1].legend()
# Window on one week of the July 2012 deployment
axes[1].set_xlim([dt.datetime(2012, 7, 17, 12, 0, 0), dt.datetime(2012, 7, 24, 12, 0, 0)])
axes[0].set_ylim([-1, 1])
axes[1].set_ylim([-1, 1])
plt.show()
# %% Load in ADCP2 data
# Read in locations
df_adcp2_locs = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/NCP2_ADCP_D1-D3/AQ_LocationsSO_ADCP_ADV_overview_Coords_20150126_AJMR.xlsx',
                              sheet_name='ADCP')
gdf_adcp2_locs = gp.GeoDataFrame(df_adcp2_locs,
                                 geometry=gp.points_from_xy(df_adcp2_locs.X_NYSPLI, df_adcp2_locs.Y_NYSPLI), crs="EPSG:2263")
gdf_adcp2_locs = gdf_adcp2_locs.to_crs("EPSG:32118")
adcp2_data_path = '//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/'
# One folder per deployment (D1-D5)
adcp2_paths = ['NCP2_ADCP_D1-D3/ADCP_D1_070314_090814', 'NCP2_ADCP_D1-D3/ADCP_D2_090914_110214',
               'NCP2_ADCP_D1-D3/ADCP_D3_110414_010615', 'NCP2_ADCP_D4-D5/D4_010715_030315',
               'NCP2_ADCP_D4-D5/D5_030415_050515']
# Nested dict: station code -> variable name -> deployment number -> GeoDataFrame
adcp2_gdfs = dict()
for depIDX, adcp2_path in enumerate(adcp2_paths):
    adcp2_files = os.listdir(adcp2_data_path + adcp2_path)  # returns list of files in adv folder
    for adcp2_file in adcp2_files:
        if '.xls' in adcp2_file or '.csv' in adcp2_file:
            if '.xls' in adcp2_file:
                df_in = pd.read_excel(adcp2_data_path + adcp2_path + '/' + adcp2_file)
            else:
                df_in = pd.read_csv(adcp2_data_path + adcp2_path + '/' + adcp2_file)
            # Match the 3-character station code embedded in the file name
            # (after the first '_') against the location table.
            for stationIDX, station in enumerate(df_adcp2_locs['parent_loc_code']):
                advStrIDX = adcp2_file.find('_')+1
                if adcp2_file[advStrIDX:advStrIDX+3] in station:
                    adcp2_geo_x = np.ones([len(df_in), 1]) * df_adcp2_locs.X_NYSPLI[stationIDX]
                    adcp2_geo_y = np.ones([len(df_in), 1]) * df_adcp2_locs.Y_NYSPLI[stationIDX]
                    # Assemble a datetime column from the per-component columns
                    df_in['date'] = pd.to_datetime(df_in['Year'].astype(str) + '-' + df_in['Month'].astype(
                        str).str.zfill(2) + '-' + df_in['Day'].astype(
                        str).str.zfill(2) + ' ' + df_in['Hour'].astype(
                        str).str.zfill(2) + ':' + df_in['Minute'].astype(
                        str).str.zfill(2) + ':' + df_in['Second'].astype(str).str.zfill(2))
                    # NOTE(review): unlike the other datasets, this geometry is
                    # left in EPSG:2263 (no to_crs to 32118) — confirm intended.
                    gdf_in = gp.GeoDataFrame(
                        df_in, geometry=gp.points_from_xy(adcp2_geo_x, adcp2_geo_y), crs="EPSG:2263")
                    if adcp2_file[advStrIDX:advStrIDX + 3] not in adcp2_gdfs:
                        adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]] = dict()
                    if adcp2_file[0:advStrIDX-1] not in adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]]:
                        adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]][adcp2_file[0:advStrIDX - 1]] = dict()
                    if depIDX+1 not in adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]][adcp2_file[0:advStrIDX - 1]]:
                        adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]][adcp2_file[0:advStrIDX - 1]][depIDX+1] = gdf_in
# %% ADCP2
# Export every station/variable/deployment table to its own NetCDF file,
# dropping the first column and the trailing geometry/date columns.
for stationIDX, stat in enumerate(adcp2_gdfs):
    for varIDX, var in enumerate(adcp2_gdfs[stat]):
        for depIDX, depdat in enumerate(adcp2_gdfs[stat][var]):
            ncDat = adcp2_gdfs[stat][var][depdat].iloc[:, 1:-2]
            ncDat.to_xarray().to_netcdf(
                'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADCP2/' +
                stat + '_' + var + '_d' + str(depdat) + '.nc')
# Station coordinates written alongside the NetCDF exports
gdf_adcp2_locs.to_csv('C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADCP2/ADCP2locations.csv')
# %% Plot ADCP2 Data
# One panel per station: magnitude vs time/depth across all deployments.
fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(9, 8))
fig.tight_layout(pad=2)
colormap = 'jet'
vmin = 0
vmax = 0.2
for stationIDX, stat in enumerate(adcp2_gdfs):
    for depIDX, depdat in enumerate(adcp2_gdfs[stat]['mag']):
        depths1 = adcp2_gdfs[stat]['mag'][depdat].columns[6:-2]  # ADCP sensor depth reading column names to list
        depths_moored = np.array([float(d.replace('m', '')) for d in depths1])
        V = adcp2_gdfs[stat]['mag'][depdat].values[:, 6:-2].astype(float)
        pltDat = axes[stationIDX].pcolormesh(adcp2_gdfs[stat]['mag'][depdat].loc[:, 'date'],
                                             depths_moored, np.transpose(V),
                                             shading='auto', vmin=vmin, vmax=vmax, cmap=colormap)
        axes[stationIDX].set_ylim(0, 7)
        axes[stationIDX].set_xlim(pd.to_datetime("2014-07-01"), pd.to_datetime('2015-05-15'))
        #axes[stationIDX, depIDX].format_xdata = mdates.DateFormatter('%Y-%m')
        fmt_half_year = mdates.MonthLocator(interval=1)
        axes[stationIDX].xaxis.set_major_locator(fmt_half_year)
        axes[stationIDX].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
        axes[stationIDX].set_title(str(stat))
        axes[stationIDX].set_ylabel('Elevation [m]')
        # NOTE(review): a colorbar is added on every deployment pass; likely
        # only one per station panel is intended — confirm.
        cbar = fig.colorbar(pltDat, ax=axes[stationIDX],
                            shrink=1.1, aspect=3)
        cbar.set_label('Magnitude [m/s]')
        pltDat.set_clim([0, 0.2])
plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADCP_2014.png',
#             bbox_inches='tight', dpi=300)
# %% Load in Water Level Data
# Gauge Locations from map
# NOTE(review): reading KML needs the fiona KML driver, which is enabled by
# the (currently commented-out) supported_drivers line in the imports above —
# confirm this read still works.
gdf_gaugeLoc = gp.read_file('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/NTC6.kml')
gdf_gaugeLocUSSP = gdf_gaugeLoc.to_crs("EPSG:32118")
# All in Feet NAVD88
df_wl_FFG = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP1_Gauge_Elev_Data_20130521/NC_Gauge_Elev_Data_20130521.xlsx',
                          sheet_name='Field_Facility_Gauge')
# Every record gets the fixed gauge position (KML feature index 2)
gdf_wl_FFG = gp.GeoDataFrame(df_wl_FFG,
                             geometry=gp.points_from_xy(np.ones([len(df_wl_FFG), 1]) * gdf_gaugeLoc['geometry'].x[2],
                                                        np.ones([len(df_wl_FFG), 1]) * gdf_gaugeLoc['geometry'].y[2]), crs="EPSG:4326")
gdf_wl_FFG.geometry = gdf_wl_FFG.geometry.to_crs("EPSG:32118")
df_wl_NGG1 = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP1_Gauge_Elev_Data_20130521/NC_Gauge_Elev_Data_20130521.xlsx',
                           sheet_name='National_Grid_Gauge')
# National Grid gauge deployment 1 (KML feature index 3)
gdf_wl_NGG1 = gp.GeoDataFrame(df_wl_NGG1,
                              geometry=gp.points_from_xy(np.ones([len(df_wl_NGG1), 1]) * gdf_gaugeLoc['geometry'].x[3],
                                                         np.ones([len(df_wl_NGG1), 1]) * gdf_gaugeLoc['geometry'].y[3]), crs="EPSG:4326")
gdf_wl_NGG1.geometry = gdf_wl_NGG1.geometry.to_crs("EPSG:32118")
# Also includes temperature
df_wl_NGG2 = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP2_NG_TideGauge_20160113/NCP2_NG_TideGauge_20160113.xlsx',
                           sheet_name='Sheet1')
# National Grid gauge deployment 2 (same KML feature index 3)
gdf_wl_NGG2 = gp.GeoDataFrame(df_wl_NGG2,
                              geometry=gp.points_from_xy(np.ones([len(df_wl_NGG2), 1]) * gdf_gaugeLoc['geometry'].x[3],
                                                         np.ones([len(df_wl_NGG2), 1]) * gdf_gaugeLoc['geometry'].y[3]), crs="EPSG:4326")
gdf_wl_NGG2.geometry = gdf_wl_NGG2.geometry.to_crs("EPSG:32118")
# Export all three gauge records
gdf_wl_FFG.to_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/FieldFacility.csv')
gdf_wl_NGG1.to_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid1.csv')
gdf_wl_NGG2.to_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid2.csv')
# %% Plot Water Level Data
# Overlay the three gauge records on one axis, converting ft NAVD88 -> m NAVD88.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 4))
ft_to_m = 0.3048
wl_series = [
    (gdf_wl_FFG.Date_time, gdf_wl_FFG['Water Surface Elevation (ft)'],
     'Field Facility Gauge'),
    (gdf_wl_NGG1.Date_time, gdf_wl_NGG1['Water Surface Elevation (ft)'],
     'National Grid Gauge Deployment 1'),
    (gdf_wl_NGG2.datetime, gdf_wl_NGG2['water_surface_elevation'],
     'National Grid Gauge Deployment 2'),
]
for wl_time, wl_ft, wl_label in wl_series:
    axes.plot(wl_time, wl_ft * ft_to_m, label=wl_label)
# Six-month major ticks with year-month labels
axes.xaxis.set_major_locator(mdates.MonthLocator(interval=6))
axes.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
axes.set_ylabel('Water Surface Elevation [mNAVD88]')
axes.set_title('Water Surface Elevation')
axes.legend()
fig.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/WaterLevel.png',
#             bbox_inches='tight', dpi=300)
# %% Load temperature data
df_tdat = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/10_Salinity/tData.xlsx',
                        skiprows=[1])
# Day Month order is reversed
# NOTE(review): despite the note above, to_datetime is called without
# dayfirst= — confirm CollectionDate parses as intended.
df_tdat['date'] = pd.to_datetime(df_tdat['CollectionDate'])
df_tsample = pd.read_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/10_Salinity/tSampleLocation.csv')
# Create geodataframe and convert to USSP
gdf_tsample = gp.GeoDataFrame(
    df_tsample, geometry=gp.points_from_xy(df_tsample.EastCoordinate, df_tsample.NorthCoordinate), crs="EPSG:2263")
# convert data to CRS of D3D
gdf_tsample.geometry = gdf_tsample.geometry.to_crs("EPSG:32118")
# Attach a geometry and source-area label to every temperature record by
# LocationID; records with no matching sample location get an empty Point.
tdat_geo = list()
tdat_facility = list()
for i in range(0, len(df_tdat)):
    # PERF: the original filtered df_tsample three times per row with the
    # same predicate; the matching rows are now selected once and reused.
    match = df_tsample.loc[df_tsample['LocationID'] == df_tdat['LocationID'][i]]
    geoFind = match.geometry.values
    if len(geoFind) != 0:
        tdat_geo.append(geoFind[0])
        tdat_facility.append(match.SourceArea.values[0])
    else:
        tdat_geo.append(Point())
        tdat_facility.append('')
# Create geodataframe
gdf_tdat = gp.GeoDataFrame(df_tdat, geometry=tdat_geo, crs="EPSG:32118")
gdf_tdat.loc[:, 'SourceArea'] = tdat_facility
# Normalize the begin depth to metres from whichever unit was recorded
gdf_tdat.loc[gdf_tdat.loc[:, 'DepthUnit']=='ft', 'DepthM'] = gdf_tdat.loc[gdf_tdat.loc[:, 'DepthUnit']=='ft', 'BeginDepth']*0.3048
gdf_tdat.loc[gdf_tdat.loc[:, 'DepthUnit']=='cm', 'DepthM'] = gdf_tdat.loc[gdf_tdat.loc[:, 'DepthUnit']=='cm', 'BeginDepth']*0.01
# Import spring observations (benthic water-quality field survey, 2012)
df_springDat = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/11_Temperature/NC_Spr2012_Benthic_Water_Quality_FieldData&Observations_20121022.xlsx')
# Create geodataframe and convert to USSP
gdf_springDat = gp.GeoDataFrame(
    df_springDat, geometry=gp.points_from_xy(df_springDat.x_coord_as_numeric, df_springDat.y_coord_as_numeric), crs="EPSG:2263")
gdf_springDat.geometry = gdf_springDat.geometry.to_crs("EPSG:32118")
# Import Summer observations
df_summerDat = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/11_Temperature/NC_Sum2012_Benthic_Water_Quality_FieldData&Observations_20130205.xlsx')
# Create geodataframe and convert to USSP
gdf_summerDat = gp.GeoDataFrame(
    df_summerDat, geometry=gp.points_from_xy(df_summerDat.x_coord_as_numeric, df_summerDat.y_coord_as_numeric), crs="EPSG:2263")
gdf_summerDat.geometry = gdf_summerDat.geometry.to_crs("EPSG:32118")
# Merged
# NOTE(review): mixes df_springDat (plain DataFrame) with gdf_summerDat
# (GeoDataFrame) — presumably df_summerDat was intended; the geometry is
# rebuilt from the coordinate columns below, so the result is the same.
df_SpringSummerDat = pd.concat([df_springDat, gdf_summerDat], ignore_index=True)
gdf_SpringSummerDat = gp.GeoDataFrame(
    df_SpringSummerDat, geometry=gp.points_from_xy(df_SpringSummerDat.x_coord_as_numeric, df_SpringSummerDat.y_coord_as_numeric), crs="EPSG:2263")
gdf_SpringSummerDat.geometry = gdf_SpringSummerDat.geometry.to_crs("EPSG:32118")
# %% Save temperature and salinity data
gdf_SpringSummerDat.to_csv('C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/TempSal/Spring_Summer.csv')
# %% Plot Salinity Time series
# Scatter of surface-water salinity samples vs. time and depth, coloured by
# the measured salinity value, for the two Newtown Creek source areas.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 8))
plotMaskStation = (gdf_tdat.SourceArea == 'Newtown Creek') | (gdf_tdat.SourceArea == 'East Branch of Newtown Creek')
plotMaskWater = (gdf_tdat.SampleMedium == 'Surface Water')
plotMask = plotMaskStation & plotMaskWater
pltDat = axes.scatter(gdf_tdat.loc[plotMask, 'date'],
                      gdf_tdat.loc[plotMask, 'DepthM'], 10,
                      gdf_tdat.loc[plotMask, 'NumericResult'])
# Inverted y axis so depth increases downward
axes.set_ylim(10, 0)
axes.set_ylabel('Depth below water surface [m]')
axes.set_title('Surface Water Salinity Samples')
axes.set_xlabel('Date')
# Yearly ticks (12-month interval) despite the variable name
fmt_half_year = mdates.MonthLocator(interval=12)
axes.xaxis.set_major_locator(fmt_half_year)
axes.xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
cbar = fig.colorbar(pltDat, ax=axes, shrink=0.95)
cbar.set_label('Salinity [PSU]')
pltDat.set_clim([5, 40])
fig.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/Salinity.png',
#             bbox_inches='tight', dpi=300)
# %% Plot Temperature Time series
## Additional salinity obs are here!
# One subplot per half-mile bin of distance from the creek mouth (0-3 mi),
# showing field temperature vs. time and elevation (ft -> m via *0.3048).
fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(6, 8))
fig.tight_layout(pad=2)
for i, miles in enumerate(np.arange(0, 3, 0.5)):
    plotMaskDistance = (gdf_SpringSummerDat.miles_from_NC_mouth > miles) & (
            gdf_SpringSummerDat.miles_from_NC_mouth < miles + 0.5)
    plotMaskStation = (gdf_SpringSummerDat.subfacility_code == 'Newtown Creek')
    plotMaskVar = (gdf_SpringSummerDat.chemical_name == 'Temperature (field)')# Temperature (field)'
    plotMask = plotMaskDistance & plotMaskStation & plotMaskVar
    pltDat = axes[i].scatter(gdf_SpringSummerDat.loc[plotMask, 'location_start_date'],
                             gdf_SpringSummerDat.loc[plotMask, 'elev']*0.3048, 10,
                             gdf_SpringSummerDat.loc[plotMask, 'result_value'])
    axes[i].set_ylim(-10, 0)
    axes[i].set_ylabel('Elevation [m]')
    cbar = fig.colorbar(pltDat, ax=axes[i], shrink=0.95)
    cbar.set_label('Temp [degC]')
    pltDat.set_clim([5, 30])
    # Monthly ticks despite the variable name
    fmt_half_year = mdates.MonthLocator(interval=1)
    axes[i].xaxis.set_major_locator(fmt_half_year)
    axes[i].xaxis.set_major_formatter(mdates.DateFormatter('%m-%Y'))
axes[0].set_title('Surface Water Temperature Samples')
# NOTE(review): executed after the loop, so this labels only the last axis
axes[i].set_xlabel('Date')
fig.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/Temperature.png',
#             bbox_inches='tight', dpi=300)
# %% Load in ADV data
# Read in locations
df_adv_locs = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/Velocity_Data_Compiled/Phase2/Locations/AQ_LocationsSO_ADCP_ADV_overview_Coords_20150126_AJMR.xlsx',
                            sheet_name='ADV2')
gdf_adv_locs = gp.GeoDataFrame(df_adv_locs,
                               geometry=gp.points_from_xy(df_adv_locs.X_NYSPLI, df_adv_locs.Y_NYSPLI), crs="EPSG:2263")
gdf_adv_locs = gdf_adv_locs.to_crs("EPSG:32118")
adv_data_path = '//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/Velocity_Data_Compiled/Phase2/Raw'
# One sub-folder per deployment period (D1-D6)
adv_paths = ['NCP2_ADV_D1-D2/ADV_D1_070914_080214', 'NCP2_ADV_D1-D2/ADV_D2_080214_090814',
             'NCP2_ADV_D3-D4/ADV_D3_090914_100614', 'NCP2_ADV_D3-D4/ADV_D4_100714_110214',
             'NCP2_ADV_D5-D6/ADV_D5_110414_120214', 'NCP2_ADV_D5-D6/ADV_D6_120414_010615']
# Nested dicts keyed [station code][deployment code] -> location / timeseries
adv_gdfs = dict()
adv_dfs = dict()
for depIDX, adv_path in enumerate(adv_paths):
    adv_files = os.listdir(adv_data_path + '/' + adv_path) # returns list of files in adv folder
    for adv_file in adv_files:
        if '.txt' in adv_file and 'Readme' not in adv_file:
            df_in = pd.read_csv(adv_data_path + '/' + adv_path + '/' + adv_file, delim_whitespace=True, skipinitialspace=True)
            # Match the file to a station by the filename slice; assumes
            # names end '<STA><Dn>.txt' so [-9:-6] is the station code and
            # [-6:-4] the deployment code -- TODO confirm against folder
            for stationIDX, station in enumerate(gdf_adv_locs['parent_loc_code']):
                if adv_file[-9:-6] in station:
                    # adv_geo_x = np.ones([len(df_in), 1]) * df_adv_locs.X_NYSPLI[stationIDX]
                    # adv_geo_y = np.ones([len(df_in), 1]) * df_adv_locs.Y_NYSPLI[stationIDX]
                    # Build a datetime index from the six component columns
                    df_in['date'] = pd.to_datetime(df_in['Year'].astype(str) + '-' + df_in['Month'].astype(
                        str).str.zfill(2) + '-' + df_in['Day'].astype(
                        str).str.zfill(2) + ' ' + df_in['Hour'].astype(
                        str).str.zfill(2) + ':' + df_in['Minute'].astype(
                        str).str.zfill(2) + ':' + df_in['Second'].astype(str).str.zfill(2))
                    df_in.set_index('date', inplace=True)
                    df_in.drop(columns=['Year', 'Month', 'Day', 'Hour', 'Minute', 'Second'], inplace=True)
                    df_in.dropna(how='all', axis=1, inplace=True)
                    colName = df_in.columns
                    # float32 halves the memory footprint of the long records
                    df_in[colName] = df_in[colName].astype('float32')
                    # Location
                    if adv_file[-9:-6] not in adv_gdfs:
                        adv_gdfs[adv_file[-9:-6]] = dict()
                        adv_dfs[adv_file[-9:-6]] = dict()
                    # D1-6
                    if adv_file[-6:-4] not in adv_gdfs[adv_file[-9:-6]]:
                        adv_gdfs[adv_file[-9:-6]][adv_file[-6:-4]] = gdf_adv_locs.iloc[stationIDX]
                        adv_dfs[adv_file[-9:-6]][adv_file[-6:-4]] = df_in
                    else:
                        # Same station/deployment seen again: merge columns
                        # from this file into the existing dataframe
                        adv_gdfs[adv_file[-9:-6]][adv_file[-6:-4]] = gdf_adv_locs.iloc[stationIDX]
                        adv_dfs[adv_file[-9:-6]][adv_file[-6:-4]].loc[:, colName] = df_in
                    print('ADV:' + adv_file[-9:-6] +
                          '; ' + adv_file[-6:-4] +
                          '; var:' + adv_file[3:6])
# with open('ADV.pickle', 'wb') as f:
#     pickle.dump(adv_dfs, f)
# %% Save ADV to NetCDF
# NetCDF variable names cannot contain "(", ")" or "/", so sanitize column
# names before export. regex=True is passed explicitly: the default for
# pandas Series.str.replace changed to regex=False (pandas >= 2.0), which
# would make these character classes match literally and leave the names
# untouched. Note ncDat aliases the stored dataframe, so the renamed
# columns persist in adv_dfs.
for stationIDX, stat in enumerate(adv_dfs):
    for depIDX, depdat in enumerate(adv_dfs[stat]):
        ncDat = adv_dfs[stat][depdat]
        ncDat.columns = ncDat.columns.str.replace(r"[()]", "_", regex=True)
        ncDat.columns = ncDat.columns.str.replace(r"[/]", "_", regex=True)
        ncDat.to_xarray().to_netcdf(
            'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADV/' + stat + '_' + depdat + '.nc')
gdf_adv_locs.to_csv('C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADV/ADVlocations.csv')
# %% Plot ADV Data
# One subplot per ADV station; each deployment's velocity record is
# resampled to 1 s means and overplotted on the station's axis.
fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(9, 8))
fig.tight_layout(pad=2)
for stationIDX, stat in enumerate(adv_dfs):
    for depIDX, depdat in enumerate(adv_dfs[stat]):
        # adv_dfs[stat][depdat]['vel'].iloc[::60, :].plot(ax=axes[stationIDX])
        # Column name differs between files; fall back to the sanitized name
        if 'Velocity' in adv_dfs[stat][depdat].columns:
            plotingDat = adv_dfs[stat][depdat].loc[:, 'Velocity'].resample('1s').mean()
        else:
            plotingDat = adv_dfs[stat][depdat].loc[:, 'Velocity_m_s_'].resample('1s').mean()
        axes[stationIDX].plot(plotingDat.index, plotingDat)
        # axes[stationIDX].set_ylim(0, 7)
        axes[stationIDX].set_xlim(pd.to_datetime("2014-07-01"), pd.to_datetime('2015-02-01'))
        # #axes[stationIDX, depIDX].format_xdata = mdates.DateFormatter('%Y-%m')
        fmt_half_year = mdates.MonthLocator(interval=1)
        axes[stationIDX].xaxis.set_major_locator(fmt_half_year)
        axes[stationIDX].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
        axes[stationIDX].set_title(str(stat))
        #
        axes[stationIDX].set_ylabel('ADV Velocity [m/s]')
# cbar.set_label('Velocity Magnitude [m/s]')
# pltDat.set_clim([0, 0.2])
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADV_raw.png',
            bbox_inches='tight', dpi=300)
# %% Plot Map
# Overview map of all observation locations. Besides plotting, this cell
# accumulates every station name/coordinate into the locTable* lists, which
# are consumed by the grid-cell matching cell below.
# NOTE(review): Mapbox access token is embedded in the URL -- consider
# moving it out of source control.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(8, 8))
locTableNames = []
locTableX = []
locTableY = []
axes.set_xlim(303500, 306500)
axes.set_ylim(61000, 63750)
# Mobile ADCP transects (gdf is defined earlier in this script)
gdf.plot(ax=axes, markersize=10, color='blue', label='Mobile ADCP')
for x, y, label in zip(gdf.drop_duplicates(subset="station", keep='last').geometry.x,
                       gdf.drop_duplicates(subset="station", keep='last').geometry.y,
                       gdf.drop_duplicates(subset="station", keep='last').station):
    axes.annotate(label, xy=(x, y), xytext=(20, 3), textcoords="offset points", color='blue',
                  bbox=dict(boxstyle="square,pad=0.3", fc="white", ec="k", lw=0))
# Temperature & salinity sample locations (collected but not annotated)
gdf_SpringSummerDat.plot(ax=axes, markersize=12, color='magenta', label='Temperature & Salinity')
for x, y, label in zip(gdf_SpringSummerDat.drop_duplicates(subset="loc_name", keep='last').geometry.x,
                       gdf_SpringSummerDat.drop_duplicates(subset="loc_name", keep='last').geometry.y,
                       gdf_SpringSummerDat.drop_duplicates(subset="loc_name", keep='last').loc_name):
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
# Moored ADCP (2012 deployment)
gdf_moored_loc.plot(ax=axes, markersize=20, color='red', label='Moored ADCP 2012')
for x, y, label in zip(gdf_moored_loc.drop_duplicates(subset="depCurr", keep='last').geometry.x,
                       gdf_moored_loc.drop_duplicates(subset="depCurr", keep='last').geometry.y,
                       gdf_moored_loc.drop_duplicates(subset="depCurr", keep='last').depCurr):
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
# Moored ADCP (2014 deployment), annotated on the map
gdf_adcp2_locs.plot(ax=axes, markersize=20, color='orange', label='Moored ADCP 2014')
for x, y, label in zip(gdf_adcp2_locs.drop_duplicates(subset="parent_loc_code", keep='last').geometry.x,
                       gdf_adcp2_locs.drop_duplicates(subset="parent_loc_code", keep='last').geometry.y,
                       gdf_adcp2_locs.drop_duplicates(subset="parent_loc_code", keep='last').parent_loc_code):
    axes.annotate(label, xy=(x, y), xytext=(-65, 3), textcoords="offset points", color='orange',
                  bbox=dict(boxstyle="square,pad=0.3", fc="white", ec="k", lw=0))
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
# Moored ADV (2014 deployment), annotated on the map
gdf_adv_locs.plot(ax=axes, markersize=20, color='green', label='Moored ADV 2014')
for x, y, label in zip(gdf_adv_locs.geometry.x,
                       gdf_adv_locs.geometry.y,
                       gdf_adv_locs.parent_loc_code):
    axes.annotate(label, xy=(x, y), xytext=(-30, -30), textcoords="offset points", color='green',
                  bbox=dict(boxstyle="square,pad=0.3", fc="white", ec="k", lw=0))
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
# Water-level gauges (rows 2-3 only)
gdf_gaugeLocUSSP.loc[2:3, 'geometry'].plot(ax=axes, markersize=20, color='yellow', label='Water Level Gauge')
for x, y, label in zip(gdf_gaugeLocUSSP.loc[2:3, 'geometry'].x,
                       gdf_gaugeLocUSSP.loc[2:3, 'geometry'].y,
                       gdf_gaugeLocUSSP.loc[2:3, 'Name']):
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
# Mobile ADCP points with a composite station_transect-minute-second label
for x, y, label in zip(gdf.geometry.x,
                       gdf.geometry.y,
                       gdf.station + '_' + gdf.transect.astype(str) + gdf['min'].astype(str) + gdf.second.astype(str)):
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32118')
axes.set_xlabel('New York State Plane Easting [m]')
axes.set_ylabel('New York State Plane Northing [m]')
axes.legend()
# axes[1].set_xlim(303500, 306500)
# axes[1].set_ylim(61000, 63750)
# gdf_SpringSummerDat.plot(ax=axes[1], markersize=12, color='magenta', label='Temperature & Salinity')
# axes[1].set_xlabel('New York State Plane Easting [m]')
# axes[1].legend()
# ctx.add_basemap(axes[1], source=mapbox, crs='EPSG:32118')
fig.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/DataMap_ADV.png',
#             bbox_inches='tight', dpi=300)
# %% Import grid shapefile and find cells
# Spatially join every accumulated observation point to the Delft3D grid
# cell containing it, then export the matched cells with their station
# name lists for use as model observation points.
delftGrid = gp.read_file('C:/Users/arey/files/Projects/Newtown/Topology data of 2D network.shp')
delftGrid = delftGrid.set_crs("EPSG:32118")
delftGrid['centroid'] = delftGrid.geometry.centroid
# obsPts column 0 holds the station name (built from a plain list)
obsPts = gp.GeoDataFrame(locTableNames, geometry=gp.points_from_xy(locTableX, locTableY), crs="EPSG:32118")
# NOTE(review): the 'op' keyword is deprecated in geopandas >= 0.10 in
# favour of 'predicate' -- confirm the installed version before updating
joinPTS = gp.sjoin(obsPts, delftGrid, op='within')
uniqueJoinPTS = joinPTS.index_right.unique()
# Collect all station names falling in the same grid cell into one list
groupdObsLabels = joinPTS.groupby(by='index_right').agg({0:lambda x:list(x)})
uniqueDelftGrid = delftGrid.iloc[uniqueJoinPTS, :]
# NOTE(review): assigning into an iloc slice may raise a
# SettingWithCopyWarning; alignment here is by the grid index
uniqueDelftGrid['Station Names'] = groupdObsLabels
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(8, 8))
axes.set_xlim(303500, 306500)
axes.set_ylim(61000, 63750)
delftGrid.plot(ax=axes, markersize=10, color='gray', label='Mobile ADCP')
delftGrid.loc[uniqueJoinPTS, 'geometry'].plot(ax=axes, markersize=10, color='blue', label='Mobile ADCP')
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32118')
axes.set_xlabel('New York State Plane Easting [m]')
axes.set_ylabel('New York State Plane Northing [m]')
fig.show()
uniqueDelftGrid.to_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/06_Models/06_Delft3DFM/RectConnect2_Obs_PTS.xlsx')

427
NTC_DFM/NTC_SedTestPlot.py Normal file
View File

@ -0,0 +1,427 @@
"""
@author: AJMR
Plotting and animation script for NTC sediment tests
April 17, 2023
"""
import os
import pandas as pd
import geopandas as gpd
import netCDF4 as nc
import math
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from scipy.interpolate import LinearNDInterpolator, interp1d
import matplotlib.animation as animation
import dfm_tools as dfmt
import xarray as xr
import cartopy.crs as ccrs
import contextily as ctx
from dfm_tools.regulargrid import scatter_to_regulargrid
from pathlib import Path
# Colormaps
import cm_xml_to_matplotlib as cm
#%% Read Model Log
# The model log spreadsheet maps run numbers to their output directories
runLog = pd.read_excel('C:/Users/arey/files/Projects/Newtown/Model Log NTC.xlsx', 'ModelLog')
dataPath = "FlowFM/dflowfm/output/FlowFM_map.nc"
histPath = "FlowFM/dflowfm/output/FlowFM_his.nc"
#%% Import Model results for static images
# Define which models to import
modelPlot = [120, 121, 129, 130, 131, 132, 133, 134]
# Lists are sized to the largest run number so they can be indexed by run id
d3dfm_DataArray = [None] * (max(modelPlot)+1)
sedMass = [None] * (max(modelPlot)+1)  # Available Mass of Sediment
erodeSed = [None] * (max(modelPlot)+1)  # Erosion and deposition
initialBed = [None] * (max(modelPlot)+1)  # Erosion and deposition
modelCRS = [None] * (max(modelPlot)+1)  # Model CRS
for i in modelPlot:
    file_nc_map = Path(runLog['Run Location'][i]) / dataPath
    # Open Map file as xarrray Dataset
    d3dfm_DataArray[i] = dfmt.open_partitioned_dataset(file_nc_map.as_posix())
    # Get Var info
    vars_pd = dfmt.get_ncvarproperties(d3dfm_DataArray[i])
    # Get last timestep
    tStep = d3dfm_DataArray[i].time[-1].values
    # Import sand sediment class (fraction index 2) at the final timestep
    sedMass[i] = d3dfm_DataArray[i].mesh2d_bodsed[-1, :, 2].compute()
    # Bed elevation = water level minus water depth, at first/last timestep
    initialBed[i] = d3dfm_DataArray[i].mesh2d_s1[0, :] - d3dfm_DataArray[i].mesh2d_waterdepth[0, :]
    finalBed = d3dfm_DataArray[i].mesh2d_s1[-1, :] - d3dfm_DataArray[i].mesh2d_waterdepth[-1, :]
    # Positive values = deposition, negative = erosion over the run
    erodeSed[i] = finalBed - initialBed[i]
    erodeSed[i] = erodeSed[i].compute()
    initialBed[i] = initialBed[i].compute()
    modelCRS[i] = vars_pd['EPSG_code']['projected_coordinate_system']
#%% Define axis limits
# Plot window extents, one entry per zoom level, as (xmin, xmax, ymin, ymax):
#   0 = full domain, 1 = North River zoom, 2 = tributaries zoom,
#   3 = far-tributary zoom
_axLimWindows = [
    (298500, 306800, 58500, 68000),   # Full domain
    (303500, 305500, 66000, 68000),   # Zoom North River
    (305500, 306800, 60000, 62200),   # Zoom Tribs
    (305800, 305900, 60150, 60400),   # Zoom far trib
]
# Split into the four parallel lists the plotting cells index by zoom level
axLim_Xmin = [w[0] for w in _axLimWindows]
axLim_Xmax = [w[1] for w in _axLimWindows]
axLim_Ymin = [w[2] for w in _axLimWindows]
axLim_Ymax = [w[3] for w in _axLimWindows]
#%% Plot using DFM functions
# Four-panel figure per run: same field plotted at the four zoom levels
# defined above. Commented alternatives plot sediment mass or erosion.
for i in modelPlot:
    # Create figure
    fig, axesFig = plt.subplots(1, 4, figsize=(8, 2))
    axes = axesFig.flatten()
    # Loop through axis and plot with different limits
    for subIDX in range(0, 4):
        # # Plot Sediment Mass
        # smp = sedMass[i].ugrid.plot(ax=axes[subIDX], edgecolors='face', cmap='turbo',
        #                             vmin=0, vmax=2000, add_colorbar=False)
        # # Plot Erosion
        # smp = erodeSed[i].ugrid.plot(ax=axes[subIDX], edgecolors='face', cmap='coolwarm',
        #                              vmin=-1, vmax=1, add_colorbar=False)
        # Plot Bed
        smp = initialBed[i].ugrid.plot(ax=axes[subIDX], edgecolors='face', cmap='nipy_spectral',
                                       vmin=-25, vmax=0, add_colorbar=False)
        # Strip per-panel labels/ticks; panels are distinguished by extent
        axes[subIDX].set_xlabel('')
        axes[subIDX].set_ylabel('')
        axes[subIDX].set_title('')
        axes[subIDX].set_xticks([])
        axes[subIDX].set_yticks([])
        axes[subIDX].set_xlim(axLim_Xmin[subIDX], axLim_Xmax[subIDX])
        axes[subIDX].set_ylim(axLim_Ymin[subIDX], axLim_Ymax[subIDX])
        # Add basemap
        # ctx.add_basemap(ax=axes, source=ctx.providers.Esri.WorldImagery, crs=modelCRS[i], attribution=False)
        mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
        ctx.add_basemap(axes[subIDX], source=mapbox, crs=modelCRS[i])
    # Single shared colorbar across the four panels
    # cbar = plt.colorbar(smp, ax=axesFig.ravel().tolist(), label='Sand Mass [kg/m2]')
    # cbar = plt.colorbar(smp, ax=axesFig.ravel().tolist(), label='Erosion/ deposition [m]')
    cbar = plt.colorbar(smp, ax=axesFig.ravel().tolist(), label='Initial Bed [mNAVD88]')
    plt.show()
    # fig.savefig('C:/Users/arey/files/Projects/Newtown/SedFigs2/SedMass_' + runLog['Run'][i] + '.png',
    #             bbox_inches='tight', dpi=300)
    # fig.savefig('C:/Users/arey/files/Projects/Newtown/SedFigs2/ErodeDep_' + runLog['Run'][i] + '.png',
    #             bbox_inches='tight', dpi=300)
    fig.savefig('C:/Users/arey/files/Projects/Newtown/SedFigs2/InitialBed_' + runLog['Run'][i] + '.png',
                bbox_inches='tight', dpi=300)
#%% Plot using DFM functions
# Max shear-stress map with a custom colormap whose bands correspond to the
# critical shear stress of each sediment class (labelled on the colorbar).
# NOTE(review): plot_netmapdata, ugrid_all and modelMaxShear are not
# defined/imported in this script (they belong to the pre-0.11 dfm_tools
# API); this cell will raise NameError unless run after a legacy setup.
#Cmap Path
cmap_path = 'C:/Users/arey/Repo/MATLAB_Q/Downloads/KeyColormaps/'
for i in modelPlot:
    fig, axes = plt.subplots(nrows=1, ncols=1, subplot_kw={'projection': ccrs.epsg(32118)}, figsize=(6, 6))
    fig.patch.set_facecolor('white')
    fig.tight_layout(pad=3.0)
    # Load cmaps
    cmap = 'AJMR_Sed5_RevE.xml'
    plotcmap = cm.make_cmap(cmap_path + cmap)
    pc = plot_netmapdata(ugrid_all[i].verts, values=modelMaxShear[i][0, :],
                         ax=axes, linewidth=0.5, cmap=plotcmap)
    axes.set_xlim(305900, 306400)
    axes.set_ylim(61500, 62200)
    # axes.quiver(modelX[i], modelX[i], U[i], V[i], color='w', scale=1.00)
    mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
    ctx.add_basemap(axes, source=mapbox, crs='EPSG:32118')
    extent = axes.get_extent()
    axes.set_xticks(np.linspace(extent[0], extent[1], 4))
    axes.set_yticks(np.linspace(extent[2], extent[3], 4))
    # Colorbar tick values are the class thresholds [N/m^2]
    pc.set_clim([0, 0.145])
    cbarTicks = [0, 0.0378, 0.063, 0.0826, 0.11, 0.145]
    cbar = fig.colorbar(pc, ax=axes, shrink=0.95,
                        ticks=cbarTicks)
    cbar.set_label('Total Max Shear Stress [N/m^2]')
    # Add label on colorbar: one sediment-class name per band
    cbar.ax.text(0.08, (cbarTicks[0]+cbarTicks[1])/2, 'CLAY', ha='center', va='center',
                 rotation=90, fontweight='bold')
    cbar.ax.text(0.08, (cbarTicks[1]+cbarTicks[2])/2, 'FSILT', ha='center', va='center',
                 rotation=90, fontweight='bold')
    cbar.ax.text(0.08, (cbarTicks[2]+cbarTicks[3])/2, 'MSILT', ha='center', va='center',
                 rotation=90, fontweight='bold')
    cbar.ax.text(0.08, (cbarTicks[3]+cbarTicks[4])/2, 'CSILT', ha='center', va='center',
                 rotation=90, fontweight='bold')
    cbar.ax.text(0.08, (cbarTicks[4]+cbarTicks[5])/2, 'FSAND', ha='center', va='center',
                 rotation=90, fontweight='bold')
    plt.show()
    fig.savefig('C:/Users/arey/files/Projects/Newtown/SedFigs/Shear_Class_' + runLog['Run'][i] + '.png',
                bbox_inches='tight', dpi=300)
#%% Import Model results for animations
# NOTE(review): get_ncvardimlist / get_timesfromnc / get_netdata /
# get_ncmodeldata are pre-0.11 dfm_tools functions that are not imported in
# this script; this cell will raise NameError unless run with the legacy
# API available.
# modelPlot = [20, 22]
# modelPlot = [18, 20, 21, 22]
# modelPlot = [18, 21]
modelPlot = [25, 26]
# Lists sized to the largest run number so they can be indexed by run id
modelSedConc = [None] * (max(modelPlot)+1)
modelMaxShear_animate = [None] * (max(modelPlot)+1)
ugrid_all = [None] * (max(modelPlot)+1)
tSteps = [None] * (max(modelPlot)+1)
for i in modelPlot:
    file_nc_map = Path(runLog['Run Location'][i]) / dataPath
    # Get Var info
    vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc_map.as_posix())
    # Import time vector, grid geometry, and the two animated fields
    tSteps[i] = get_timesfromnc(file_nc=file_nc_map.as_posix(), varname='time')
    ugrid_all[i] = get_netdata(file_nc=file_nc_map.as_posix())
    # Bottom-layer sediment concentration at all timesteps
    modelSedConc[i] = get_ncmodeldata(file_nc=file_nc_map.as_posix(), varname='mesh2d_sedfrac_concentration',
                                      timestep='all', station=0, layer=0)  #timestep='all' OR timestep=range(0, 30)
    modelMaxShear_animate[i] = get_ncmodeldata(file_nc=file_nc_map.as_posix(), varname='mesh2d_tausmax',
                                               timestep='all')
# %% Import water levels from hist
# Water-level timeseries from the his file, used for the inset tide plot in
# the animation below. Same legacy dfm_tools API caveat as the cell above.
modelHistWL = [None] * (max(modelPlot)+1)
modelHistTime = [None] * (max(modelPlot)+1)
# tSteps = [None] * (max(modelPlot)+1)
for i in modelPlot:
    file_nc_hist = Path(runLog['Run Location'][i]) / histPath
    vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc_hist.as_posix())
    # Get station names
    histStations = get_hisstationlist(file_nc=file_nc_hist.as_posix(), varname='waterlevel')
    # Import water level at station index 1 only
    modelHistTime[i] = get_timesfromnc(file_nc=file_nc_hist.as_posix(), varname='time')
    modelHistWL[i] = get_ncmodeldata(file_nc=file_nc_hist.as_posix(), varname='waterlevel',
                                     timestep='all', station=1)
#%% Sediment animations
# Renders one MP4 per run: bottom sediment concentration per frame, with an
# inset water-level plot marking the current tide phase. Each frame is also
# saved as a PNG still. Depends on the legacy dfm_tools helpers loaded above.
plt.rcParams['animation.ffmpeg_path'] = \
    'C:/Users/arey/Local/ffmpeg-2022-02-14-git-59c647bcf3-full_build/bin/ffmpeg.exe'
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckex9vtri0o6619p55sl5qiyv/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12
# fontprops = fm.FontProperties(size=12)
# Set model to plot
# modelPlot = [20, 22]
# modelPlot = [18, 20, 21, 22]
modelPlot = [25, 26]
cmap_path = 'C:/Users/arey/Repo/MATLAB_Q/Downloads/KeyColormaps/'
for m in modelPlot:
    # Color scale for sediment concentration (commented values are for the
    # shear-stress variant of this animation)
    vmax = 0.75
    vmin = 0
    # vmax = 0.145
    # vmin = 0
    cbarTicks = [0, 0.0378, 0.063, 0.0826, 0.11, 0.145]
    cmapName = 'turbo'
    cmap = mpl.cm.turbo
    # cmapName = 'AJMR_Sed5_RevE.xml'
    # cmap = cm.make_cmap(cmap_path + cmapName)
    # Zoom limits
    # xLimits = [305900, 306500]
    # yLimits = [61400, 62200]
    # Wide Limits
    xLimits = [302719.4, 306934.2]
    yLimits = [60263.7, 64194.8]
    # Setup Video
    metadata = dict(title='NTC Sediment Animation', artist='Matplotlib',
                    comment='AJMR June 28, 2022')
    writer = animation.FFMpegWriter(fps=2, metadata=metadata, codec='h264', bitrate=5000)
    fig, axes = plt.subplots(figsize=(6, 6))
    writer.setup(fig, '//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/' +
                 '06_Models/04_Delft3D/Figures/SedConc/HQ3_Wide_RevC_Long_Sed_Cond_'
                 + runLog['Run'][m] + '.mp4')
    mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
    # Frame loop: first and last timesteps are skipped
    for i in np.arange(1, len(modelSedConc[m])-1, 1): #289
        # Clear axes for video
        axes.cla()
        # Create new figure for stills
        # fig, axes = plt.subplots(figsize=(6, 6))
        # Clear inset WL Plot
        # if i > 1:
        #     axin1.cla()
        # Plot sediment
        pc = plot_netmapdata(ugrid_all[m].verts, values=modelSedConc[m][i, 0, :, 0],
                             ax=axes, cmap=cmapName,
                             antialiaseds=False)
        # Plot shear
        # pc = plot_netmapdata(ugrid_all[m].verts, values=modelMaxShear_animate[m][i,:],
        #                      ax=axes, cmap=cmap,
        #                      antialiaseds=False)
        # Set map limits
        axes.set_xlim(xLimits[0], xLimits[1])
        axes.set_ylim(yLimits[0], yLimits[1])
        # Add basemap (re-fetched every frame because cla() clears it)
        ctx.add_basemap(axes, source=mapbox, crs='EPSG:32118')
        # Set map ticks
        axes.set_xticks(np.linspace(xLimits[0], xLimits[1], 4))
        axes.set_yticks(np.linspace(yLimits[0], yLimits[1], 4))
        # Color Limits
        pc.set_clim([vmin, vmax])
        # Add title
        axes.title.set_text('Bottom Sediment Concentration at: ' + tSteps[m][i].strftime("%Y/%m/%d %H:%M"))
        # axes.title.set_text('Total Max Shear Stress at: ' + str(tSteps[m][i]))
        # Add inset wl plot
        axin1 = axes.inset_axes([0.1, 0.1, 0.4, 0.15])
        axin1.plot(modelHistTime[m], modelHistWL[m])
        # Add current point
        # Find closest time: mark the hist sample nearest this map timestep
        abs_deltas_from_target_date = np.absolute(modelHistTime[m] - tSteps[m][i])
        axin1.scatter(modelHistTime[m][np.argmin(abs_deltas_from_target_date)], modelHistWL[m][np.argmin(abs_deltas_from_target_date)], 10, 'r')
        # axin1.set_xticks([modelHistTime[m][0], modelHistTime[m][len(modelHistTime[m])-1]])
        axin1.set_xticks([])
        axin1.set_yticks([])
        plt.setp(axin1.get_xticklabels(), backgroundcolor="white")
        plt.setp(axin1.get_yticklabels(), backgroundcolor="white")
        # Colorbar is created once, on the first frame only
        if i == 1: #i < 1000
            fig.subplots_adjust(right=0.8)
            norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
            # Create colorbar axis
            # cax = plt.axes([0.82, 0.25, 0.04, 0.5])
            cax = plt.axes([0.85, 0.15, 0.04, 0.7])
            # Add colorbar with designated tick marks
            # cb1 = mpl.colorbar.ColorbarBase(cax, cmap=cmap,
            #                                 norm=norm,
            #                                 orientation='vertical',
            #                                 ticks=cbarTicks)
            # Add colorbar without designated tick marks
            cb1 = mpl.colorbar.ColorbarBase(cax, cmap=cmap,
                                            norm=norm,
                                            orientation='vertical')
            # Colorbar lavel
            cb1.set_label('Sediment Concentration [$kg/m^3$]')
            # cb1.set_label('Total Max Shear Stress [N/m^2]')
            # Add label on colorbar
            # cb1.ax.text(0.08, (cbarTicks[0] + cbarTicks[1]) / 2, 'CLAY', ha='center', va='center',
            #             rotation=90, fontweight='bold')
            # cb1.ax.text(0.08, (cbarTicks[1] + cbarTicks[2]) / 2, 'FSILT', ha='center', va='center',
            #             rotation=90, fontweight='bold')
            # cb1.ax.text(0.08, (cbarTicks[2] + cbarTicks[3]) / 2, 'MSILT', ha='center', va='center',
            #             rotation=90, fontweight='bold')
            # cb1.ax.text(0.08, (cbarTicks[3] + cbarTicks[4]) / 2, 'CSILT', ha='center', va='center',
            #             rotation=90, fontweight='bold')
            # cb1.ax.text(0.08, (cbarTicks[4] + cbarTicks[5]) / 2, 'FSAND', ha='center', va='center',
            #             rotation=90, fontweight='bold')
        # Change label font size for only the primary axis
        for item in ([axes.xaxis.label, axes.yaxis.label] +
                     axes.get_xticklabels() + axes.get_yticklabels()):
            item.set_fontsize(4)
        # Save video frame
        writer.grab_frame()
        plt.show()
        print(i)
        # Save stills
        fig.savefig('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/' +
                    '06_Models/04_Delft3D/Figures/SedConc/Wide_RevC_SedConc_' +
                    runLog['Run'][m] + '_Frame_' + str(i) + '.png',
                    bbox_inches='tight', dpi=300)
        # fig.savefig('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/' +
        #             '06_Models/04_Delft3D/Figures/ShearStress/Wide_ShearStress_' +
        #             runLog['Run'][m] + '_Frame_' + str(i) + '.png',
        #             bbox_inches='tight', dpi=300)
    # Save video
    writer.finish()

View File

@ -0,0 +1,411 @@
#%%
#Project: 11934.201 Newtown Creek TPP - Privileged and Confidential
#Confidentiality Note: For internal discussion only.
#Description: This code creates plots for comparing temperature, salinity between the EFDC model, Delft3DFM, and the measured data.
#To run, ensure that the model log is saved locally, and change the path in the code.
#%% # -*- coding: utf-8 -*-
"""
Created on Wed Feb 8 11:14:28 2023
@author: mrobinson
"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import scipy
from scipy.interpolate import interp1d
from scipy import interpolate
from numpy import trapz
from datetime import datetime, timedelta
from matplotlib import dates as mpl_dates
from matplotlib.ticker import (MultipleLocator, AutoMinorLocator)
import os
import pandas as pd
import numpy as np
from sklearn.metrics import mean_squared_error
import geopandas as gp
import pytz
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import dfm_tools as dfmt
import cartopy.crs as ccrs
import contextily as ctx
from shapely.geometry import Point, MultiPoint
from shapely.ops import nearest_points
import pathlib as pl
import xarray as xr
from datetime import timedelta
import datetime as datetime
#%% Load in Measured data
dfIN=pd.read_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/21_YSI Data/D1-D9_Combined/Combined_with_2015/All_Combine.csv')
# Filter out invalid data
# NOTE(review): df1 is an alias of dfIN (no copy), so these edits also
# mutate dfIN. Sentinel values are replaced with NaN.
df1 = dfIN
df1.loc[df1['Temp'] < -30, 'Temp'] = np.nan
df1.loc[df1['Sal'] < 0, 'Sal'] = np.nan
df1['DateTime'] = pd.to_datetime(df1['Date Time Local'])
df1.set_index('DateTime', inplace=True)
# Local clock times to UTC; DST-ambiguous/nonexistent stamps become NaT
df1.index = df1.index.tz_localize(pytz.timezone('America/New_York'),
                                  ambiguous='NaT', nonexistent='NaT')
df1.index = df1.index.tz_convert(pytz.utc)
# Convert to Dataframe
YSI_df = pd.DataFrame(df1, columns=['Station', 'Temp', 'Sal', 'Depth'])
# Set index as DateTime
#Change Station of interest (EB043, EK108, NC310, NC313, NC316, NC318)
Station=['EB043','EK108','NC310','NC313','NC316','NC318']
x=4
i=Station[x]
Station_interest=i
# 'A' = top sensor, 'C' = bottom sensor -- TODO confirm naming convention
Top=str(Station_interest)+str('A')
Bot=str(Station_interest)+str('C')
#%% Load in EFDC Output
FILE=r"//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/06_Models/02_EFDC/06_EFDC_Outputs/2014-2015/EFDC_Compiled_2014_2015.xlsx"
# sheet_name=None loads every sheet into a dict keyed by station name
EFDC_dat = pd.read_excel(FILE, None)
# Loop through stations and add to dataframe
Stations=['EB043', 'EK108', 'NC310', 'NC313', 'NC316', 'NC318']
for Stat in Stations:
    # Set index as DateTime, localized to UTC (model output has no tz info)
    EFDC_dat[Stat].set_index('date fix', inplace=True)
    EFDC_dat[Stat].index = pd.to_datetime(EFDC_dat[Stat].index).tz_localize(
        pytz.timezone('UTC'))
#%% Load in Telemac Output
FILE_T=r"C:/Users/arey/Downloads/Results_Summarized_Telemac.xlsx"
# sheet_name=None loads every sheet into a dict keyed by station name
TM_dat = pd.read_excel(FILE_T, None)
# Loop through stations and adjust times
Stations=['EB043', 'EK108', 'NC310', 'NC313', 'NC316', 'NC318']
for Stat in Stations:
    # Set index as DateTime, localized to UTC (model output has no tz info)
    TM_dat[Stat].set_index('Date_Time', inplace=True)
    TM_dat[Stat].index = pd.to_datetime(TM_dat[Stat].index).tz_localize(
        pytz.timezone('UTC'))
#%% Load in Water Level Data
# These CSVs are the gauge exports written by the data-processing script.
# Field Gauge-EAST
FFG_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/FieldFacility.csv")
FFG_pd['DateTime'] = pd.to_datetime(FFG_pd.Date_time)
FFG_pd.set_index(FFG_pd['DateTime'], inplace=True)
# Put in UTC from Eastern Time (with DST!)
FFG_pd = FFG_pd.tz_localize(
    pytz.timezone('US/Eastern'), ambiguous='infer').tz_convert(pytz.utc)
# National Grid Gauge-WEST
NGG1_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid1.csv")
NGG1_pd['DateTime'] = pd.to_datetime(NGG1_pd.Date_time)
NGG1_pd.set_index(NGG1_pd['DateTime'], inplace=True)
# Put in UTC from Eastern Time (with DST!)
# ambiguous='NaT' marks fall-back duplicate hours as NaT, dropped below
NGG1_pd = NGG1_pd.tz_localize(
    pytz.timezone('US/Eastern'), ambiguous='NaT').tz_convert(pytz.utc)
# Drop ambiguous times
NGG1_pd = NGG1_pd.dropna()
NGG2_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid2.csv")
NGG2_pd['DateTime'] = pd.to_datetime(NGG2_pd.datetime)
NGG2_pd.set_index(NGG2_pd['DateTime'], inplace=True)
# Put in UTC from Eastern Time (with DST!)
NGG2_pd = NGG2_pd.tz_localize(
    pytz.timezone('US/Eastern'), ambiguous='NaT').tz_convert(pytz.utc)
# Drop ambiguous times
NGG2_pd = NGG2_pd.dropna()
#%% Load in Delft Results
# The model log spreadsheet maps run numbers to their output directories
runLog = pd.read_excel('C:/Users/arey/files/Projects/Newtown/Model Log NTC.xlsx', 'ModelLog')
dataPath = "FlowFM_map.nc"
histPath = "FlowFM_his.nc"
#read in time series
modelPlot = [105]#, 106]
# Lists sized to the largest run number so they can be indexed by run id;
# _B/_T hold bottom- and top-layer temperature/salinity respectively
Delft_WL = [None] * (max(modelPlot)+1)
Delft_T_B = [None] * (max(modelPlot)+1)
Delft_S_B = [None] * (max(modelPlot)+1)
Delft_T_T = [None] * (max(modelPlot)+1)
Delft_S_T = [None] * (max(modelPlot)+1)
for i in modelPlot:
file_nc_hist = pl.Path(runLog['Run Location'][i],
'FlowFM', 'dflowfm', 'output', 'FlowFM_his.nc')
# file_nc_hist = "C:/Users/mrobinson/OneDrive - W.F. Baird & Associates Coastal Engineers Ltd/Documents/11934.202 NTC/06 Models/06 Delft3DFM/Results/Run129/FlowFM_his.nc"
# Hist file path
#file_nc_hist = file_nc_hist.as_posix()
# Open hist file dataset
data_xr = xr.open_mfdataset(file_nc_hist, preprocess=dfmt.preprocess_hisnc)
# Get stations
stations_pd = data_xr['stations'].to_dataframe()
# Get observations
#observations_pd=data_xr['stations'].to_dataframe()
# Convert to Pandas
Delft_WL[i] = data_xr.waterlevel.sel(stations=['NC310', 'NC313','NC316','NC318','EB043','EK108','West_WL']).to_pandas()
#Temp
Delft_T_B[i] = data_xr.temperature.sel(stations=['NC310', 'NC313','NC316','NC318','EB043','EK108'], laydim=0).to_pandas()
Delft_T_T[i] = data_xr.temperature.sel(stations=['NC310', 'NC313','NC316','NC318','EB043','EK108'], laydim=9).to_pandas()
#Salinity
Delft_S_B[i] = data_xr.salinity.sel(stations=['NC310', 'NC313','NC316','NC318','EB043','EK108'], laydim=0).to_pandas()
Delft_S_T[i] = data_xr.salinity.sel(stations=['NC310', 'NC313','NC316','NC318','EB043','EK108'], laydim=9).to_pandas()
# Put in UTC if needed
if i == 104 or i == 105:
Delft_WL[i] = Delft_WL[i].tz_localize(
pytz.timezone('EST')).tz_convert(pytz.utc)
Delft_T_B[i] = Delft_T_B[i].tz_localize(pytz.timezone('EST')).tz_convert(pytz.utc)
Delft_T_T[i] = Delft_T_T[i].tz_localize(pytz.timezone('EST')).tz_convert(pytz.utc)
Delft_S_B[i] = Delft_S_B[i].tz_localize(pytz.timezone('EST')).tz_convert(pytz.utc)
Delft_S_T[i] = Delft_S_T[i].tz_localize(pytz.timezone('EST')).tz_convert(pytz.utc)
else:
Delft_WL[i] = Delft_WL[i].tz_localize(
pytz.timezone('UTC'))
#%% Plot Water Levels
# Compare the measured National Grid gauge record against the Delft3D-FM,
# EFDC and Telemac model predictions (plus the YSI logger) at NC318/West.
i = 105
# Create Figure
fig, ax = plt.subplots(nrows=1, ncols=1, figsize=(18, 8), sharex=False, sharey=True)
fig.suptitle('Comparison of Predicted and Measured Water Level at NGG', fontsize=18)
# Plot observations: National Grid gauge, converted ft -> m
# (label fixed: this series is the NGG record, not the Field Facility gauge)
NGG2_pd['water_surface_elevation'].multiply(0.3048).plot(label='NGG', color='k', linewidth=2, ax=ax)
# Plot Delft3D-FM prediction at the west water-level station
Delft_WL[i]['West_WL'].plot(label='Delft', color='r', linewidth=2, ax=ax)
# Plot YSI logger depth shifted by -1.3 m
# NOTE(review): the -1.3 m offset is assumed to convert depth to WSE datum -- confirm
YSI_df.loc[YSI_df['Station'] == 'NC318A', 'Depth'].add(-1.3).plot(
    label='YSI', color='b', linewidth=2, ax=ax)
# Plot EFDC
EFDC_dat['NC318A'].loc[:, 'WSE_m'].plot(label='EFDC', color='m', linewidth=2, ax=ax)
# Plot Telemac (was mislabelled 'EFDC' and drawn in the same colour as EFDC)
TM_dat['NC318'].loc[:, 'WSE_m'].plot(label='Telemac', color='g', linewidth=2, ax=ax)
# Set Time Bounds
dateMin = pd.to_datetime(datetime.datetime(2014, 11, 1, 0, 0, 0, 0), utc=True)
dateMax = pd.to_datetime(datetime.datetime(2014, 11, 13, 0, 0, 0, 0), utc=True)
# Set X Axis
ax.set_xlim(dateMin, dateMax)
# Set Y Axis
ax.set_ylim(-1.5, 2.5)
ax.set_ylabel('Water Level (m)')
# Labels were assigned to every series but the legend was never drawn
ax.legend(loc='lower right')
plt.show()
#%% Plot of Temperature AJMR
def resampleToObs(model, obs, obsMatchFreq):
    """Interpolate a model time series onto the observation timestamps.

    The model series is resampled to *obsMatchFreq*, gap-filled by
    interpolation, then reindexed to the index of *obs* so model and
    observations can be compared sample-for-sample.
    """
    resampled = model.resample(obsMatchFreq)
    filled = resampled.interpolate()
    return filled.reindex(obs.index)
def syncModelRMSE(model, obs):
    """Root-mean-square error between a model series and observations.

    Only timestamps where BOTH series are non-NaN contribute; the two
    inputs are expected to share an index (see resampleToObs).

    Computes the RMSE directly with numpy instead of
    sklearn.metrics.mean_squared_error(..., squared=False): the
    ``squared`` keyword was deprecated in scikit-learn 1.4 and removed
    in 1.6, so the old call breaks on current installs.
    """
    # Keep only synchronized, valid samples from both series
    valid = model.notna() & obs.notna()
    resid = model.loc[valid].values - obs.loc[valid].values
    return float(np.sqrt(np.mean(resid ** 2)))
degree_sign = u'\N{DEGREE SIGN}'
# Create Figure
# 2 rows (top / bottom layer) x 6 columns (one per station), shared axes.
# NOTE(review): relies on `i`, `Stations`, `YSI_df` and `EFDC_dat` set in
# earlier cells.
fig, ax = plt.subplots(nrows=2, ncols=6, figsize=(16, 7), sharex=True, sharey=True)
# fig.suptitle('Comparison of Predicted and Measured Temperature at NC316', fontsize=18)
# Set Time Bounds for all axis since linked
# dateMin = pd.to_datetime(datetime.datetime(2015, 7, 15, 0, 0, 0, 0), utc=True)
# dateMax = pd.to_datetime(datetime.datetime(2015, 9, 1, 0, 0, 0, 0), utc=True)
# dateMin = pd.to_datetime(datetime.datetime(2015, 7, 28, 0, 0, 0, 0), utc=True)
# dateMax = pd.to_datetime(datetime.datetime(2015, 8, 7, 0, 0, 0, 0), utc=True)
dateMin = pd.to_datetime(datetime.datetime(2014, 7, 1, 0, 0, 0, 0), utc=True)
dateMax = pd.to_datetime(datetime.datetime(2015, 10, 1, 0, 0, 0, 0), utc=True)
# Loop through stations and plot
for StatIDX, Stat in enumerate(Stations):
    ax[0, StatIDX].set_title(Stat)
    # Resample to 5 minute data, reindex to observed and calculate RMSD
    # 'A' suffix selects the upper YSI sonde; 'C' (below) the lower one.
    # NOTE(review): sonde naming convention assumed -- confirm
    ObsDat = YSI_df.loc[YSI_df['Station'] == Stat + 'A', 'Temp']
    ObsDat = ObsDat.loc[(ObsDat.index > dateMin) & (ObsDat.index <= dateMax)]
    DelftInterp = resampleToObs(Delft_T_T[i][Stat], ObsDat, '5T')
    # EFDC 'TEM10' = layer-10 (surface) temperature; snapped to a 10-min grid first
    EFDCInterp = resampleToObs(EFDC_dat[Stat].loc[:, 'TEM10'].resample('10T').nearest(), ObsDat, '5T')
    # TMInterp = resampleToObs(TM_dat[Stat].loc[:, 'TEM10'].resample('15T').nearest(), ObsDat, '5T')
    RMSE_D = syncModelRMSE(DelftInterp, ObsDat)
    RMSE_E = syncModelRMSE(EFDCInterp, ObsDat)
    # RMSE_T = syncModelRMSE(TMInterp, ObsDat)
    # Plot Top Layer Temperature (RMSE shown in each legend entry)
    ObsDat.plot(label='YSI', color='k', linewidth=2, ax=ax[0, StatIDX])
    ax[0, StatIDX].plot(Delft_WL[i].index, Delft_T_T[i][Stat], linestyle='-', label='Delft3D: ' + str(round(RMSE_D, 2)) + degree_sign + 'C')
    EFDC_dat[Stat].loc[:, 'TEM10'].plot(linewidth=1, ax=ax[0, StatIDX], label='EFDC: ' + str(round(RMSE_E, 2)) + degree_sign + 'C')
    # TM_dat[Stat].loc[:, 'TEM10'].plot(linewidth=1, ax=ax[0, StatIDX], label='Telemac: ' + str(round(RMSE_T, 2)) + degree_sign + 'C')
    ax[0, StatIDX].legend(loc='lower right')
    # Plot Bottom Layer Temperature
    # Resample to 5 minute data, reindex to observed and calculate RMSD
    ObsDat = YSI_df.loc[YSI_df['Station'] == Stat + 'C', 'Temp']
    ObsDat = ObsDat.loc[(ObsDat.index > dateMin) & (ObsDat.index <= dateMax)]
    DelftInterp = resampleToObs(Delft_T_B[i][Stat], ObsDat, '5T')
    # EFDC 'TEM1' = layer-1 (bottom) temperature
    EFDCInterp = resampleToObs(EFDC_dat[Stat].loc[:, 'TEM1'].resample('10T').nearest(), ObsDat, '5T')
    # TMInterp = resampleToObs(TM_dat[Stat].loc[:, 'TEM1'].resample('15T').nearest(), ObsDat, '5T')
    RMSE_D = syncModelRMSE(DelftInterp, ObsDat)
    RMSE_E = syncModelRMSE(EFDCInterp, ObsDat)
    # RMSE_T = syncModelRMSE(TMInterp, ObsDat)
    ObsDat.plot(label='YSI', color='k', linewidth=2, ax=ax[1, StatIDX])
    ax[1, StatIDX].plot(Delft_WL[i].index, Delft_T_B[i][Stat], linestyle='-', label='Delft3D: ' + str(round(RMSE_D, 2)) + degree_sign + 'C')
    EFDC_dat[Stat].loc[:, 'TEM1'].plot(linewidth=1, ax=ax[1, StatIDX], label='EFDC: ' + str(round(RMSE_E, 2)) + degree_sign + 'C')
    # TM_dat[Stat].loc[:, 'TEM1'].plot(linewidth=1, ax=ax[1, StatIDX], label='Telemac: ' + str(round(RMSE_T, 2)) + degree_sign + 'C')
    # Add Legend and Set X Axis label
    ax[1, StatIDX].legend(loc='lower right')
    ax[1, StatIDX].set_xlabel('UTC Date')
# Set X Axis
ax[0, 0].set_xlim(dateMin, dateMax)
ax[0, 0].xaxis.set_major_formatter(
    mpl_dates.ConciseDateFormatter(ax[0, 0].xaxis.get_major_locator()))
# Set Y Axis
# ax[0, 0].set_ylim(18, 28)
ax[0, 0].set_ylim(-10, 30)
ax[0, 0].set_ylabel('Temperature Top (' + degree_sign + 'C)')
ax[1, 0].set_ylabel('Temperature Bottom (' + degree_sign + 'C)')
# Reduce spacing between subplots
plt.tight_layout()
plt.show()
# Save figure
fig.savefig('C:/Users/arey/files/Projects/Newtown/TempSalFigs/Temperature_All.png',
            bbox_inches='tight', dpi=300)
#%% Plot Salinity AJMR
# Same layout as the temperature figure: top/bottom layer rows, station columns.
# Stations is re-declared here in a different order than the earlier cell, so
# the subplot column ordering differs between the two figures.
Stations=['NC310', 'NC313', 'NC316', 'NC318', 'EB043', 'EK108']
# Create Figure
fig, ax = plt.subplots(nrows=2, ncols=6, figsize=(16, 7), sharex=True, sharey=True)
# Set Time Bounds for all axis since linked
# dateMin = pd.to_datetime(datetime.datetime(2015, 7, 15, 0, 0, 0, 0), utc=True)
# dateMax = pd.to_datetime(datetime.datetime(2015, 9, 1, 0, 0, 0, 0), utc=True)
dateMin = pd.to_datetime(datetime.datetime(2015, 7, 28, 0, 0, 0, 0), utc=True)
dateMax = pd.to_datetime(datetime.datetime(2015, 8, 7, 0, 0, 0, 0), utc=True)
# Loop through stations and plot
for StatIDX, Stat in enumerate(Stations):
    ax[0, StatIDX].set_title(Stat)
    # Resample to 5 minute data, reindex to observed and calculate RMSE
    # 'A' = upper sonde; 'SAL10' = EFDC layer-10 (surface) salinity
    ObsDat = YSI_df.loc[YSI_df['Station'] == Stat + 'A', 'Sal']
    ObsDat = ObsDat.loc[(ObsDat.index > dateMin) & (ObsDat.index <= dateMax)]
    DelftInterp = resampleToObs(Delft_S_T[i][Stat], ObsDat, '5T')
    EFDCInterp = resampleToObs(EFDC_dat[Stat].loc[:, 'SAL10'].resample('10T').nearest(), ObsDat, '5T')
    # TMInterp = resampleToObs(TM_dat[Stat].loc[:, 'SAL10'].resample('15T').nearest(), ObsDat, '5T')
    RMSE_D = syncModelRMSE(DelftInterp, ObsDat)
    RMSE_E = syncModelRMSE(EFDCInterp, ObsDat)
    # RMSE_T = syncModelRMSE(TMInterp, ObsDat)
    # Plot Top Layer Salinity (RMSE shown in each legend entry)
    ObsDat.plot(label='YSI', color='k', linewidth=2, ax=ax[0, StatIDX])
    ax[0, StatIDX].plot(Delft_S_T[i].index, Delft_S_T[i][Stat], linestyle='-', label='Delft3D: ' + str(round(RMSE_D, 2)) + ' psu')
    EFDC_dat[Stat].loc[:, 'SAL10'].plot(linewidth=1, ax=ax[0, StatIDX], label='EFDC: ' + str(round(RMSE_E, 2)) + ' psu')
    # TM_dat[Stat].loc[:, 'SAL10'].plot(linewidth=1, ax=ax[0, StatIDX], label='Telemac: ' + str(round(RMSE_T, 2)) + ' psu')
    ax[0, StatIDX].legend(loc='lower right')
    # Resample to 5 minute data, reindex to observed and calculate RMSD
    # 'C' = lower sonde; 'SAL1' = EFDC layer-1 (bottom) salinity
    ObsDat = YSI_df.loc[YSI_df['Station'] == Stat + 'C', 'Sal']
    ObsDat = ObsDat.loc[(ObsDat.index > dateMin) & (ObsDat.index <= dateMax)]
    DelftInterp = resampleToObs(Delft_S_B[i][Stat], ObsDat, '5T')
    EFDCInterp = resampleToObs(EFDC_dat[Stat].loc[:, 'SAL1'].resample('10T').nearest(), ObsDat, '5T')
    # TMInterp = resampleToObs(TM_dat[Stat].loc[:, 'SAL1'].resample('15T').nearest(), ObsDat, '5T')
    RMSE_D = syncModelRMSE(DelftInterp, ObsDat)
    RMSE_E = syncModelRMSE(EFDCInterp, ObsDat)
    # RMSE_T = syncModelRMSE(TMInterp, ObsDat)
    # Plot Bottom Layer Salinity
    ObsDat.plot(label='YSI', color='k', linewidth=2, ax=ax[1, StatIDX])
    ax[1, StatIDX].plot(Delft_S_B[i].index, Delft_S_B[i][Stat], linestyle='-', label='Delft3D: ' + str(round(RMSE_D, 2)) + ' psu')
    EFDC_dat[Stat].loc[:, 'SAL1'].plot(linewidth=1, ax=ax[1, StatIDX], label='EFDC: ' + str(round(RMSE_E, 2)) + ' psu')
    # TM_dat[Stat].loc[:, 'SAL1'].plot(linewidth=1, ax=ax[1, StatIDX], label='Telemac: ' + str(round(RMSE_T, 2)) + 'psu')
    # Add Legend and Set X Axis label
    ax[1, StatIDX].legend(loc='lower right')
    ax[1, StatIDX].set_xlabel('UTC Date')
# Set X Axis
ax[0, 0].set_xlim(dateMin, dateMax)
ax[0, 0].xaxis.set_major_formatter(
    mpl_dates.ConciseDateFormatter(ax[0, 0].xaxis.get_major_locator()))
# Set Y Axis
ax[0, 0].set_ylim(12, 27)
ax[0, 0].set_ylabel('Salinity Top (psu)')
ax[1, 0].set_ylabel('Salinity Bottom (psu)')
# Reduce spacing between subplots
plt.tight_layout()
plt.show()
# Save figure
fig.savefig('C:/Users/arey/files/Projects/Newtown/TempSalFigs/Salinity_2015_Zoom.png',
            bbox_inches='tight', dpi=300)

View File

@ -0,0 +1,455 @@
#%%
#Project: 11934.201 Newtown Creek TPP - Privileged and Confidential
#Confidentiality Note: For internal discussion only.
#Description: This code plots the water levels at Field Facility Gage, and National Grid.
#To run, execute the #%% cells in order from the top of the file.
#%% # -*- coding: utf-8 -*-
"""
Created on Thu Feb 16 11:54:29 2023
@author: mrobinson
"""
#%%Packages
import os
import pandas as pd
import numpy as np
import geopandas as gp
import pytz
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import dfm_tools as dfmt
from dfm_tools.get_nc import get_netdata, get_ncmodeldata, plot_netmapdata
from dfm_tools.get_nc_helpers import get_ncvarproperties, get_stationid_fromstationlist, get_varnamefromattrs
from dfm_tools.get_nc_helpers import get_timesfromnc
import contextily as ctx
from shapely.geometry import Point, MultiPoint
from shapely.ops import nearest_points
import pathlib as pl
import xarray as xr
from datetime import timedelta
import datetime as datetime
from matplotlib import dates as mpl_dates
from matplotlib.ticker import (MultipleLocator, AutoMinorLocator)
from sklearn.metrics import mean_squared_error
#%% Read Model Log
# Model run log: one row per run; 'Run Location' gives each run's output folder
runLog = pd.read_excel("C:/Users/mrobinson/OneDrive - W.F. Baird & Associates Coastal Engineers Ltd/Documents/11934.202 NTC/Model Log NTC.xlsx", 'ModelLog')
# Default Delft3D-FM output file names (map and history)
dataPath = "FlowFM_map.nc"
histPath = "FlowFM_his.nc"
#%% Load in Water Level Data
# Field Gauge-EAST
# Field Facility gauge record (eastern end of the study area)
FFG_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/FieldFacility.csv")
FFG_pd['DateTime'] = pd.to_datetime(FFG_pd.Date_time)
FFG_pd.set_index(FFG_pd['DateTime'], inplace=True)
# Put in UTC from Eastern Time (with DST!)
# ambiguous='infer' resolves the repeated fall-back hour from timestamp order
FFG_pd = FFG_pd.tz_localize(
    pytz.timezone('US/Eastern'), ambiguous='infer').tz_convert(pytz.utc)
# National Grid Gauge-WEST
NGG1_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid1.csv")
NGG1_pd['DateTime'] = pd.to_datetime(NGG1_pd.Date_time)
NGG1_pd.set_index(NGG1_pd['DateTime'], inplace=True)
# Put in UTC from Eastern Time (with DST!)
# ambiguous='NaT' turns un-resolvable DST fall-back index entries into NaT
NGG1_pd = NGG1_pd.tz_localize(
    pytz.timezone('US/Eastern'), ambiguous='NaT').tz_convert(pytz.utc)
# Drop ambiguous times
# NOTE(review): dropna() filters on NaN *values*, not the NaT index labels
# produced by ambiguous='NaT' -- confirm the ambiguous rows are really removed
NGG1_pd = NGG1_pd.dropna()
# Second National Grid deployment; timestamp column is 'datetime' here
NGG2_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid2.csv")
NGG2_pd['DateTime'] = pd.to_datetime(NGG2_pd.datetime)
NGG2_pd.set_index(NGG2_pd['DateTime'], inplace=True)
# Put in UTC from Eastern Time (with DST!)
NGG2_pd = NGG2_pd.tz_localize(
    pytz.timezone('US/Eastern'), ambiguous='NaT').tz_convert(pytz.utc)
# Drop ambiguous times
NGG2_pd = NGG2_pd.dropna()
#%% Read in time series
# Runs to load; Delft_WL is indexed directly by run number
modelPlot = [105, 106, 107, 108]
Delft_WL = [None] * (max(modelPlot)+1)
# Note, this uses the standard netcdf lib + xarray
for i in modelPlot:
    # Build the history-file path for this run from the run log
    file_nc_hist = pl.Path(runLog['Run Location'][i],
                           'FlowFM', 'dflowfm', 'output', 'FlowFM_his.nc')
    # Hist file path
    file_nc_hist = file_nc_hist.as_posix()
    # Open hist file dataset
    data_xr = xr.open_mfdataset(file_nc_hist, preprocess=dfmt.preprocess_hisnc)
    # Get Variables
    vars_pd = get_ncvarproperties(data_xr)
    # Get stations
    stations_pd = data_xr['stations'].to_dataframe()
    # Convert to Pandas
    # West_WL / East_WL are the two water-level observation stations
    Delft_WL[i] = data_xr.waterlevel.sel(stations=['West_WL', 'East_WL']).to_pandas()
    # Put in UTC if needed
    # Run 104 wrote its history output in EST; later runs are already UTC
    if i == 104:
        Delft_WL[i] = Delft_WL[i].tz_localize(
            pytz.timezone('EST')).tz_convert(pytz.utc)
    else:
        Delft_WL[i] = Delft_WL[i].tz_localize(
            pytz.timezone('UTC'))
#%%
# --- Run 107: modelled vs measured water-level scatter at both gauges ---
modelPlot=[107]
i=107
# Common 20-minute time base spanning the model record
interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
                            Delft_WL[modelPlot[0]].index[-1],
                            freq="20min")
df1=Delft_WL[i]
# shift(periods=1, freq='0H') shifts by zero hours, i.e. copies the index
# NOTE(review): looks like a leftover from experimenting with a time offset -- confirm
df1_index=df1.index.shift(periods=1,freq='0H')
df2=pd.DataFrame(index=df1_index)
df2['West_WL']=df1['West_WL']
df2['East_WL']=df1['East_WL']
# Shift the model series by 30 output intervals to align with the gauges
# NOTE(review): magic offset of 30 samples -- confirm where it comes from
df2['West_WL']=df2['West_WL'].shift(30)
df2['East_WL']=df2['East_WL'].shift(30)
# Field Facility gauge on the common time base, ft -> m
x=(np.interp(interpTimes,FFG_pd.index,FFG_pd['Water Surface Elevation (ft)']*0.3048))
# -1.15332 m is treated as a fill/sentinel value in the gauge record
x[x==-1.15332]=np.nan
# Blank the first 2584 samples (presumably pre-deployment/spin-up -- confirm)
# NOTE(review): np.NAN was removed in NumPy 2.0; use np.nan going forward
x[0:2584]=np.NAN
x2= np.interp(interpTimes,NGG1_pd.index,NGG1_pd['Water Surface Elevation (ft)']*0.3048)
x2[0:2638]=np.NAN
#RMSE Values
predNGG=np.interp(interpTimes,df2.index, df2['West_WL'])
rms = mean_squared_error(predNGG[2638:], x2[2638:], squared=False)
print(rms)
predFG=np.interp(interpTimes,df2.index, df2['East_WL'])
rms2 = mean_squared_error(predFG[2584:], x[2584:], squared=False)
print(rms2)
#Plot
# Scatter of modelled vs measured water level; 1:1 line in black
fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(20, 8))
fig.patch.set_facecolor('white')
#interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
#                            Delft_WL[modelPlot[0]].index[-1],
#                            freq="20min")
axes[0].title.set_text('National Grid Gauge')
axes[0].scatter(np.interp(interpTimes,df2.index, df2['West_WL']),
                x2,s=1,
                label='RMSE =' + str(round(rms,2)))
axes[1].title.set_text('Field Facility Gauge')
axes[1].scatter(np.interp(interpTimes,df2.index, df2['East_WL']), x,s=1,
                label='RMSE =' + str(round(rms2,2)))
axes[1].set_xlim(-2, 2)
axes[1].set_ylim(-2, 2)
axes[0].set_xlim(-2, 2)
axes[0].set_ylim(-2, 2)
linepts = np.array([-3, 3])
axes[0].plot(linepts, linepts, color='k')
axes[1].plot(linepts, linepts, color='k')
axes[0].set_xlabel('Modelled water level (m)')
axes[0].set_ylabel('Measured water level (m)')
axes[1].set_xlabel('Modelled water level (m)')
axes[1].set_ylabel('Measured water level (m)')
axes[1].grid()
axes[0].grid()
fig.suptitle(runLog['Plotting Legend Entry'][i])
axes[1].legend(loc='lower left')
axes[0].legend(loc='lower left')
#%%
# --- Run 106: modelled vs measured water-level scatter (2012 period) ---
modelPlot=[106]
i=106
#interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
#                            Delft_WL[modelPlot[0]].index[-1],
#                            freq="20min")
#Note For 106 Run, period is from 2012-03-01 to 2012-10-30
interpTimes = pd.date_range("2012-03-01", "2012-10-30", freq="20min")
df1=Delft_WL[i]
# shift(periods=1, freq='0H') is a zero-hour shift (index copy)
# NOTE(review): confirm intent
df1_index=df1.index.shift(periods=1,freq='0H')
df2=pd.DataFrame(index=df1_index)
df2['West_WL']=df1['West_WL']
df2['East_WL']=df1['East_WL']
# Shift model series by 30 output intervals to align with the gauges
df2['West_WL']=df2['West_WL'].shift(30)
df2['East_WL']=df2['East_WL'].shift(30)
# Gauges on the common time base, ft -> m; -1.15332 m is a fill value,
# and the first 2584/2638 samples are blanked (presumably spin-up -- confirm)
x=(np.interp(interpTimes,FFG_pd.index,FFG_pd['Water Surface Elevation (ft)']*0.3048))
x[x==-1.15332]=np.nan
x[0:2584]=np.NAN
x2= np.interp(interpTimes,NGG1_pd.index,NGG1_pd['Water Surface Elevation (ft)']*0.3048)
x2[0:2638]=np.NAN
#RMSE Values
predNGG=np.interp(interpTimes,df2.index, df2['West_WL'])
rms = mean_squared_error(predNGG[2638:], x2[2638:], squared=False)
print(rms.round(3))
predFG=np.interp(interpTimes,df2.index, df2['East_WL'])
rms2 = mean_squared_error(predFG[2584:], x[2584:], squared=False)
print(rms2.round(3))
#Plot
# Scatter of modelled vs measured water level; 1:1 line in black
fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(18, 8))
fig.patch.set_facecolor('white')
plt.rcParams['font.size'] = 16
#interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
#                            Delft_WL[modelPlot[0]].index[-1],
#                            freq="20min")
axes[0].title.set_text('National Grid Gauge')
axes[0].scatter(np.interp(interpTimes,df2.index, df2['West_WL']),
                x2,s=1,
                label='RMSE =' + str(round(rms,3)))
axes[1].title.set_text('Field Facility Gauge')
axes[1].scatter(np.interp(interpTimes,df2.index, df2['East_WL']), x,s=1,
                label='RMSE =' + str(round(rms2,3)))
axes[1].set_xlim(-2, 2)
axes[1].set_ylim(-2, 2)
axes[0].set_xlim(-2, 2)
axes[0].set_ylim(-2, 2)
linepts = np.array([-3, 3])
axes[0].plot(linepts, linepts, color='k')
axes[1].plot(linepts, linepts, color='k')
axes[0].set_xlabel('Modelled water level (m)')
axes[0].set_ylabel('Measured water level (m)')
axes[1].set_xlabel('Modelled water level (m)')
axes[1].set_ylabel('Measured water level (m)')
axes[1].grid()
axes[0].grid()
axes[0].set_aspect('equal')
axes[1].set_aspect('equal')
fig.suptitle(runLog['Plotting Legend Entry'][i])
axes[1].legend(loc='lower left')
axes[0].legend(loc='lower left')
#%%
#%%
# Quick time-series overlay: measured gauges (red) vs shifted model (default colour).
# Uses df2 from whichever run cell was executed last.
fig, axes = plt.subplots(nrows=1,ncols=2,figsize=(20,10))
axes[0].plot(FFG_pd.index,FFG_pd['Water Surface Elevation (ft)']*0.3048,color='red')
axes[0].plot(df2.index,df2['East_WL'])
axes[1].plot(NGG1_pd['DateTime'],NGG1_pd['Water Surface Elevation (ft)']*0.3048,color='red')
axes[1].plot(df2.index,df2['West_WL'])
# Limit the left (Field Facility) panel to 2012; right panel stays autoscaled
axes[0].set_xlim([np.datetime64('2012-01-01'), np.datetime64('2013-01-01')])
#%%
# --- Run 105: modelled vs measured scatter against the NGG2 deployment ---
modelPlot=[105]
i=105
# Common 20-minute time base spanning the model record
interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
                            Delft_WL[modelPlot[0]].index[-1],
                            freq="20min")
df1=Delft_WL[i]
# shift(periods=1, freq='0H') is a zero-hour shift (index copy)
# NOTE(review): confirm intent
df1_index=df1.index.shift(periods=1,freq='0H')
df2=pd.DataFrame(index=df1_index)
df2['West_WL']=df1['West_WL']
df2['East_WL']=df1['East_WL']
# Shift model series by 30 output intervals to align with the gauges
df2['West_WL']=df2['West_WL'].shift(30)
df2['East_WL']=df2['East_WL'].shift(30)
x=(np.interp(interpTimes,FFG_pd.index,FFG_pd['Water Surface Elevation (ft)']*0.3048))
x2=np.interp(interpTimes,df2.index, df2['West_WL'])
pred=np.interp(interpTimes,df2.index, df2['West_WL'])
# NGG2 record is already in metres * 0.3048? NOTE(review): column name suggests
# feet were converted upstream -- confirm units before trusting the RMSE
meas=np.interp(interpTimes,NGG2_pd.index,NGG2_pd['water_surface_elevation']*0.3048)
#RMSE Values
# Skip the first 15 samples (model start-up)
rms3=mean_squared_error(meas[15:],pred[15:], squared=False)
print(rms3)
"""
predNGG=np.interp(interpTimes,df2.index, df2['West_WL'])
rms = mean_squared_error(predNGG[2638:], x2[2638:], squared=False)
print(rms)
predFG=np.interp(interpTimes,df2.index, df2['East_WL'])
rms2 = mean_squared_error(predFG[2584:], x[2584:], squared=False)
print(rms2)
"""
# Plot
# Scatter of modelled vs measured water level at NGG; 1:1 line in black
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(10, 10))
fig.patch.set_facecolor('white')
interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
                            Delft_WL[modelPlot[0]].index[-1],
                            freq="20min")
axes.title.set_text('National Grid Gauge')
axes.scatter(np.interp(interpTimes,df2.index, df2['West_WL']),
             np.interp(interpTimes,NGG2_pd.index,NGG2_pd['water_surface_elevation']*0.3048),s=1,
             label='RMSE =' + str(round(rms3,2)))
axes.set_xlim(-2, 2)
axes.set_ylim(-2, 2)
linepts = np.array([-3, 3])
axes.plot(linepts, linepts, color='k')
axes.set_xlabel('Modelled water level (m)')
axes.set_ylabel('Measured water level (m)')
axes.grid()
fig.suptitle(runLog['Plotting Legend Entry'][i])
#%%
# --- Run 105 (repeat): same scatter as the previous cell but with the RMSE
# rounded to 3 decimals, a legend, and equal axis aspect ---
modelPlot=[105]
i=105
interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
                            Delft_WL[modelPlot[0]].index[-1],
                            freq="20min")
df1=Delft_WL[i]
# shift(periods=1, freq='0H') is a zero-hour shift (index copy)
df1_index=df1.index.shift(periods=1,freq='0H')
df2=pd.DataFrame(index=df1_index)
df2['West_WL']=df1['West_WL']
df2['East_WL']=df1['East_WL']
# Shift model series by 30 output intervals to align with the gauges
df2['West_WL']=df2['West_WL'].shift(30)
df2['East_WL']=df2['East_WL'].shift(30)
x=(np.interp(interpTimes,FFG_pd.index,FFG_pd['Water Surface Elevation (ft)']*0.3048))
x2=np.interp(interpTimes,df2.index, df2['West_WL'])
pred=np.interp(interpTimes,df2.index, df2['West_WL'])
meas=np.interp(interpTimes,NGG2_pd.index,NGG2_pd['water_surface_elevation']*0.3048)
#RMSE Values
# Skip the first 15 samples (model start-up)
rms3=mean_squared_error(meas[15:],pred[15:], squared=False)
print(rms3)
# Plot
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(10, 10))
fig.patch.set_facecolor('white')
interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
                            Delft_WL[modelPlot[0]].index[-1],
                            freq="20min")
axes.title.set_text('National Grid Gauge')
axes.scatter(np.interp(interpTimes,df2.index, df2['West_WL']),
             np.interp(interpTimes,NGG2_pd.index,NGG2_pd['water_surface_elevation']*0.3048),s=1,
             label='RMSE =' + str(round(rms3,3)))
axes.set_xlim(-2, 2)
axes.set_ylim(-2, 2)
linepts = np.array([-3, 3])
axes.plot(linepts, linepts, color='k')
axes.set_xlabel('Modelled water level (m)')
axes.set_ylabel('Measured water level (m)')
axes.grid()
axes.legend(loc='lower left')
axes.set_aspect('equal')
fig.suptitle(runLog['Plotting Legend Entry'][i])

205
NTC_DFM/dfm_tools_23.txt Normal file
View File

@ -0,0 +1,205 @@
# This file may be used to create an environment using:
# $ conda create --name <env> --file <this file>
# platform: win-64
@EXPLICIT
https://conda.anaconda.org/conda-forge/win-64/git-2.39.1-h57928b3_0.conda
https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2022.12.7-h5b45459_0.conda
https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2023.0.0-h57928b3_25922.conda
https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.11-hd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/python_abi-3.9-3_cp39.conda
https://conda.anaconda.org/conda-forge/noarch/tzdata-2022g-h191b570_0.conda
https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.22621.0-h57928b3_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.34.31931-h4c5c07a_10.conda
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/vc-14.3-hb6edc58_10.conda
https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h8ffe710_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/expat-2.5.0-h1537add_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/geos-3.11.1-h1537add_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/icu-70.1-h0e60522_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/jpeg-9e-h8ffe710_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libaec-1.0.6-h63175ca_1.conda
https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.0.9-hcfcfb64_8.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.17-hcfcfb64_0.conda
https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libiconv-1.17-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libjpeg-turbo-2.1.4-hcfcfb64_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libspatialindex-1.9.3-h39d44d4_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.40.0-hcfcfb64_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.2.4-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libzlib-1.2.13-hcfcfb64_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.9.4-hcfcfb64_0.conda
https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/openssl-3.0.7-hcfcfb64_2.conda
https://conda.anaconda.org/conda-forge/win-64/pixman-0.40.0-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pthreads-win32-2.9.1-hfa6e2cd_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/snappy-1.1.9-hfb803bf_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tk-8.6.12-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.4-h63175ca_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/freexl-1.0.6-h67ca5e6_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/gettext-0.21.1-h5728263_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/krb5-1.20.1-heb0366b_0.conda
https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.0.9-hcfcfb64_8.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.0.9-hcfcfb64_8.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.39-h19919ed_0.conda
https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-he22b5cd_12.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libssh2-1.10.0-h9a1e1f7_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libxml2-2.10.3-hc3477c8_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libzip-1.9.2-h519de47_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pcre2-10.40-h17e33f8_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/python-3.9.15-h4de0772_0_cpython.conda
https://conda.anaconda.org/conda-forge/win-64/sqlite-3.40.0-hcfcfb64_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/zlib-1.2.13-hcfcfb64_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.2-h12be248_6.conda
https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/noarch/appdirs-1.4.4-pyh9f0ad1d_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/attrs-22.2.0-pyh71513ae_0.conda
https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.3-hdccc3a2_0.conda
https://conda.anaconda.org/conda-forge/win-64/boost-cpp-1.78.0-h9f4b32c_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.0.9-hcfcfb64_8.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/certifi-2022.12.7-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.1-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/docopt-0.6.2-py_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/freetype-2.12.1-h546665d_1.conda
https://conda.anaconda.org/conda-forge/noarch/fsspec-2023.1.0-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/noarch/geographiclib-1.52-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/hdf4-4.2.15-h1b1b6ef_5.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.4-py39h1f6ef14_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libcurl-7.87.0-h68f0423_0.conda
https://conda.anaconda.org/conda-forge/win-64/libglib-2.74.1-he8f3873_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libhwloc-2.8.0-h039e092_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libpq-15.1-ha9684e8_3.conda
https://conda.anaconda.org/conda-forge/win-64/libtiff-4.5.0-hf8721a0_2.conda
https://conda.anaconda.org/conda-forge/win-64/llvmlite-0.39.1-py39hd28a505_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lz4-4.2.0-py39hf617134_0.conda
https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.2-py39ha55989b_0.conda
https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.0.4-py39h1f6ef14_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/networkx-3.0-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/noarch/packaging-23.0-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/psutil-5.9.4-py39ha55989b_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-hcd874cb_1001.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pytz-2022.7.1-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0-py39ha55989b_5.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/rtree-1.0.1-py39h09fdee3_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/setuptools-66.1.1-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.3.2.post1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.1.0-pyh8a188c0_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tornado-6.2-py39ha55989b_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.4.0-pyha770c72_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/unicodedata2-15.0.0-py39ha55989b_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/webob-1.8.7-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/wheel-0.38.4-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/win_inet_pton-1.1.0-pyhd8ed1ab_6.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.9-hcd874cb_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.3-hcd874cb_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/xyzservices-2022.9.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.11.1-pyha770c72_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/brotli-1.0.9-hcfcfb64_8.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cffi-1.15.1-py39h68f70e3_3.conda
https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.2.0-h9ebe7e4_0.conda
https://conda.anaconda.org/conda-forge/noarch/click-8.1.3-win_pyhd8ed1ab_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/curl-7.87.0-h68f0423_0.conda
https://conda.anaconda.org/conda-forge/win-64/cytoolz-0.12.0-py39ha55989b_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/fontconfig-2.14.2-hbde0cde_0.conda
https://conda.anaconda.org/conda-forge/noarch/geopy-2.3.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/hdf5-1.12.2-nompi_h57737ce_101.conda
https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/joblib-1.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lcms2-2.14-ha5c8aab_1.conda
https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-hf2ab4e4_1015.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libxcb-1.13-hcd874cb_1004.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/munch-2.5.0-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.0-ha2aaf27_2.conda
https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.1.0-py39ha55989b_0.conda
https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pip-23.0-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/postgresql-15.1-hd87cd2b_3.conda
https://conda.anaconda.org/conda-forge/win-64/proj-9.1.0-heca977f_1.conda
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tbb-2021.7.0-h91493d7_1.conda
https://conda.anaconda.org/conda-forge/noarch/tqdm-4.64.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/zict-2.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/branca-0.6.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/brotlipy-0.7.0-py39ha55989b_1005.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cairo-1.16.0-hd694305_1014.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cryptography-39.0.0-py39hb6bd5e6_0.conda
https://conda.anaconda.org/conda-forge/noarch/dask-core-2023.1.1-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/fonttools-4.38.0-py39ha55989b_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.1-h720ab47_5.conda
https://conda.anaconda.org/conda-forge/win-64/kealib-1.5.0-h61be68b_0.conda
https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.8.1-nompi_h8c042bf_106.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.0.1-h07bf483_22.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/mkl-2022.1.0-h6a75c08_874.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pillow-9.4.0-py39h9767c21_0.conda
https://conda.anaconda.org/conda-forge/win-64/pyproj-3.4.1-py39h9727d73_0.conda
https://conda.anaconda.org/conda-forge/win-64/tiledb-2.13.2-h3132609_0.conda
https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-16_win64_mkl.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/poppler-22.12.0-h183ae7b_1.conda
https://conda.anaconda.org/conda-forge/noarch/pyopenssl-23.0.0-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-16_win64_mkl.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libgdal-3.6.2-h060c9ed_3.conda
https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-16_win64_mkl.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.14-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/noarch/distributed-2023.1.1-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/numpy-1.23.5-py39hbccbffa_0.conda
https://conda.anaconda.org/conda-forge/noarch/requests-2.28.2-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/noarch/bokeh-2.4.3-pyhd8ed1ab_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/bottleneck-1.3.6-py39hc266a54_0.conda
https://conda.anaconda.org/conda-forge/noarch/cdsapi-0.5.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cftime-1.6.2-py39hc266a54_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/contourpy-1.0.7-py39h1f6ef14_0.conda
https://conda.anaconda.org/conda-forge/noarch/folium-0.14.0-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/gdal-3.6.2-py39h3be0312_3.conda
https://conda.anaconda.org/conda-forge/win-64/numba-0.56.4-py39h99ae161_0.conda
https://conda.anaconda.org/conda-forge/win-64/pandas-1.5.3-py39h2ba5b7c_0.conda
https://conda.anaconda.org/conda-forge/noarch/pooch-1.6.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pydap-3.3.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyepsg-0.4.0-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/shapely-2.0.1-py39h7c5f289_0.conda
https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/dask-2023.1.1-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/fiona-1.9.0-py39hb5a1417_0.conda
https://conda.anaconda.org/conda-forge/noarch/geopandas-base-0.12.2-pyha770c72_0.conda
https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.6.3-py39haf65ace_0.conda
https://conda.anaconda.org/conda-forge/win-64/netcdf4-1.6.2-nompi_py39h34fa13a_100.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/numba_celltree-0.1.6-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.4-py39hce277b7_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/scipy-1.10.0-py39hfbf2dce_0.conda
https://conda.anaconda.org/conda-forge/noarch/xarray-2023.1.0-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/win-64/cartopy-0.21.1-py39h25ee47b_0.conda
https://conda.anaconda.org/conda-forge/noarch/contextily-1.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.2.1-py39h6fe01c0_0.conda
https://conda.anaconda.org/conda-forge/noarch/xugrid-0.2.0-pyhd8ed1ab_0.conda
https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.5.0-pyhd8ed1ab_1.conda
https://conda.anaconda.org/conda-forge/noarch/geopandas-0.12.2-pyhd8ed1ab_0.conda

212
NTC_DFM/dfm_tools_23.yml Normal file
View File

@ -0,0 +1,212 @@
name: dfm_tools_23
channels:
- conda-forge
dependencies:
- affine=2.4.0=pyhd8ed1ab_0
- appdirs=1.4.4=pyh9f0ad1d_0
- attrs=22.2.0=pyh71513ae_0
- beautifulsoup4=4.11.1=pyha770c72_0
- blosc=1.21.3=hdccc3a2_0
- bokeh=2.4.3=pyhd8ed1ab_3
- boost-cpp=1.78.0=h9f4b32c_1
- bottleneck=1.3.6=py39hc266a54_0
- branca=0.6.0=pyhd8ed1ab_0
- brotli=1.0.9=hcfcfb64_8
- brotli-bin=1.0.9=hcfcfb64_8
- brotlipy=0.7.0=py39ha55989b_1005
- bzip2=1.0.8=h8ffe710_4
- ca-certificates=2022.12.7=h5b45459_0
- cairo=1.16.0=hd694305_1014
- cartopy=0.21.1=py39h25ee47b_0
- cdsapi=0.5.1=pyhd8ed1ab_0
- certifi=2022.12.7=pyhd8ed1ab_0
- cffi=1.15.1=py39h68f70e3_3
- cfitsio=4.2.0=h9ebe7e4_0
- cftime=1.6.2=py39hc266a54_1
- charset-normalizer=2.1.1=pyhd8ed1ab_0
- click=8.1.3=win_pyhd8ed1ab_2
- click-plugins=1.1.1=py_0
- cligj=0.7.2=pyhd8ed1ab_1
- cloudpickle=2.2.1=pyhd8ed1ab_0
- colorama=0.4.6=pyhd8ed1ab_0
- contextily=1.2.0=pyhd8ed1ab_0
- contourpy=1.0.7=py39h1f6ef14_0
- cryptography=39.0.0=py39hb6bd5e6_0
- curl=7.87.0=h68f0423_0
- cycler=0.11.0=pyhd8ed1ab_0
- cytoolz=0.12.0=py39ha55989b_1
- dask=2023.1.1=pyhd8ed1ab_0
- dask-core=2023.1.1=pyhd8ed1ab_0
- distributed=2023.1.1=pyhd8ed1ab_0
- docopt=0.6.2=py_1
- et_xmlfile=1.1.0=pyhd8ed1ab_0
- expat=2.5.0=h1537add_0
- fiona=1.9.0=py39hb5a1417_0
- folium=0.14.0=pyhd8ed1ab_0
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=hab24e00_0
- fontconfig=2.14.2=hbde0cde_0
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.38.0=py39ha55989b_1
- freetype=2.12.1=h546665d_1
- freexl=1.0.6=h67ca5e6_1
- fsspec=2023.1.0=pyhd8ed1ab_0
- gdal=3.6.2=py39h3be0312_3
- geographiclib=1.52=pyhd8ed1ab_0
- geopandas=0.12.2=pyhd8ed1ab_0
- geopandas-base=0.12.2=pyha770c72_0
- geopy=2.3.0=pyhd8ed1ab_0
- geos=3.11.1=h1537add_0
- geotiff=1.7.1=h720ab47_5
- gettext=0.21.1=h5728263_0
- git=2.39.1=h57928b3_0
- hdf4=4.2.15=h1b1b6ef_5
- hdf5=1.12.2=nompi_h57737ce_101
- heapdict=1.0.1=py_0
- icu=70.1=h0e60522_0
- idna=3.4=pyhd8ed1ab_0
- intel-openmp=2023.0.0=h57928b3_25922
- jinja2=3.1.2=pyhd8ed1ab_1
- joblib=1.2.0=pyhd8ed1ab_0
- jpeg=9e=h8ffe710_2
- kealib=1.5.0=h61be68b_0
- kiwisolver=1.4.4=py39h1f6ef14_1
- krb5=1.20.1=heb0366b_0
- lcms2=2.14=ha5c8aab_1
- lerc=4.0.0=h63175ca_0
- libaec=1.0.6=h63175ca_1
- libblas=3.9.0=16_win64_mkl
- libbrotlicommon=1.0.9=hcfcfb64_8
- libbrotlidec=1.0.9=hcfcfb64_8
- libbrotlienc=1.0.9=hcfcfb64_8
- libcblas=3.9.0=16_win64_mkl
- libcurl=7.87.0=h68f0423_0
- libdeflate=1.17=hcfcfb64_0
- libffi=3.4.2=h8ffe710_5
- libgdal=3.6.2=h060c9ed_3
- libglib=2.74.1=he8f3873_1
- libhwloc=2.8.0=h039e092_1
- libiconv=1.17=h8ffe710_0
- libjpeg-turbo=2.1.4=hcfcfb64_0
- libkml=1.3.0=hf2ab4e4_1015
- liblapack=3.9.0=16_win64_mkl
- libnetcdf=4.8.1=nompi_h8c042bf_106
- libpng=1.6.39=h19919ed_0
- libpq=15.1=ha9684e8_3
- librttopo=1.1.0=he22b5cd_12
- libspatialindex=1.9.3=h39d44d4_4
- libspatialite=5.0.1=h07bf483_22
- libsqlite=3.40.0=hcfcfb64_0
- libssh2=1.10.0=h9a1e1f7_3
- libtiff=4.5.0=hf8721a0_2
- libwebp-base=1.2.4=h8ffe710_0
- libxcb=1.13=hcd874cb_1004
- libxml2=2.10.3=hc3477c8_0
- libzip=1.9.2=h519de47_1
- libzlib=1.2.13=hcfcfb64_4
- llvmlite=0.39.1=py39hd28a505_1
- locket=1.0.0=pyhd8ed1ab_0
- lz4=4.2.0=py39hf617134_0
- lz4-c=1.9.4=hcfcfb64_0
- m2w64-gcc-libgfortran=5.3.0=6
- m2w64-gcc-libs=5.3.0=7
- m2w64-gcc-libs-core=5.3.0=7
- m2w64-gmp=6.1.0=2
- m2w64-libwinpthread-git=5.0.0.4634.697f757=2
- mapclassify=2.5.0=pyhd8ed1ab_1
- markupsafe=2.1.2=py39ha55989b_0
- matplotlib-base=3.6.3=py39haf65ace_0
- mercantile=1.2.1=pyhd8ed1ab_0
- mkl=2022.1.0=h6a75c08_874
- msgpack-python=1.0.4=py39h1f6ef14_1
- msys2-conda-epoch=20160418=1
- munch=2.5.0=py_0
- munkres=1.1.4=pyh9f0ad1d_0
- netcdf4=1.6.2=nompi_py39h34fa13a_100
- networkx=3.0=pyhd8ed1ab_0
- numba=0.56.4=py39h99ae161_0
- numba_celltree=0.1.6=pyhd8ed1ab_0
- numpy=1.23.5=py39hbccbffa_0
- openjpeg=2.5.0=ha2aaf27_2
- openpyxl=3.1.0=py39ha55989b_0
- openssl=3.0.7=hcfcfb64_2
- packaging=23.0=pyhd8ed1ab_0
- pandas=1.5.3=py39h2ba5b7c_0
- partd=1.3.0=pyhd8ed1ab_0
- pcre2=10.40=h17e33f8_0
- pillow=9.4.0=py39h9767c21_0
- pip=23.0=pyhd8ed1ab_0
- pixman=0.40.0=h8ffe710_0
- pooch=1.6.0=pyhd8ed1ab_0
- poppler=22.12.0=h183ae7b_1
- poppler-data=0.4.11=hd8ed1ab_0
- postgresql=15.1=hd87cd2b_3
- proj=9.1.0=heca977f_1
- psutil=5.9.4=py39ha55989b_0
- pthread-stubs=0.4=hcd874cb_1001
- pthreads-win32=2.9.1=hfa6e2cd_3
- pycparser=2.21=pyhd8ed1ab_0
- pydap=3.3.0=pyhd8ed1ab_0
- pyepsg=0.4.0=py_0
- pyopenssl=23.0.0=pyhd8ed1ab_0
- pyparsing=3.0.9=pyhd8ed1ab_0
- pyproj=3.4.1=py39h9727d73_0
- pyshp=2.3.1=pyhd8ed1ab_0
- pysocks=1.7.1=pyh0701188_6
- python=3.9.15=h4de0772_0_cpython
- python-dateutil=2.8.2=pyhd8ed1ab_0
- python_abi=3.9=3_cp39
- pytz=2022.7.1=pyhd8ed1ab_0
- pyyaml=6.0=py39ha55989b_5
- rasterio=1.3.4=py39hce277b7_0
- requests=2.28.2=pyhd8ed1ab_0
- rtree=1.0.1=py39h09fdee3_1
- scikit-learn=1.2.1=py39h6fe01c0_0
- scipy=1.10.0=py39hfbf2dce_0
- setuptools=66.1.1=pyhd8ed1ab_0
- shapely=2.0.1=py39h7c5f289_0
- six=1.16.0=pyh6c4a22f_0
- snappy=1.1.9=hfb803bf_2
- snuggs=1.4.7=py_0
- sortedcontainers=2.4.0=pyhd8ed1ab_0
- soupsieve=2.3.2.post1=pyhd8ed1ab_0
- sqlite=3.40.0=hcfcfb64_0
- tbb=2021.7.0=h91493d7_1
- tblib=1.7.0=pyhd8ed1ab_0
- threadpoolctl=3.1.0=pyh8a188c0_0
- tiledb=2.13.2=h3132609_0
- tk=8.6.12=h8ffe710_0
- toolz=0.12.0=pyhd8ed1ab_0
- tornado=6.2=py39ha55989b_1
- tqdm=4.64.1=pyhd8ed1ab_0
- typing_extensions=4.4.0=pyha770c72_0
- tzdata=2022g=h191b570_0
- ucrt=10.0.22621.0=h57928b3_0
- unicodedata2=15.0.0=py39ha55989b_0
- urllib3=1.26.14=pyhd8ed1ab_0
- vc=14.3=hb6edc58_10
- vs2015_runtime=14.34.31931=h4c5c07a_10
- webob=1.8.7=pyhd8ed1ab_0
- wheel=0.38.4=pyhd8ed1ab_0
- win_inet_pton=1.1.0=pyhd8ed1ab_6
- xarray=2023.1.0=pyhd8ed1ab_0
- xerces-c=3.2.4=h63175ca_1
- xorg-libxau=1.0.9=hcd874cb_0
- xorg-libxdmcp=1.1.3=hcd874cb_0
- xugrid=0.2.0=pyhd8ed1ab_0
- xyzservices=2022.9.0=pyhd8ed1ab_0
- xz=5.2.6=h8d14728_0
- yaml=0.2.5=h8ffe710_2
- zict=2.2.0=pyhd8ed1ab_0
- zlib=1.2.13=hcfcfb64_4
- zstd=1.5.2=h12be248_6
- pip:
- dfm-tools==0.10.3
- hydrolib-core==0.4.2
- lxml==4.9.2
- meshkernel==2.0.0
- pydantic==1.10.4
prefix: C:\Users\arey\Anaconda3\envs\dfm_tools_23

8
pyextremes/.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

View File

@ -0,0 +1,13 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="bool.*" />
<option value="geopandas.io.file" />
</list>
</option>
</inspection_tool>
</profile>
</component>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.10 (pyextremes)" project-jdk-type="Python SDK" />
</project>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/pyextremes.iml" filepath="$PROJECT_DIR$/.idea/pyextremes.iml" />
</modules>
</component>
</project>

View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PySciProjectComponent">
<option name="PY_SCI_VIEW" value="true" />
<option name="PY_SCI_VIEW_SUGGESTED" value="true" />
</component>
</project>

View File

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">
<option name="renderExternalDocumentation" value="true" />
</component>
</module>

6
pyextremes/.idea/vcs.xml Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

144
pyextremes/WL_Extremes.py Normal file
View File

@ -0,0 +1,144 @@
# -*- coding: utf-8 -*-
"""
Extreme value analysis (EVA) of hourly water levels using pyextremes.

Workflow:
  1. Read the hourly water-level record, remove sentinel/missing values,
     and resample to a regular hourly series (linear interpolation).
  2. Split the signal into a slowly varying "static" water level
     (30-day Gaussian smooth) and a residual "surge" component.
  3. Fit a peaks-over-threshold (POT) model to the surge, and annual
     block-maxima (BM) models to the static and combined signals.
  4. Plot the time series and save diagnostic figures.

Created on Mon Feb 27 10:12:19 2023
@author: usaied
"""
import datetime
#import calendar
#import math
#import re
import pandas as pd
#import seaborn as sns
import matplotlib.dates as mdates
#import dateutil
import numpy as np
import matplotlib.pyplot as plt
#import statsmodels.api as sm
#from statsmodels.tsa.statespace.sarimax import SARIMAX
#from statsmodels.tsa.seasonal import STL
#import sklearn
import scipy

#pip install pyextremes
# pyextremes imports stay under the main guard so that multiprocessing
# workers (spawned on Windows) do not re-run them on module import.
if __name__ == '__main__':
    from pyextremes import get_extremes, get_return_periods
    from pyextremes import EVA
    from pyextremes.plotting import plot_extremes

# Read data; the 'Datetime' column is parsed as 'YYYY-mm-dd HH:MM'.
d_parser = lambda x: datetime.datetime.strptime(x, '%Y-%m-%d %H:%M')
WL_hourly = pd.read_csv(
    "//srv-oak3.baird.com/Projects/13471.101 Ontario Place Therme/05_Analyses/37_EVA PyExtremes/input_WL.csv",
    skiprows=16, parse_dates=['Datetime'], date_parser=d_parser)

#%% Fix for multiprocessing pool with PyCharm
__file__ = 'WL_Extremes.py'

#%% =========Data Processing and preparation================
# Delete rows with sentinel (missing) water level values (< -900).
WL_hourly = WL_hourly.drop(WL_hourly[(WL_hourly.WL < -900)].index)
# FIX: dropna() returns a new frame; the original call discarded the
# result, so NaN rows were never actually removed.
WL_hourly = WL_hourly.dropna()

# Reset index to Datetime
WL_hourly.set_index('Datetime', inplace=True)

# Hourly resample with linear interpolation
WL_hourly = WL_hourly.asfreq('h')  # This will generate missing values
WL_hourly = WL_hourly.interpolate(method='linear')  # Linear interpolation

# Period of record (years) — informational only.
StartDate = WL_hourly.index.min()
EndDate = WL_hourly.index.max()
Length_of_record = (EndDate - StartDate).days / 365.2425  # in years
#WL_hr_POR = WL_hourly.drop(WL_hourly[(WL_hourly.index < StartDate) | (WL_hourly.index > EndDate)].index)
#df1['WL'] = WL_hr_POR['WL']

# Resample for monthly average water levels.
WaterLevels_MM = WL_hourly.resample('M').mean()  # Monthly resample
WaterLevels_MM = WaterLevels_MM.interpolate(method='linear')
WaterLevels_MM.rename(columns={'WL': 'WL_MM'}, inplace=True)

##### Gaussian Kernel - Develop a continuous variable for EVA ###
Length = 30  # 30 Days average
x = np.array(WL_hourly['WL'])
# The Gaussian kernel is applied over 4 sigma by default, therefore
# multiply Length (days) by 24 hr / 4 = 6 to get sigma in hours.
y_static = scipy.ndimage.gaussian_filter(x, sigma=Length * 6, order=0)
WL_hourly['Static_WL'] = y_static
WL_hourly['Surge'] = WL_hourly['WL'] - WL_hourly['Static_WL']
Max_Surge = WL_hourly['Surge'].max()

###### Peak Over Threshold - Surge Analysis #####
# FIX: these settings were previously declared but never used — the
# get_extremes()/EVA calls below hard-coded threshold=0.14 and r="48H".
# The variables are now actually applied. Threshold is set to 0.14 to
# match the value the original run used (the earlier draft declared
# 0.15 but never passed it).
Threshold = 0.14        # Surge threshold (m)
Storm_Duration = "48H"  # Minimum inter-event spacing (hours)

if __name__ == '__main__':
    Surge = get_extremes(WL_hourly.Surge, "POT", threshold=Threshold, r=Storm_Duration)
    model = EVA(WL_hourly.Surge)
    model.get_extremes(method="POT", threshold=Threshold, r=Storm_Duration)
    model.plot_extremes()

# Fit Distribution: by default the distribution is selected automatically
# as the best between 'genextreme' (GEV) and 'gumbel_r' for 'BM' extremes
# and 'genpareto' and 'expon' for 'POT' extremes, using the AIC metric.
#model.fit_model(model='MLE', distribution='genpareto')
if __name__ == '__main__':
    model.fit_model()
    RP_Surge = model.get_summary(return_period=[2, 5, 10, 25, 50, 100, 200, 500, 1000], alpha=0.95, n_samples=500)
    fig, ax = model.plot_diagnostic(alpha=0.95)
    # plt.savefig('Surge_EVA.png')
    plt.show()

#%%
# NOTE(review): the BM sections below are not under the __main__ guard
# even though they use names imported inside it; this matches the
# original script and only works when the file is run directly.
######## Annual Maxima EVA (BM) Smoothed Static WL Signal ####
Static_WL = get_extremes(WL_hourly.Static_WL, "BM", block_size="365.2425D")
model = EVA(WL_hourly.Static_WL)
model.get_extremes(method="BM", block_size="365.2425D")
model.plot_extremes()
model.fit_model()
RP_Static = model.get_summary(return_period=[2, 5, 10, 25, 50, 100, 200, 500, 1000], alpha=0.95, n_samples=1000)
fig, ax = model.plot_diagnostic(alpha=0.95)
plt.savefig('Static_EVA.png')

######## Annual Maxima EVA (BM) Hourly WL Signal ####
Combined_WL = get_extremes(WL_hourly.WL, "BM", block_size="365.2425D")
model = EVA(WL_hourly.WL)
model.get_extremes(method="BM", block_size="365.2425D")
model.plot_extremes()
model.fit_model()
RP_Combined = model.get_summary(return_period=[2, 5, 10, 25, 50, 100, 200, 500, 1000], alpha=0.95, n_samples=1000)
fig, ax = model.plot_diagnostic(alpha=0.95)
plt.savefig('Combined_EVA.png')

# Time series plot: raw + static water level (top), surge residual (bottom).
fig, ax = plt.subplots(2, figsize=(30, 10), dpi=400)
ax[0].plot(WL_hourly.index, WL_hourly['WL'], linewidth=0.5, label='Water Level (m IGLD85)')
ax[0].plot(WL_hourly.index, WL_hourly['Static_WL'], linewidth=1.5, color='black', label='Static Water Level (m IGLD85)')
ax[0].legend()
ax[0].xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
ax[0].xaxis.set_major_locator(mdates.YearLocator(1, month=1, day=1))
ax[0].set_xlim([StartDate, EndDate])
ax[0].set_ylabel("Water Level (m IGLD85)", fontsize=10)
ax[0].grid(color='grey', linestyle='--', linewidth=0.5)
ax[1].plot(WL_hourly.index, WL_hourly['Surge'], linewidth=0.5, label='Surge (m)')
ax[1].legend()
ax[1].xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
ax[1].xaxis.set_major_locator(mdates.YearLocator(1, month=1, day=1))
ax[1].set_xlim([StartDate, EndDate])
ax[1].set_ylabel("Surge (m)", fontsize=10)
ax[1].grid(color='grey', linestyle='--', linewidth=0.5)
plt.gca().xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
plt.gca().xaxis.set_major_locator(mdates.YearLocator(1, month=1, day=1))
plt.tight_layout()
plt.savefig('Water_Level_TS.png')

View File

@ -16,6 +16,8 @@ import shapely.wkt
import re import re
import urllib.parse
#%% Setup NWS ID #%% Setup NWS ID
nws = nwswx.WxAPI('api@alexanderrey.ca') nws = nwswx.WxAPI('api@alexanderrey.ca')
@ -154,13 +156,16 @@ for alertIDX in nws_alert_gdf.index:
else: else:
alertEndTime = nws_alert_gdf.loc[alertIDX, 'ends'] alertEndTime = nws_alert_gdf.loc[alertIDX, 'ends']
alertURL = 'https://alerts-v2.weather.gov/#/?id=' + \
urllib.parse.quote(nws_alert_gdf.loc[alertIDX, 'id'])
alertString = alertString + '[' + nws_alert_gdf.loc[alertIDX, 'headline'] + '}' \ alertString = alertString + '[' + nws_alert_gdf.loc[alertIDX, 'headline'] + '}' \
'{' + alertDescription + '}' + \ '{' + alertDescription + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, 'areaDesc'] + '}' + \ '{' + nws_alert_gdf.loc[alertIDX, 'areaDesc'] + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, 'onset'] + '}' + \ '{' + nws_alert_gdf.loc[alertIDX, 'onset'] + '}' + \
'{' + alertEndTime + '}' + \ '{' + alertEndTime + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, 'severity'] + '}' + \ '{' + nws_alert_gdf.loc[alertIDX, 'severity'] + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, '@id'] + ']' '{' + alertURL + ']'
alertData_1D[alertPoint] = alertString alertData_1D[alertPoint] = alertString

View File

@ -0,0 +1,186 @@
name: rasterProcess2
channels:
- conda-forge
dependencies:
- appdirs=1.4.4=pyh9f0ad1d_0
- attrs=22.2.0=pyh71513ae_0
- blosc=1.21.3=hdccc3a2_0
- bokeh=2.4.3=pyhd8ed1ab_3
- boost-cpp=1.78.0=h9f4b32c_1
- bottleneck=1.3.5=py38hbaf524b_1
- branca=0.6.0=pyhd8ed1ab_0
- brotli=1.0.9=hcfcfb64_8
- brotli-bin=1.0.9=hcfcfb64_8
- brotlipy=0.7.0=py38h91455d4_1005
- bzip2=1.0.8=h8ffe710_4
- ca-certificates=2022.12.7=h5b45459_0
- cairo=1.16.0=hd694305_1014
- cffi=1.15.1=py38h57701bc_3
- cfitsio=4.2.0=h9ebe7e4_0
- cftime=1.6.2=py38hbaf524b_1
- charset-normalizer=2.1.1=pyhd8ed1ab_0
- click=8.1.3=win_pyhd8ed1ab_2
- click-plugins=1.1.1=py_0
- cligj=0.7.2=pyhd8ed1ab_1
- cloudpickle=2.2.0=pyhd8ed1ab_0
- colorama=0.4.6=pyhd8ed1ab_0
- contourpy=1.0.7=py38hb1fd069_0
- cryptography=39.0.0=py38h95f5157_0
- curl=7.87.0=h68f0423_0
- cycler=0.11.0=pyhd8ed1ab_0
- cytoolz=0.12.0=py38h91455d4_1
- dask=2023.1.0=pyhd8ed1ab_0
- dask-core=2023.1.0=pyhd8ed1ab_0
- distributed=2023.1.0=pyhd8ed1ab_0
- expat=2.5.0=h1537add_0
- fiona=1.8.22=py38hc97cbf3_5
- folium=0.14.0=pyhd8ed1ab_0
- font-ttf-dejavu-sans-mono=2.37=hab24e00_0
- font-ttf-inconsolata=3.000=h77eed37_0
- font-ttf-source-code-pro=2.038=h77eed37_0
- font-ttf-ubuntu=0.83=hab24e00_0
- fontconfig=2.14.1=hbde0cde_0
- fonts-conda-ecosystem=1=0
- fonts-conda-forge=1=0
- fonttools=4.38.0=py38h91455d4_1
- freetype=2.12.1=h546665d_1
- freexl=1.0.6=h67ca5e6_1
- fsspec=2022.11.0=pyhd8ed1ab_0
- gdal=3.6.2=py38h5a6f081_3
- geopandas=0.12.2=pyhd8ed1ab_0
- geopandas-base=0.12.2=pyha770c72_0
- geos=3.11.1=h1537add_0
- geotiff=1.7.1=h720ab47_5
- gettext=0.21.1=h5728263_0
- hdf4=4.2.15=h1b1b6ef_5
- hdf5=1.12.2=nompi_h57737ce_101
- heapdict=1.0.1=py_0
- icu=70.1=h0e60522_0
- idna=3.4=pyhd8ed1ab_0
- intel-openmp=2023.0.0=h57928b3_25922
- jinja2=3.1.2=pyhd8ed1ab_1
- joblib=1.2.0=pyhd8ed1ab_0
- jpeg=9e=h8ffe710_2
- kealib=1.5.0=h61be68b_0
- kiwisolver=1.4.4=py38hb1fd069_1
- krb5=1.20.1=heb0366b_0
- lcms2=2.14=ha5c8aab_1
- lerc=4.0.0=h63175ca_0
- libaec=1.0.6=h63175ca_1
- libblas=3.9.0=16_win64_mkl
- libbrotlicommon=1.0.9=hcfcfb64_8
- libbrotlidec=1.0.9=hcfcfb64_8
- libbrotlienc=1.0.9=hcfcfb64_8
- libcblas=3.9.0=16_win64_mkl
- libcurl=7.87.0=h68f0423_0
- libdeflate=1.17=hcfcfb64_0
- libffi=3.4.2=h8ffe710_5
- libgdal=3.6.2=h060c9ed_3
- libglib=2.74.1=he8f3873_1
- libhwloc=2.8.0=h039e092_1
- libiconv=1.17=h8ffe710_0
- libjpeg-turbo=2.1.4=hcfcfb64_0
- libkml=1.3.0=hf2ab4e4_1015
- liblapack=3.9.0=16_win64_mkl
- libnetcdf=4.8.1=nompi_h8c042bf_106
- libpng=1.6.39=h19919ed_0
- libpq=15.1=ha9684e8_3
- librttopo=1.1.0=he22b5cd_12
- libspatialindex=1.9.3=h39d44d4_4
- libspatialite=5.0.1=h07bf483_22
- libsqlite=3.40.0=hcfcfb64_0
- libssh2=1.10.0=h9a1e1f7_3
- libtiff=4.5.0=hf8721a0_2
- libwebp-base=1.2.4=h8ffe710_0
- libxcb=1.13=hcd874cb_1004
- libxml2=2.10.3=hc3477c8_1
- libzip=1.9.2=h519de47_1
- libzlib=1.2.13=hcfcfb64_4
- locket=1.0.0=pyhd8ed1ab_0
- lz4=4.2.0=py38hb6d8784_0
- lz4-c=1.9.3=h8ffe710_1
- m2w64-gcc-libgfortran=5.3.0=6
- m2w64-gcc-libs=5.3.0=7
- m2w64-gcc-libs-core=5.3.0=7
- m2w64-gmp=6.1.0=2
- m2w64-libwinpthread-git=5.0.0.4634.697f757=2
- mapclassify=2.5.0=pyhd8ed1ab_1
- markupsafe=2.1.1=py38h91455d4_2
- matplotlib-base=3.6.2=py38h528a6c7_0
- mkl=2022.1.0=h6a75c08_874
- msgpack-python=1.0.4=py38hb1fd069_1
- msys2-conda-epoch=20160418=1
- munch=2.5.0=py_0
- munkres=1.1.4=pyh9f0ad1d_0
- netcdf4=1.6.2=nompi_py38h78680c8_100
- networkx=3.0=pyhd8ed1ab_0
- numpy=1.24.1=py38h90ce339_0
- openjpeg=2.5.0=ha2aaf27_2
- openssl=3.0.7=hcfcfb64_1
- packaging=23.0=pyhd8ed1ab_0
- pandas=1.5.2=py38h5846ac1_2
- partd=1.3.0=pyhd8ed1ab_0
- pcre2=10.40=h17e33f8_0
- pillow=9.4.0=py38h409c3de_0
- pip=22.3.1=pyhd8ed1ab_0
- pixman=0.40.0=h8ffe710_0
- pooch=1.6.0=pyhd8ed1ab_0
- poppler=22.12.0=h183ae7b_1
- poppler-data=0.4.11=hd8ed1ab_0
- postgresql=15.1=hd87cd2b_3
- proj=9.1.0=heca977f_1
- psutil=5.9.4=py38h91455d4_0
- pthread-stubs=0.4=hcd874cb_1001
- pthreads-win32=2.9.1=hfa6e2cd_3
- pycparser=2.21=pyhd8ed1ab_0
- pyopenssl=23.0.0=pyhd8ed1ab_0
- pyparsing=3.0.9=pyhd8ed1ab_0
- pyproj=3.4.1=py38hac5b721_0
- pysocks=1.7.1=pyh0701188_6
- python=3.8.15=h4de0772_0_cpython
- python-dateutil=2.8.2=pyhd8ed1ab_0
- python_abi=3.8=3_cp38
- pytz=2022.7.1=pyhd8ed1ab_0
- pyyaml=6.0=py38h91455d4_5
- requests=2.28.2=pyhd8ed1ab_0
- rtree=1.0.1=py38h8b54edf_1
- scikit-learn=1.2.0=py38h69724d7_0
- scipy=1.10.0=py38h0f6ee2a_0
- setuptools=66.0.0=pyhd8ed1ab_0
- shapely=2.0.0=py38h9c0aba1_0
- six=1.16.0=pyh6c4a22f_0
- snappy=1.1.9=hfb803bf_2
- sortedcontainers=2.4.0=pyhd8ed1ab_0
- sqlite=3.40.0=hcfcfb64_0
- tbb=2021.7.0=h91493d7_1
- tblib=1.7.0=pyhd8ed1ab_0
- threadpoolctl=3.1.0=pyh8a188c0_0
- tiledb=2.13.2=h3132609_0
- tk=8.6.12=h8ffe710_0
- toolz=0.12.0=pyhd8ed1ab_0
- tornado=6.2=py38h91455d4_1
- typing_extensions=4.4.0=pyha770c72_0
- ucrt=10.0.22621.0=h57928b3_0
- unicodedata2=15.0.0=py38h91455d4_0
- urllib3=1.26.14=pyhd8ed1ab_0
- vc=14.3=hb6edc58_10
- vs2015_runtime=14.34.31931=h4c5c07a_10
- wheel=0.38.4=pyhd8ed1ab_0
- win_inet_pton=1.1.0=pyhd8ed1ab_6
- xarray=2022.12.0=pyhd8ed1ab_0
- xerces-c=3.2.4=h63175ca_1
- xorg-libxau=1.0.9=hcd874cb_0
- xorg-libxdmcp=1.1.3=hcd874cb_0
- xyzservices=2022.9.0=pyhd8ed1ab_0
- xz=5.2.6=h8d14728_0
- yaml=0.2.5=h8ffe710_2
- zict=2.2.0=pyhd8ed1ab_0
- zlib=1.2.13=hcfcfb64_4
- zstd=1.5.2=h7755175_4
- pip:
- capparselib==0.6.6
- certifi==2022.12.7
- h3==4.0.0b2
- lxml==4.6.3
- nwswx==0.1.1
prefix: C:\Users\arey\Anaconda3\envs\rasterProcess2