Mustique and NTC

This commit is contained in:
Alexander Rey 2022-04-07 09:47:53 -04:00
parent 8e480f8928
commit f80d01a5e7
13 changed files with 810 additions and 917 deletions

File diff suppressed because one or more lines are too long

View File

@ -2,7 +2,7 @@
<module type="PYTHON_MODULE" version="4"> <module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager"> <component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" /> <content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" /> <orderEntry type="jdk" jdkName="Python 3.8 (geopandas_forge_mustique)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />
</component> </component>
<component name="PyDocumentationSettings"> <component name="PyDocumentationSettings">

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?> <?xml version="1.0" encoding="UTF-8"?>
<project version="4"> <project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (BairdBase)" project-jdk-type="Python SDK" /> <component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (geopandas_forge_mustique)" project-jdk-type="Python SDK" />
</project> </project>

View File

@ -0,0 +1,284 @@
## Python script to process EWR (English-Wabigoon River) bathymetry data.
#
# Workflow: merge the river centerline shapefile into one line, interpolate it
# to evenly spaced points, pair each point with the nearest north/south
# waterline point, build cross-channel transect points, attach nearby
# bathymetry observations, and export the result as an xyz file.
# AJMR: March 2022
import pandas as pd
import geopandas as gp
import matplotlib.pyplot as plt
import contextily as ctx
import numpy as np
from shapely import geometry, ops
#%% Read in and process centerline shapefile
# Mapbox tile URL (access token embedded) used as the basemap source for
# every contextily.add_basemap() call below.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
river_centerline = gp.read_file('//srv-ott3/Projects/12828.101 English Wabigoon River/03_Data/02_Physical/16_Waterline/Centerline_for_Modelling_merge_UTMZ15.shp')
# Split any multi-part geometries into individual LineString rows.
river_centerlineExploded = river_centerline.explode(ignore_index=True)
# Hand-picked row order ([4, 3, 1, 2]) chains the component lines end to end;
# column 1 is the geometry column.  NOTE(review): row 0 is deliberately
# excluded here — confirm that is intentional.
tempMulti = river_centerlineExploded.iloc[[4, 3, 1, 2], 1]
# Put the sub-line coordinates into a list of sublists
outcoords = [list(i.coords) for i in tempMulti]
# Flatten the list of sublists and use it to make a new merged line
river_centerline_merge = geometry.LineString([i for sublist in outcoords for i in sublist])
river_centerline_merge_gpd = gp.GeoSeries(river_centerline_merge)
# Interpolate the centerline to a point every 25 m
distance_delta = 25
distances = np.arange(0, river_centerline_merge.length, distance_delta)
# Append boundary[1] (the final endpoint) so the last partial segment is kept.
# NOTE(review): indexing LineString.boundary is shapely 1.x behaviour;
# shapely 2.x needs .boundary.geoms[1] — confirm pinned shapely version.
points = [river_centerline_merge.interpolate(distance) for distance in distances] + [river_centerline_merge.boundary[1]]
# multipoint = ops.unary_union(points)
interpolated_centerline = geometry.LineString(points)
# Turn the linestring into a geodataframe and add distance
# river_centerline_merge_gpd2 =\
#     gp.GeoDataFrame(geometry=gp.points_from_xy(
#         river_centerline_merge.xy[0], river_centerline_merge.xy[1], crs="EPSG:32615"))
#
# river_centerline_merge_gpd2['Distance'] = river_centerline_merge_gpd2.distance(river_centerline_merge_gpd2.shift(1))
# river_centerline_merge_gpd2.loc[0, 'Distance'] = 0
#
# river_centerline_merge_gpd2['RiverKM'] = river_centerline_merge_gpd2['Distance'].cumsum()
# Turn the interpolated linestring into a geodataframe of points and add the
# cumulative along-channel distance ('RiverKM') from point-to-point spacing.
river_centerline_merge_gpd3 = \
    gp.GeoDataFrame(geometry=gp.points_from_xy(
        interpolated_centerline.xy[0], interpolated_centerline.xy[1]), crs="EPSG:32615")
river_centerline_merge_gpd3['Distance'] = river_centerline_merge_gpd3.distance(river_centerline_merge_gpd3.shift(1))
# First row has no predecessor, so its distance is NaN — zero it explicitly.
river_centerline_merge_gpd3.loc[0, 'Distance'] = 0
river_centerline_merge_gpd3['RiverKM'] = river_centerline_merge_gpd3['Distance'].cumsum()
#%% Read in the processed waterline shapefile
# geometry[0] is treated as the south bank line, geometry[1] as the north
# bank line throughout the rest of the script.
river_edges = gp.read_file('//srv-ott3/Projects/12828.101 English Wabigoon River/03_Data/02_Physical/16_Waterline/WaterlineToLines_Grass_Simple5m_RevB.shp')
# Interpolate the south edge to a point every 10 m
distance_delta = 10
distances = np.arange(0, river_edges.geometry[0].length, distance_delta)
# Append the line's final endpoint so the last partial segment is kept.
points = [river_edges.geometry[0].interpolate(distance) for distance in distances] + [river_edges.geometry[0].boundary[1]]
# multipoint = ops.unary_union(points)
interpolated_edges = geometry.LineString(points)
# Create a geodataframe of points for the south edge; the extra
# 'SouthBoundary' column keeps the edge geometry after later spatial joins
# replace the active geometry column.
river_edges_south = \
    gp.GeoDataFrame(geometry=gp.points_from_xy(
        interpolated_edges.xy[0], interpolated_edges.xy[1]), crs="EPSG:32615")
river_edges_south['SouthBoundary'] = river_edges_south['geometry']
# Interpolate the north edge to a point every 10 m
distances = np.arange(0, river_edges.geometry[1].length, distance_delta)
points = [river_edges.geometry[1].interpolate(distance) for distance in distances] + [river_edges.geometry[1].boundary[1]]
# multipoint = ops.unary_union(points)
interpolated_edges = geometry.LineString(points)
# Create a geodataframe of points for the north edge (same pattern as south).
river_edges_north = \
    gp.GeoDataFrame(geometry=gp.points_from_xy(
        interpolated_edges.xy[0], interpolated_edges.xy[1]), crs="EPSG:32615")
river_edges_north['NorthBoundary'] = river_edges_north['geometry']
# Earlier non-interpolated variant, kept for reference:
# river_edges_south = \
#     gp.GeoDataFrame(geometry=gp.points_from_xy(
#         river_edges.loc[0, 'geometry'].xy[0], river_edges.loc[0, 'geometry'].xy[1]), crs="EPSG:32615")
# river_edges_south['SouthBoundary'] = river_edges_south['geometry']
# river_edges_north = \
#     gp.GeoDataFrame(geometry=gp.points_from_xy(
#         river_edges.loc[1, 'geometry'].xy[0], river_edges.loc[1, 'geometry'].xy[1]), crs="EPSG:32615")
# river_edges_north['NorthBoundary'] = river_edges_north['geometry']
#%% Plot the centerline and the waterline (visual sanity check)
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(20, 14))
fig.patch.set_facecolor('white')
# Full-domain extent — immediately overridden by the zoomed extent below;
# kept so the wide view can be restored by commenting out the next two lines.
axes.set_xlim(350000, 520000)
axes.set_ylim(5510000, 5580000)
axes.set_xlim(505438, 508368)
axes.set_ylim(5518465, 5520152)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32615')
# Per-component centerline plots, kept for debugging the merge order:
# river_centerlineExploded.loc[[0], 'geometry'].plot(ax=axes, color='tab:blue')
# river_centerlineExploded.loc[[1], 'geometry'].plot(ax=axes, color='tab:orange')
# river_centerlineExploded.loc[[2], 'geometry'].plot(ax=axes, color='tab:green')
# river_centerlineExploded.loc[[3], 'geometry'].plot(ax=axes, color='tab:red')
# river_centerlineExploded.loc[[4], 'geometry'].plot(ax=axes, color='tab:purple')
# South (red) and north (orange) waterline edges.
river_edges.loc[[0], 'geometry'].plot(ax=axes, color='tab:red', linestyle='-')
river_edges.loc[[1], 'geometry'].plot(ax=axes, color='tab:orange', linestyle='-')
#
# river_centerline_merge_gpd.plot(ax=axes, marker='s')
#
# river_centerline_merge_gpd2.plot(ax=axes, column='RiverKM', marker='o')
#
# Interpolated centerline points coloured by along-channel distance.
river_centerline_merge_gpd3.plot(ax=axes, column='RiverKM', marker='^')
plt.show()
#%% Find the closest point on the centerline to the north and south waterlines
# Left join keeps every centerline point; 'South_Distance' records the gap to
# the nearest south-edge point.
river_centerline_Bounds = river_centerline_merge_gpd3.sjoin_nearest(river_edges_south, how='left', distance_col='South_Distance')
# Drop 'index_right' added by the join — the next sjoin_nearest would fail on
# a duplicate column name otherwise.
river_centerline_Bounds = river_centerline_Bounds.drop(['index_right'], axis=1)
river_centerline_Bounds = river_centerline_Bounds.sjoin_nearest(river_edges_north, how='left', distance_col='North_Distance')
river_centerline_Bounds = river_centerline_Bounds.drop(['index_right'], axis=1)
#%% Add points to the centerline gdf at 10%, 20%, ..., 90% of the distance
# between each bank point and the centerline point (transect sample points).
# Take x and y for south distances from centerline
for t_percent in range(10, 100, 10):
    transect_name = 'transectSouth_' + str(t_percent)
    # Linear interpolation: bank + fraction * (centerline - bank).
    river_centerline_Bounds[transect_name] = gp.points_from_xy((river_centerline_Bounds.geometry.x -
        river_centerline_Bounds['SouthBoundary'].x) * t_percent * 0.01 + river_centerline_Bounds['SouthBoundary'].x,
        (river_centerline_Bounds.geometry.y -
        river_centerline_Bounds['SouthBoundary'].y) * t_percent * 0.01 + river_centerline_Bounds['SouthBoundary'].y,
        crs="EPSG:32615")
# Take x and y for north distances from centerline
for t_percent in range(10, 100, 10):
    transect_name = 'transectNorth_' + str(t_percent)
    river_centerline_Bounds[transect_name] = gp.points_from_xy((river_centerline_Bounds.geometry.x -
        river_centerline_Bounds['NorthBoundary'].x) * t_percent * 0.01 + river_centerline_Bounds['NorthBoundary'].x,
        (river_centerline_Bounds.geometry.y -
        river_centerline_Bounds['NorthBoundary'].y) * t_percent * 0.01 + river_centerline_Bounds['NorthBoundary'].y,
        crs="EPSG:32615")
#%% Plotting check: transect point fans against the banks and centerline
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(20, 14))
fig.patch.set_facecolor('white')
axes.set_xlim(505438, 508368)
axes.set_ylim(5518465, 5520152)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32615')
# Only rows 1-500 are plotted to keep the figure legible.
for t_percent in range(10, 100, 10):
    transect_name = 'transectSouth_' + str(t_percent)
    river_centerline_Bounds.loc[1:500, transect_name].plot(ax=axes)
    transect_name = 'transectNorth_' + str(t_percent)
    river_centerline_Bounds.loc[1:500, transect_name].plot(ax=axes)
river_edges.loc[[0], 'geometry'].plot(ax=axes, color='k', linestyle='-')
river_edges.loc[[1], 'geometry'].plot(ax=axes, color='k', linestyle='-')
river_centerline_merge_gpd.plot(ax=axes, color='k')
river_centerline_merge_gpd3.plot(ax=axes, column='RiverKM', marker='^', color='k')
plt.show()
#%% Read in bathymetry observations (whitespace-delimited x y z file)
bathy_obs = pd.read_csv('//oak-inundation/D/12828.101 EWR_Delft3FM_MTB/Bathy/combined_ZeroRemoved_RevC.xyz',
                        delim_whitespace=True, header=None, names=['x', 'y', 'z'])
# Create a geodataframe; x/y become the geometry so the raw columns can go.
bathy_obs_gdf = gp.GeoDataFrame(bathy_obs, geometry=gp.points_from_xy(
    bathy_obs.x, bathy_obs.y), crs="EPSG:32615")
bathy_obs_gdf = bathy_obs_gdf.drop(['x', 'y'], axis=1)
#%% Find the closest observation (to a 50 m max) on each transect line
river_transect_bathy = dict()
# Does not include 0% (bank) or 100% (centerline) positions
for t_percent in range(10, 100, 10):
    # Set geometry to the specific transect column for the merge
    transect_name = 'transectSouth_' + str(t_percent)
    transect_merge_tmp = river_centerline_Bounds.loc[:, ['RiverKM', transect_name]]
    transect_merge_tmp.set_geometry(transect_name, inplace=True)
    # Create a 2.5 m buffered polygon around the transect point string
    transect_buffer = geometry.LineString(transect_merge_tmp.geometry).buffer(2.5, resolution=8)
    transect_buffer_gdf = gp.GeoDataFrame(geometry=[transect_buffer], crs="EPSG:32615")
    # Pre-filter: keep only observations inside the buffer before the
    # nearest-join, which keeps sjoin_nearest cheap on the full obs set.
    bathy_obs_gdf_tmp = bathy_obs_gdf.sjoin(transect_buffer_gdf, how='inner', predicate='within')
    bathy_obs_gdf_tmp = bathy_obs_gdf_tmp.drop(['index_right'], axis=1)
    river_transect_bathy[transect_name] = transect_merge_tmp.sjoin_nearest(bathy_obs_gdf_tmp, how='left', max_distance=50)
    river_transect_bathy[transect_name] = river_transect_bathy[transect_name].drop(['index_right'], axis=1)
    # Release the temporary filtered frame
    bathy_obs_gdf_tmp = []
    # # Rename bathy column and drop index
    # river_centerline_Bounds.rename(columns={'z': transect_name + '_z'}, inplace=True)
    # river_centerline_Bounds = river_centerline_Bounds.drop(['index_right'], axis=1)
    # Repeat for the north transect at the same percentage
    transect_name = 'transectNorth_' + str(t_percent)
    transect_merge_tmp = river_centerline_Bounds.loc[:, ['RiverKM', transect_name]]
    transect_merge_tmp.set_geometry(transect_name, inplace=True)
    transect_buffer = geometry.LineString(transect_merge_tmp.geometry).buffer(2.5, resolution=8)
    transect_buffer_gdf = gp.GeoDataFrame(geometry=[transect_buffer], crs="EPSG:32615")
    bathy_obs_gdf_tmp = bathy_obs_gdf.sjoin(transect_buffer_gdf, how='inner', predicate='within')
    bathy_obs_gdf_tmp = bathy_obs_gdf_tmp.drop(['index_right'], axis=1)
    river_transect_bathy[transect_name] = transect_merge_tmp.sjoin_nearest(bathy_obs_gdf_tmp, how='left', max_distance=50)
    river_transect_bathy[transect_name] = river_transect_bathy[transect_name].drop(['index_right'], axis=1)
    bathy_obs_gdf_tmp = []
    # # Rename bathy column and drop index
    # river_centerline_Bounds.rename(columns={'z': transect_name + '_z'}, inplace=True)
    # river_centerline_Bounds = river_centerline_Bounds.drop(['index_right'], axis=1)
    # Progress indicator — the joins are slow on the full dataset
    print(t_percent)
#%% Plotting check: transect points coloured by assigned bathymetry
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(20, 14))
fig.patch.set_facecolor('white')
axes.set_xlim(505438, 508368)
axes.set_ylim(5518465, 5520152)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32615')
bathy_obs_gdf.loc[:, :].plot(ax=axes, column='z', vmin=320, vmax=350, marker='^')
for t_percent in range(10, 100, 10):
    transect_name = 'transectSouth_' + str(t_percent)
    river_transect_bathy[transect_name].loc[1:500, :].plot(ax=axes, column='z', vmin=320, vmax=350)
    transect_name = 'transectNorth_' + str(t_percent)
    river_transect_bathy[transect_name].loc[1:500, :].plot(ax=axes, column='z', vmin=320, vmax=350)
plt.show()
#%% Merge the lines to a 1D profile and save as xyz (space-separated csv)
bathy_FileName = 'C:/Users/arey/files/Projects/Grassy Narrows/Bathymetry/AJMR_Interpolated_bathy_25m.xyz'
shape_FileName = 'C:/Users/arey/files/Projects/Grassy Narrows/Bathymetry/'
# bathy_FileName = '//oak-inundation/D/12828.101 EWR_Delft3FM_MTB/Bathy/AJMR_Interpolated_bathy_10m.xyz'
# NOTE(review): mode='a' appends on every run — re-running this cell without
# deleting the output file duplicates rows; confirm the file is cleared first.
for t_percent in range(10, 100, 10):
    transect_name = 'transectSouth_' + str(t_percent)
    # Convert to a (3, N) numpy array of x, y, z
    bathyNP = np.vstack((river_transect_bathy[transect_name].geometry.x.values,
                         river_transect_bathy[transect_name].geometry.y.values,
                         river_transect_bathy[transect_name].z.values))
    # Transpose to (N, 3) rows
    bathyNP = bathyNP.T
    # Drop rows with no matched observation (NaN z), then append via pandas
    pd.DataFrame(bathyNP[~np.isnan(bathyNP[:, 2]), :]).to_csv(bathy_FileName, sep=' ', header=False, index=False, mode='a')
    # Write to shape
    # river_transect_bathy[transect_name].to_file(shape_FileName + transect_name + '.shp')
    # Repeat for the north transect
    transect_name = 'transectNorth_' + str(t_percent)
    bathyNP = np.vstack((river_transect_bathy[transect_name].geometry.x.values,
                         river_transect_bathy[transect_name].geometry.y.values,
                         river_transect_bathy[transect_name].z.values))
    bathyNP = bathyNP.T
    pd.DataFrame(bathyNP[~np.isnan(bathyNP[:, 2]), :]).to_csv(bathy_FileName, sep=' ', header=False, index=False, mode='a')
    # river_transect_bathy[transect_name].to_file(shape_FileName + transect_name)

View File

@ -27,6 +27,7 @@ runLog = pd.read_excel(pth.as_posix(), "ModelLog")
# %% Read Map Model Results # %% Read Map Model Results
modelPlot = [8, 10, 11, 12, 13, 14] modelPlot = [8, 10, 11, 12, 13, 14]
modelPlot = [15, 16, 17] modelPlot = [15, 16, 17]
modelPlot = [18]
dfs_list = [None] * (max(modelPlot) + 1) dfs_list = [None] * (max(modelPlot) + 1)
ds_list = [None] * (max(modelPlot) + 1) ds_list = [None] * (max(modelPlot) + 1)
@ -285,7 +286,7 @@ fig.savefig('//srv-ott3.baird.com/Projects/13539.101 L\'Ansecoy Bay, Mustique/06
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckex9vtri0o6619p55sl5qiyv/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw' mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckex9vtri0o6619p55sl5qiyv/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
x, y, arrow_length = 0.93, 0.95, 0.12 x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12) fontprops = fm.FontProperties(size=12)
modelPlot = [15, 16, 17] modelPlot = [18]
tStepsPlot = np.zeros((5, 6)) tStepsPlot = np.zeros((5, 6))
tStepsPlot[0, :] = range(110, 116) tStepsPlot[0, :] = range(110, 116)
@ -301,7 +302,7 @@ for m in modelPlot:
if m == 8 or m == 17: if m == 8 or m == 17:
disPlot = 7 disPlot = 7
elif m == 11 or m == 15: elif m == 11 or m == 15 or m == 18:
disPlot = 13 disPlot = 13
elif m == 14 or m == 16: elif m == 14 or m == 16:
disPlot = 8 disPlot = 8

View File

@ -46,23 +46,35 @@ def find_time_var(var, time_basename='time'):
importPaths = ['C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Great House', importPaths = ['C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Great House',
'C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Greensleeves', 'C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Greensleeves',
'C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Old Queens Fort'] 'C:/Users/arey/files/Projects/West Coast/Monitor_Feb/Old Queens Fort',
'C:/Users/arey/files/Projects/West Coast/18_03_2022/Great House',
'C:/Users/arey/files/Projects/West Coast/18_03_2022/Greensleeves',
'C:/Users/arey/files/Projects/West Coast/18_03_2022/Old Queens Fort']
siteNames = ['Great House', siteNames = ['Great House',
'Greensleeves',
'Old Queens Fort',
'Great House',
'Greensleeves', 'Greensleeves',
'Old Queens Fort'] 'Old Queens Fort']
timeLabels= ['February', timeLabels= ['February',
'February', 'February',
'February'] 'February',
'March',
'March',
'March']
wave_bts_file = [ wave_bts_file = [
None,
None,
None,
None, None,
None, None,
None] None]
for s in range(0, 3): for s in range(0, 6):
## Define master import path ## Define master import path
importPath = importPaths[s] importPath = importPaths[s]
siteName = siteNames[s] siteName = siteNames[s]
@ -74,10 +86,14 @@ for s in range(0, 3):
GPS_File = None GPS_File = None
for i in range(0, len(importFiles)): for i in range(0, len(importFiles)):
if '.csv' in importFiles[i] and 'Summary' not in importFiles[i]: if '.csv' in importFiles[i] and 'Summary' not in importFiles[i] and 'export' not in importFiles[i]:
OBS_File = importFiles[i] OBS_File = importFiles[i]
elif '.csv' in importFiles[i] and 'export' in importFiles[i]:
GPS_File = importFiles[i]
GPS_Type = 'CSV'
elif '.xls' in importFiles[i] and 'Summary' not in importFiles[i]: elif '.xls' in importFiles[i] and 'Summary' not in importFiles[i]:
GPS_File = importFiles[i] GPS_File = importFiles[i]
GPS_Type = 'xls'
#%% Obs Import Data #%% Obs Import Data
if OBS_File is not None: if OBS_File is not None:
@ -86,19 +102,35 @@ for s in range(0, 3):
# Drop rows with metadata # Drop rows with metadata
Obs_dat =Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()] Obs_dat =Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()]
# Set Time Zone # Set Time Zone for Sensor. Sometimes it is set to UTC, sometimes it is set to EST
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados').dt.tz_convert('UTC') if s < 3:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados').dt.tz_convert('UTC')
else:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('UTC')
# Set Index # Set Index
Obs_dat.set_index('DateTime', inplace=True) Obs_dat.set_index('DateTime', inplace=True)
#%% GPS Import Data #%% GPS Import Data
if GPS_File is not None: if GPS_File is not None:
GPS = pd.read_excel(importPath + '/' + GPS_File, header=0) if GPS_Type == 'CSV':
#convert GPS data to geodataframe GPS = pd.read_csv(importPath + '/' + GPS_File,
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(GPS.x, GPS.y, crs="EPSG:4326")) header=None, names=['Index', 'Date1', 'Time1', 'Date2', 'Time2', 'Northing', 'North', 'Easting', 'East', 'Var1', 'Var2'])
#convert GPS data to geodataframe
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['time']).dt.tz_localize('UTC') GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))
elif GPS_Type == 'xls':
GPS = pd.read_excel(importPath + '/' + GPS_File, header=0)
#convert GPS data to geodataframe
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(GPS.x, GPS.y, crs="EPSG:4326"))
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['time']).dt.tz_localize('UTC')
# Set Datetime as index
GPS_gdf.set_index('DateTime', inplace=True) GPS_gdf.set_index('DateTime', inplace=True)
# Sort by time # Sort by time
@ -154,8 +186,8 @@ for s in range(0, 3):
maxTime = Obs_geo[Obs_geo['inArea']==True].index[-1] maxTime = Obs_geo[Obs_geo['inArea']==True].index[-1]
else: else:
# First and last times if no GPS data # First and last times if no GPS data
minTime = Obs_geo.iloc[20, 0] minTime = Obs_geo.index[20]
maxTime = Obs_geo.iloc[-20, 0] maxTime = Obs_geo.index[-20]
metDate = minTime - timedelta( metDate = minTime - timedelta(
hours = minTime.hour % 6, hours = minTime.hour % 6,
@ -288,14 +320,14 @@ for s in range(0, 3):
params = ['Depth', 'PSU', 'CH8:Oxygen_Saturation(%)', 'CH1:Temperature(degC)', params = ['Depth', 'PSU', 'CH8:Oxygen_Saturation(%)', 'CH1:Temperature(degC)',
'CH6:Turbidity(NTU)', 'CH2:Chlorophyll_a(ppb)'] 'CH6:Turbidity(NTU)', 'CH2:Chlorophyll_a(ppb)']
paramName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ saturation [%]', 'Temperature [degC]', paramName = ['Depth [m]', 'Salinity [PSU]', 'Dissolved O₂ sat [%]', 'Temperature [degC]',
'Turbidity [FTU]', 'Chl-a [ppb]'] 'Turbidity [FTU]', 'Chl-a [ppb]']
parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae] parmCmap = [cmo.deep, 'cividis', cmo.dense, cmo.thermal, cmo.turbid, cmo.algae]
# paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0] # paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0]
# paramMax = [1.0, 36.0, 34.0, 31.0, 20.0, 1.0] # paramMax = [1.0, 36.0, 34.0, 31.0, 20.0, 1.0]
paramMin = [0.0, 34.0, 100, 26.0, 0, 0] paramMin = [0.0, 33.0, 100, 26.0, 0, 0]
paramMax = [1.0, 36.0, 130, 29.0, 50.0, 12000] paramMax = [1.0, 36.0, 130, 30.0, 75.0, 12000]
fig.patch.set_facecolor('white') fig.patch.set_facecolor('white')
@ -362,7 +394,7 @@ for s in range(0, 3):
axes[paramIDX].set_title(paramName[paramIDX]) axes[paramIDX].set_title(paramName[paramIDX])
axes[paramIDX].set_xlabel('') axes[paramIDX].set_xlabel('')
axes[paramIDX].set_ylim(paramMin[paramIDX], paramMax[paramIDX]) axes[paramIDX].set_ylim(paramMin[paramIDX], paramMax[paramIDX])
axes[paramIDX].legend(loc='upper right') axes[paramIDX].legend(loc='upper left')
# axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y')) # axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))
@ -400,17 +432,22 @@ for s in range(0, 3):
plt.show() plt.show()
dfOut.to_excel(importPath + '/Summary_Stats_' + siteName + '.xlsx') outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]
dfOutFormat.to_excel(importPath + '/Summary_StatsFormat_' + siteName + '.xlsx')
dfOut.to_csv(importPath + '/Summary_Stats_' + siteName + '.csv') if not os.path.exists(outPath):
os.mkdir(outPath)
if not os.path.exists(importPath + '/Figures'): dfOut.to_excel(outPath + '/Summary_Stats_' + siteName + '.xlsx')
os.mkdir(importPath + '/Figures') dfOutFormat.to_excel(outPath + '/Summary_StatsFormat_' + siteName + '.xlsx')
fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.pdf', dfOut.to_csv(outPath + '/Summary_Stats_' + siteName + '.csv')
if not os.path.exists(outPath + '/Figures'):
os.mkdir(outPath + '/Figures')
fig.savefig(outPath + '/Figures/SummaryTimeSeries_' + siteName + '.pdf',
bbox_inches='tight') bbox_inches='tight')
fig.savefig(importPath + '/Figures/SummaryTimeSeries_' + siteName + '.png', fig.savefig(outPath + '/Figures/SummaryTimeSeries_' + siteName + '.png',
bbox_inches='tight', dpi=500) bbox_inches='tight', dpi=500)
@ -476,14 +513,22 @@ for s in range(0, 3):
ha='center', va='center', fontsize=35, ha='center', va='center', fontsize=35,
xycoords=ax[paramIDX].transAxes) xycoords=ax[paramIDX].transAxes)
plt.show() fig.suptitle(siteName + ', ' + minTime.strftime('%b %#d, %Y') + ' (' + timeLabel + ')', fontsize=30)
if not os.path.exists(importPath + '/Figures'):
os.mkdir(importPath + '/Figures')
fig.savefig(importPath + '/Figures/SummaryMap_' + siteName + '.pdf', plt.show()
outPath = '//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/' + timeLabels[s]
if not os.path.exists(outPath):
os.mkdir(outPath)
if not os.path.exists(outPath + '/Figures'):
os.mkdir(outPath + '/Figures')
fig.savefig(outPath + '/Figures/SummaryMap_' + siteName + '.pdf',
bbox_inches='tight') bbox_inches='tight')
fig.savefig(importPath + '/Figures/SummaryMap_' + siteName + '.png', fig.savefig(outPath + '/Figures/SummaryMap_' + siteName + '.png',
bbox_inches='tight', dpi=500) bbox_inches='tight', dpi=500)
#%% Summary Sheet #%% Summary Sheet

0
Mustique/environment.yml Normal file
View File

BIN
NTC_DFM/EFDC_AirTemp.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

BIN
NTC_DFM/EFDC_NorthBound.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 46 KiB

BIN
NTC_DFM/EFDC_Solar_Rad.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 72 KiB

BIN
NTC_DFM/EFDC_SouthBound.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 47 KiB

View File

@ -1,11 +1,11 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
""" """
@author: aforsythe Script to compare EFDC boundary conditions against observations
Copied from "P:/11934.201 Newtown Creek TPP Privileged and Confidential/05_Analyses/07 ADCP/NC_CurrentMeter_All_Phase1_all_data_2012_05_20/ADCP_Plot_v4.py"
_AJMR
""" """
import noaa_coops as noaa
from solarpy import beam_irradiance, irradiance_on_plane
from datetime import datetime, timedelta
import pandas as pd import pandas as pd
import matplotlib.pyplot as plt import matplotlib.pyplot as plt
@ -22,745 +22,163 @@ import os
from shapely.geometry import Point from shapely.geometry import Point
import pickle import pickle
# %% Read in data
dataPath = '//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/05_Analyses/07 ADCP/NC_CurrentMeter_All_Phase1_all_data_2012_05_20/'
df = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_mobile_2013_05_02.xlsx', # %% Import NOAA CO-OPS data at the battery
sheet_name='mag_all_mobile') theBattery = noaa.Station(8518750)
# Convert to geodataframe df_air_temperature = theBattery.get_data(
gdf = gp.GeoDataFrame(df, geometry=gp.points_from_xy(df.Longitude, df.Latitude, crs="EPSG:4326")) begin_date="20120101",
end_date="20121231",
product="air_temperature",
units="metric",
time_zone="gmt")
#convert data to CRS of D3D df_air_temperature['DateTime'] = df_air_temperature.index
gdf.geometry = gdf.geometry.to_crs("EPSG:32118") df_air_temperature['DateTime'] = df_air_temperature['DateTime'].dt.tz_localize('UTC')
# Add new cols to geodataframe
gdf['Distance'] = np.zeros([len(gdf), 1])
gdf['DistanceSmth'] = np.zeros([len(gdf), 1])
gdf['dist'] = np.zeros([len(gdf), 1])
gdf['date'] = np.zeros([len(gdf), 1])
# Set Date df_water_temperature = theBattery.get_data(
gdf['date'] = gdf['year'].astype(str) + '-' + gdf['mon'].astype(str).str.zfill(2) + '-' + gdf['day'].astype( begin_date="20120101",
str).str.zfill(2) end_date="20121231",
product="water_temperature",
units="metric",
time_zone="gmt")
# Column names to distances data for plotting df_water_temperature['DateTime'] = df_water_temperature.index
depths1 = gdf.columns[11:43] # ADCP sensor depth reading column names to list df_water_temperature['DateTime'] = df_water_temperature['DateTime'].dt.tz_localize('UTC')
depths = np.array([float(d.replace('m', '')) for d in depths1]) * -1
#loop through all transects # %% Read in EFDC met bounds
# WEATHER DATA FOR TIME SERIES N
transectCount = 0 # TASER(M,N) = TIME WHEN THE DATA WAS COLLECTED, IT IS IN DAY IF TCASER(N) IS 86400.
transects = gdf['transect'].unique() # PATM(M,N) = ATMOSPHERIC [PRESSURE MILLIBAR]
# TDRY(M,N) = DRY BULB ATMOSPHERIC TEMPERATURE [DEGREE C) ISTOPT(2)=1 OR EQUIL TEMP
kernel = Gaussian2DKernel(x_stddev=0.25) # ISTOPT(2)=2; AQ VERSION SETS ISTOPT(2)=1.
# TWET(M,N) = WET BULB ATM TEMP IF IRELH=0, RELATIVE HUMIDITY (BETWEEN 0 AND 1) IF
for transect_id in range(1, 2): #transects: # IRELH=1
# Select a given trasect # RAIN(M,N) = RAIN FALL RATE LENGTH/TIME, IT IS INCH/DAY IF RAINCVT IS SET TO 7.055560e-006
tMask = (gdf['transect']==transect_id) # (=0.0254/3600.)
# Remove rows without locations # EVAP(M,N) = EVAPORATION RATE IF EVAPCVT>0.
tMask[pd.isna(gdf['Latitude'])] = False # SOLSWR(M,N) = SOLAR SHORT WAVE RADIATION AT WATER SURFACE ENERGY FLUX/UNIT AREA.
# IT IS IN W/M2 IF SOLRCVT = 1.
# Find distance betwen points in m # CLOUD(M,N) = FRACTIONAL CLOUD COVER
gdf.loc[tMask, 'Distance'] = gdf[tMask].distance(gdf[tMask].shift()) efdc_air_in = pd.read_csv('//ott-athena/E/INPUT_FILES/BC_WEATHER/20191206/files/NC_bc_weather_2012_191206.inp',
delim_whitespace=True, skiprows=10,
# Many of the points are recorded as being at the same location... names=['TASER', 'PATM', 'TDRY', 'TWET', 'RAIN', 'EVAP', 'SOLSWR', 'CLOUD'])
# Where the distance is zero, set it to half of the following distance
# Find zeros and iloc after zeros
zeroDist = (gdf['Distance'] == 0) & tMask
zeroDistShift = zeroDist.shift()
zeroDistShift.iloc[0] = False
distShift = gdf['Distance'].shift(-1)
distShift.iloc[-1] = False
gdf.loc[tMask, 'DistanceSmth'] = gdf['Distance'][tMask]
# Set zeros to half of following
gdf.loc[zeroDist, 'DistanceSmth'] = distShift[zeroDist]/2
# Set following to half
gdf.loc[zeroDistShift, 'DistanceSmth'] = gdf['Distance']/2
# Set initial to zero
gdf.loc[gdf[tMask].index[0], 'DistanceSmth'] = 0
# If final location is duplicate, set to previous value
if gdf.loc[gdf[tMask].index[-1], 'Distance']==0:
gdf.loc[gdf[tMask].index[-1], 'DistanceSmth'] = gdf.loc[gdf[tMask].index[-2], 'DistanceSmth']
# Get cumulative sum of distances
gdf.loc[tMask, 'dist'] = gdf.loc[tMask, 'DistanceSmth'].cumsum()
# get velocity data all rows columns 11-43
V = gdf.values[tMask, 11:43].astype(float)
# #__________________________________________________________________________________________________________________________
# %% Plotting
if transectCount == 0:
fig, axes = plt.subplots(nrows=5, ncols=4, figsize=(16, 8))
fig.tight_layout(pad=2.5)
ax = axes.flat
colormap = 'jet'
vmin=0
vmax=0.5
VS = sp.ndimage.filters.gaussian_filter(V, [1, 1], mode='nearest')
# vDatEnd = (~np.isnan(V)).cumsum(1).argmax(1).astype(int) + 1
# VS = convolve(V, kernel)
# for i in range(0, len(VS)):
# VS[i, vDatEnd[i]:-1] = np.nan
pltDat = ax[transectCount].pcolormesh(gdf.loc[tMask, 'dist'], depths, np.transpose(VS),
shading='auto', vmin=vmin, vmax=vmax, cmap=colormap)
cbar = fig.colorbar(pltDat, ax=ax[transectCount],
shrink=0.95,aspect=5)
cbar.set_label('Magnitude [m/s]')
ax[transectCount].set_xlabel('Distance Along Transect [m]')
ax[transectCount].set_ylabel('Depth below WSL [m]')
stationStart = next((i for i, j in enumerate(tMask) if j), None)
ax[transectCount].set_title(gdf.loc[stationStart, 'station'])
ax[transectCount].set_ylim(-6, 0)
transectCount = transectCount + 1
plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/Transects221-241.png',
# bbox_inches='tight', dpi=300)
# %% Save Transects
gdf.loc[:, df.columns != 'geometry'].to_xarray().to_netcdf(
'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/Transects.nc')
# %% Load in moored data
df_moored_data = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_moored_2013_05_20.xlsx',
sheet_name='mag_all_moored')
# Shift col names to put second back in
colIN = list(df_moored_data.columns)
colOUT = colIN[0:6] + ['second'] + colIN[6:-1]
df_moored_data.columns = colOUT
### Moored current meter locations from here:
### file://srv-ott3/Projects/11934.201%20Newtown%20Creek%20TPP%20%E2%80%93%20Privileged%20and%20Confidential/03_Data/01_BkgrdReports/NC_DRAFT_DSR_Submittal_No_3_2013-07-03.pdf
### Table 3-1, PDF page 65
### Locations change between deployment 1-9 and 10/11
df_moored = pd.DataFrame(
{'depOrig': ['NC083CM', 'NC081CM', 'NC082CM', 'NC086CM', 'EK023CM'],
'depCurr': ['NC086CM', 'NC087CM', 'NC088CM', 'NC089CM', 'EK023CM'],
'Northing': [208519.95, 206083.24, 203835.10, 201381.55, 200827.33],
'Easting': [996198.97, 1000959.45, 1004387.42, 1005283.07, 1004644.90],
'bedElev': [-16, -17, -20, -18, -20]})
gdf_moored_loc = gp.GeoDataFrame(
df_moored, geometry=gp.points_from_xy(df_moored.Easting, df_moored.Northing), crs="EPSG:2263")
gdf_moored_loc.geometry = gdf_moored_loc.geometry.to_crs("EPSG:32118")
# Loop through Station IDs for deployments 1-9 and assign locations
# for d in range(1,10):
# depMask = df_moored_data['deployment'] == ('dep' + str(d))
# for stationIDX, station in enumerate(df_moored['depOrig']):
# stationMask = (df_moored_data['station'] == station) & depMask
#
# # Assign geography to station plus deployment
# df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
# df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
# Loop through Station IDs for deployments 10-11 and assign locations
for d in range(1,12):
depMask = df_moored_data['deployment'] == 'dep' + str(d)
for stationIDX, station in enumerate(df_moored['depCurr']):
stationMask = (df_moored_data['station'] == station) & depMask
# Assign geography to station plus deployment
df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
# Attach geometry (NY State Plane, EPSG:2263) to the moored time series and
# reproject to the D3D model CRS (EPSG:32118).
gdf_moored = gp.GeoDataFrame(
    df_moored_data,
    geometry=gp.points_from_xy(df_moored_data.Easting, df_moored_data.Northing),
    crs="EPSG:2263")
gdf_moored.geometry = gdf_moored.geometry.to_crs("EPSG:32118")
# Combine the separate year/month/day/hour/minute columns into one datetime.
date_parts = (gdf_moored['year'].astype(str) + '-'
              + gdf_moored['month'].astype(str).str.zfill(2) + '-'
              + gdf_moored['day'].astype(str).str.zfill(2) + ' '
              + gdf_moored['hour'].astype(str).str.zfill(2) + ':'
              + gdf_moored['minute'].astype(str).str.zfill(2))
gdf_moored['date'] = pd.to_datetime(date_parts)
# %% ADCP1
# Export to NetCDF, dropping the first column and the trailing geometry/date
# columns that xarray cannot serialise.
gdf_moored.iloc[:, 1:-2].to_xarray().to_netcdf(
    'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADCP1.nc')
# %% Plot moored data
# One pcolormesh panel per moored ADCP station (5 stations): time on x,
# sensor depth on y, colour = velocity magnitude.
# Column names to distances data for plotting
depths1 = gdf_moored.columns[8:51] # ADCP sensor depth reading column names to list
depths_moored = np.array([float(d.replace('m', '')) for d in depths1])
colormap = 'jet'
vmin = 0
vmax = 0.2
fig, axes = plt.subplots(nrows=5, ncols=1, figsize=(16, 8))
fig.tight_layout(pad=2)
# Loop through Station IDs for deployments 1-9 and assign locations
# NOTE(review): the loop actually covers deployments 1-11, not 1-9 as the
# comment above says -- confirm which is intended.
for d in range(1,12):
    depMask = gdf_moored['deployment'] == ('dep' + str(d))
    for stationIDX, station in enumerate(df_moored['depCurr']):
        stationMask = (gdf_moored['station'] == station) & depMask
        # Velocity-magnitude columns (positions 8:51) for this station/deployment
        V = gdf_moored.values[stationMask, 8:51].astype(float)
        # Only plot when this station recorded data in this deployment
        if len(gdf_moored.loc[stationMask, 'date']) != 0:
            pltDat = axes[stationIDX].pcolormesh(gdf_moored.loc[stationMask, 'date'],
                                                 depths_moored, np.transpose(V),
                                                 shading='auto', vmin=vmin, vmax=vmax, cmap=colormap)
            axes[stationIDX].set_ylim(0, 7)
            # mdates is presumably matplotlib.dates imported earlier in the
            # file (import not visible here) -- verify.
            fmt_half_year = mdates.MonthLocator(interval=1)
            axes[stationIDX].xaxis.set_major_locator(fmt_half_year)
            axes[stationIDX].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
            if d == 1:
                axes[stationIDX].set_ylabel('Elevation [m]')
                axes[stationIDX].set_title(station)
            cbar = fig.colorbar(pltDat, ax=axes[stationIDX],
                                shrink=1.1, aspect=3)
            cbar.set_label('Magnitude [m/s]')
            pltDat.set_clim([0, 0.2])
plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADCP_2012.png',
#             bbox_inches='tight', dpi=300)
# %% Load in ADCP2 data
# NOTE(review): this section was corrupted by a bad merge that interleaved two
# code streams on the same physical lines.  The ADCP2 loader and the EFDC
# temperature-boundary processing below were disentangled from that merge --
# verify against version-control history.
# Read in locations
df_adcp2_locs = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/NCP2_ADCP_D1-D3/AQ_LocationsSO_ADCP_ADV_overview_Coords_20150126_AJMR.xlsx',
                              sheet_name='ADCP')
gdf_adcp2_locs = gp.GeoDataFrame(df_adcp2_locs,
                                 geometry=gp.points_from_xy(df_adcp2_locs.X_NYSPLI, df_adcp2_locs.Y_NYSPLI), crs="EPSG:2263")
gdf_adcp2_locs = gdf_adcp2_locs.to_crs("EPSG:32118")
adcp2_data_path = '//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/'
adcp2_paths = ['NCP2_ADCP_D1-D3/ADCP_D1_070314_090814', 'NCP2_ADCP_D1-D3/ADCP_D2_090914_110214',
               'NCP2_ADCP_D3-D4/ADCP_D3_110414_010615', 'NCP2_ADCP_D4-D5/D4_010715_030315',
               'NCP2_ADCP_D4-D5/D5_030415_050515']
# adcp2_gdfs: station code -> variable (file prefix) -> deployment number -> GeoDataFrame
adcp2_gdfs = dict()
for depIDX, adcp2_path in enumerate(adcp2_paths):
    adcp2_files = os.listdir(adcp2_data_path + adcp2_path)  # returns list of files in deployment folder
    for adcp2_file in adcp2_files:
        if '.xls' in adcp2_file or '.csv' in adcp2_file:
            if '.xls' in adcp2_file:
                df_in = pd.read_excel(adcp2_data_path + adcp2_path + '/' + adcp2_file)
            else:
                df_in = pd.read_csv(adcp2_data_path + adcp2_path + '/' + adcp2_file)
            for stationIDX, station in enumerate(df_adcp2_locs['parent_loc_code']):
                # Station code is the three characters after the first underscore
                advStrIDX = adcp2_file.find('_') + 1
                if adcp2_file[advStrIDX:advStrIDX + 3] in station:
                    adcp2_geo_x = np.ones([len(df_in), 1]) * df_adcp2_locs.X_NYSPLI[stationIDX]
                    adcp2_geo_y = np.ones([len(df_in), 1]) * df_adcp2_locs.Y_NYSPLI[stationIDX]
                    # Build a datetime column from the component columns
                    df_in['date'] = pd.to_datetime(df_in['Year'].astype(str) + '-' + df_in['Month'].astype(
                        str).str.zfill(2) + '-' + df_in['Day'].astype(
                        str).str.zfill(2) + ' ' + df_in['Hour'].astype(
                        str).str.zfill(2) + ':' + df_in['Minute'].astype(
                        str).str.zfill(2) + ':' + df_in['Second'].astype(str).str.zfill(2))
                    gdf_in = gp.GeoDataFrame(
                        df_in, geometry=gp.points_from_xy(adcp2_geo_x, adcp2_geo_y), crs="EPSG:2263")
                    if adcp2_file[advStrIDX:advStrIDX + 3] not in adcp2_gdfs:
                        adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]] = dict()
                    if adcp2_file[0:advStrIDX - 1] not in adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]]:
                        adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]][adcp2_file[0:advStrIDX - 1]] = dict()
                    if depIDX + 1 not in adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]][adcp2_file[0:advStrIDX - 1]]:
                        adcp2_gdfs[adcp2_file[advStrIDX:advStrIDX + 3]][adcp2_file[0:advStrIDX - 1]][depIDX + 1] = gdf_in

# %% Process EFDC air-temperature input
# (Recovered from the merge; efdc_air_in appears to be read in earlier in the
# file -- TODO confirm.)
# Remove final row
efdc_air_in.drop(efdc_air_in.tail(1).index, inplace=True)
# Correct type
efdc_air_in['TASER'] = efdc_air_in['TASER'].astype(float)
# Create datetime index: TASER is fractional days since 2012-01-01 (EST -> UTC)
efdc_air_in['DateTime'] = pd.to_datetime(efdc_air_in['TASER'], unit='D', origin=pd.Timestamp('2012-01-01')).\
    dt.tz_localize('EST').dt.tz_convert('UTC')
efdc_air_in.set_index('DateTime', inplace=True)

# %% Read in EFDC Water Temperature Bounds
efdc_water_in = pd.read_csv('//ott-athena/E/INPUT_FILES/TEMPERATURE_FILES/RM_North_RM_South/190423/NC_bc_temp_Yr2012_20190423.inp',
                            delim_whitespace=True, skiprows=16,
                            names=['TASER', 'L1', 'L2', 'L3', 'L4', 'L5', 'L6', 'L7', 'L8', 'L9', 'L10'])
# Find row when the next boundary is reached and split into South/North tables
efdc_water_South = efdc_water_in.iloc[0:np.where(efdc_water_in['TASER'] == 'AQEA_QA_QC')[0][0], :].copy()
efdc_water_North = efdc_water_in.iloc[np.where(efdc_water_in['TASER'] == 'AQEA_QA_QC')[0][0]+2:
                                      np.where(efdc_water_in['TASER'] == 'AQEA_QA_QC')[0][1], :].copy()
# Correct type
efdc_water_South.iloc[:, 0] = efdc_water_South.iloc[:, 0].astype(float)
efdc_water_North.iloc[:, 0] = efdc_water_North.iloc[:, 0].astype(float)
# Create datetime index
efdc_water_South['DateTime'] = pd.to_datetime(efdc_water_South['TASER'], unit='D', origin=pd.Timestamp('2012-01-01')).\
    dt.tz_localize('EST').dt.tz_convert('UTC')
efdc_water_South.set_index('DateTime', inplace=True)
efdc_water_North['DateTime'] = pd.to_datetime(efdc_water_North['TASER'], unit='D', origin=pd.Timestamp('2012-01-01')).\
    dt.tz_localize('EST').dt.tz_convert('UTC')
efdc_water_North.set_index('DateTime', inplace=True)
del efdc_water_in

# %% ADCP2
# Export each station/variable/deployment table to NetCDF (drop the first
# column and the trailing geometry/date columns xarray cannot serialise).
for stationIDX, stat in enumerate(adcp2_gdfs):
    for varIDX, var in enumerate(adcp2_gdfs[stat]):
        for depIDX, depdat in enumerate(adcp2_gdfs[stat][var]):
            ncDat = adcp2_gdfs[stat][var][depdat].iloc[:, 1:-2]
            ncDat.to_xarray().to_netcdf(
                'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADCP2/' +
                stat + '_' + var + '_d' + str(depdat) + '.nc')
gdf_adcp2_locs.to_csv('C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADCP2/ADCP2locations.csv')
# %% Plot ADCP2 Data
fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(9, 8))
fig.tight_layout(pad=2)
colormap = 'jet'
vmin = 0
vmax = 0.2
# NOTE(review): this section was disentangled from a corrupted merge that
# interleaved the ADCP2 plotting loop with the solar-radiation/merge code --
# verify against version-control history.
# Plot one pcolormesh panel per ADCP2 station (magnitude variable only).
for stationIDX, stat in enumerate(adcp2_gdfs):
    for depIDX, depdat in enumerate(adcp2_gdfs[stat]['mag']):
        depths1 = adcp2_gdfs[stat]['mag'][depdat].columns[6:-2]  # ADCP sensor depth reading column names to list
        depths_moored = np.array([float(d.replace('m', '')) for d in depths1])
        V = adcp2_gdfs[stat]['mag'][depdat].values[:, 6:-2].astype(float)
        pltDat = axes[stationIDX].pcolormesh(adcp2_gdfs[stat]['mag'][depdat].loc[:, 'date'],
                                             depths_moored, np.transpose(V),
                                             shading='auto', vmin=vmin, vmax=vmax, cmap=colormap)
        axes[stationIDX].set_ylim(0, 7)
        axes[stationIDX].set_xlim(pd.to_datetime("2014-07-01"), pd.to_datetime('2015-05-15'))
        # axes[stationIDX, depIDX].format_xdata = mdates.DateFormatter('%Y-%m')
        fmt_half_year = mdates.MonthLocator(interval=1)
        axes[stationIDX].xaxis.set_major_locator(fmt_half_year)
        axes[stationIDX].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
        axes[stationIDX].set_title(str(stat))
        axes[stationIDX].set_ylabel('Elevation [m]')
        cbar = fig.colorbar(pltDat, ax=axes[stationIDX],
                            shrink=1.1, aspect=3)
        cbar.set_label('Magnitude [m/s]')
        pltDat.set_clim([0, 0.2])
plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADCP_2014.png',
#             bbox_inches='tight', dpi=300)

# %% Import Clear Sky radiation
vnorm = np.array([0, 0, -1])  # downward-facing plane normal
h = 0  # sea-level
# date = np.arange(datetime(2012, 1, 1), datetime(2012, 12, 31), timedelta(hours=1)).astype(datetime)
solar_date = [datetime(2012, 1, 1) + timedelta(minutes=i) for i in range(0, 60 * 24 * 365, 60)]
solar_date_utc = pd.date_range(start=datetime(2012, 1, 1), end=datetime(2012, 12, 31), freq='1H').tz_localize('EST')
lat = 40.7128  # NYC latitude (original comment said "southern hemisphere", which looks wrong -- verify)
# solar_rad = [beam_irradiance(h, i, lat) for i in solar_date]
solar_rad = [irradiance_on_plane(vnorm, h, i, lat) for i in solar_date]

# %% Merge the dataframes
# met_merged = pd.merge(df_air_temperature, efdc_air_in, how='outer', left_index=True, right_index=True)
met_merged = pd.merge_ordered(df_air_temperature, efdc_air_in, on='DateTime',
                              how='outer', fill_method='ffill')
waterTempSouth_merged = pd.merge_ordered(df_water_temperature, efdc_water_South, on='DateTime',
                                         how='outer', fill_method='ffill')
waterTempNorth_merged = pd.merge_ordered(df_water_temperature, efdc_water_North, on='DateTime',
                                         how='outer', fill_method='ffill')
# %% Load in Water Level Data
# NOTE(review): this section was disentangled from a corrupted merge that
# interleaved the water-level loader with the temperature-comparison figure --
# verify against version-control history.
# Gauge Locations from map
gdf_gaugeLoc = gp.read_file('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/NTC6.kml')
gdf_gaugeLocUSSP = gdf_gaugeLoc.to_crs("EPSG:32118")
# All in Feet NAVD88
df_wl_FFG = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP1_Gauge_Elev_Data_20130521/NC_Gauge_Elev_Data_20130521.xlsx',
                          sheet_name='Field_Facility_Gauge')
# Every record takes the fixed gauge location (KML feature index 2)
gdf_wl_FFG = gp.GeoDataFrame(df_wl_FFG,
                             geometry=gp.points_from_xy(np.ones([len(df_wl_FFG), 1]) * gdf_gaugeLoc['geometry'].x[2],
                                                        np.ones([len(df_wl_FFG), 1]) * gdf_gaugeLoc['geometry'].y[2]), crs="EPSG:4326")
gdf_wl_FFG.geometry = gdf_wl_FFG.geometry.to_crs("EPSG:32118")
df_wl_NGG1 = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP1_Gauge_Elev_Data_20130521/NC_Gauge_Elev_Data_20130521.xlsx',
                           sheet_name='National_Grid_Gauge')
gdf_wl_NGG1 = gp.GeoDataFrame(df_wl_NGG1,
                              geometry=gp.points_from_xy(np.ones([len(df_wl_NGG1), 1]) * gdf_gaugeLoc['geometry'].x[3],
                                                         np.ones([len(df_wl_NGG1), 1]) * gdf_gaugeLoc['geometry'].y[3]), crs="EPSG:4326")
gdf_wl_NGG1.geometry = gdf_wl_NGG1.geometry.to_crs("EPSG:32118")
# Also includes temperature
df_wl_NGG2 = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP2_NG_TideGauge_20160113/NCP2_NG_TideGauge_20160113.xlsx',
                           sheet_name='Sheet1')
gdf_wl_NGG2 = gp.GeoDataFrame(df_wl_NGG2,
                              geometry=gp.points_from_xy(np.ones([len(df_wl_NGG2), 1]) * gdf_gaugeLoc['geometry'].x[3],
                                                         np.ones([len(df_wl_NGG2), 1]) * gdf_gaugeLoc['geometry'].y[3]), crs="EPSG:4326")
gdf_wl_NGG2.geometry = gdf_wl_NGG2.geometry.to_crs("EPSG:32118")
gdf_wl_FFG.to_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/FieldFacility.csv')
gdf_wl_NGG1.to_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid1.csv')
gdf_wl_NGG2.to_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid2.csv')

# %% Plot air/water temperature comparison (recovered from the merge)
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 6))
# efdc_air_in['TDRY'].plot(ax=axes, color='black', label='EFDC Air Temperature')
# df_air_temperature['air_temp'].plot(ax=axes, color='blue', label='Obs at The Battery')
# axes.set_xlim(pd.to_datetime('2012-08-01'), pd.to_datetime('2012-08-31'))
# efdc_water_North['L10'].astype(float).plot(ax=axes, color='blue', label='EFDC North Boundry L10')
# efdc_water_North['L5'].astype(float).plot(ax=axes, color='red', label='EFDC North Boundry L5')
# efdc_water_North['L1'].astype(float).plot(ax=axes, color='green', label='EFDC North Boundry L1')
efdc_water_South['L10'].astype(float).plot(ax=axes, color='blue', label='EFDC South Boundry L10')
efdc_water_South['L5'].astype(float).plot(ax=axes, color='red', label='EFDC South Boundry L5')
efdc_water_South['L1'].astype(float).plot(ax=axes, color='green', label='EFDC South Boundry L1')
df_water_temperature['water_temp'].plot(ax=axes, color='black', label='Obs at The Battery')
axes.set_xlim(pd.to_datetime('2012-08-01'), pd.to_datetime('2012-08-31'))
axes.set_ylabel('Temperature (deg C)')

# %% Plot Water Level Data
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 4))
axes.plot(gdf_wl_FFG.Date_time,
          gdf_wl_FFG['Water Surface Elevation (ft)']*0.3048,
          label='Field Facility Gauge')
axes.plot(gdf_wl_NGG1.Date_time,
          gdf_wl_NGG1['Water Surface Elevation (ft)']*0.3048,
          label='National Grid Gauge Deployment 1')
axes.plot(gdf_wl_NGG2.datetime,
          gdf_wl_NGG2['water_surface_elevation']*0.3048,
          label='National Grid Gauge Deployment 2')
fmt_half_year = mdates.MonthLocator(interval=6)
axes.xaxis.set_major_locator(fmt_half_year)
axes.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
axes.set_ylabel('Water Surface Elevation [mNAVD88]')
axes.set_title('Water Surface Elevation')
axes.legend()
fig.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/WaterLevel.png',
#             bbox_inches='tight', dpi=300)
# %% Load temperature data
df_tdat = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/10_Salinity/tData.xlsx',
                        skiprows=[1])
# Day Month order is reversed
df_tdat['date'] = pd.to_datetime(df_tdat['CollectionDate'])
df_tsample = pd.read_csv('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/10_Salinity/tSampleLocation.csv')
# Create geodataframe and convert to USSP
gdf_tsample = gp.GeoDataFrame(
    df_tsample, geometry=gp.points_from_xy(df_tsample.EastCoordinate, df_tsample.NorthCoordinate), crs="EPSG:2263")
#convert data to CRS of D3D
gdf_tsample.geometry = gdf_tsample.geometry.to_crs("EPSG:32118")
# Look up each sample's location record once (the original code repeated the
# same .loc filter up to three times per row) and fall back to an empty
# point/facility when the location ID is unknown.
tdat_geo = list()
tdat_facility = list()
for loc_id in df_tdat['LocationID']:
    matches = df_tsample.loc[df_tsample['LocationID'] == loc_id]
    if len(matches) != 0:
        tdat_geo.append(matches.geometry.values[0])
        tdat_facility.append(matches.SourceArea.values[0])
    else:
        tdat_geo.append(Point())
        tdat_facility.append('')
# Create geodataframe
gdf_tdat = gp.GeoDataFrame(df_tdat, geometry=tdat_geo, crs="EPSG:32118")
gdf_tdat.loc[:, 'SourceArea'] = tdat_facility
# Normalise sample depths to metres (source mixes feet and centimetres)
ft_mask = gdf_tdat.loc[:, 'DepthUnit'] == 'ft'
cm_mask = gdf_tdat.loc[:, 'DepthUnit'] == 'cm'
gdf_tdat.loc[ft_mask, 'DepthM'] = gdf_tdat.loc[ft_mask, 'BeginDepth']*0.3048
gdf_tdat.loc[cm_mask, 'DepthM'] = gdf_tdat.loc[cm_mask, 'BeginDepth']*0.01
# Import spring observations
df_springDat = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/11_Temperature/NC_Spr2012_Benthic_Water_Quality_FieldData&Observations_20121022.xlsx')
# Attach NY State Plane geometry and reproject to the model CRS
spring_pts = gp.points_from_xy(df_springDat.x_coord_as_numeric, df_springDat.y_coord_as_numeric)
gdf_springDat = gp.GeoDataFrame(df_springDat, geometry=spring_pts, crs="EPSG:2263")
gdf_springDat.geometry = gdf_springDat.geometry.to_crs("EPSG:32118")
# Import Summer observations
df_summerDat = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/11_Temperature/NC_Sum2012_Benthic_Water_Quality_FieldData&Observations_20130205.xlsx')
summer_pts = gp.points_from_xy(df_summerDat.x_coord_as_numeric, df_summerDat.y_coord_as_numeric)
gdf_summerDat = gp.GeoDataFrame(df_summerDat, geometry=summer_pts, crs="EPSG:2263")
gdf_summerDat.geometry = gdf_summerDat.geometry.to_crs("EPSG:32118")
# Stack spring and summer records, then rebuild geometry on the combined table
df_SpringSummerDat = pd.concat([df_springDat, gdf_summerDat], ignore_index=True)
combined_pts = gp.points_from_xy(df_SpringSummerDat.x_coord_as_numeric, df_SpringSummerDat.y_coord_as_numeric)
gdf_SpringSummerDat = gp.GeoDataFrame(df_SpringSummerDat, geometry=combined_pts, crs="EPSG:2263")
gdf_SpringSummerDat.geometry = gdf_SpringSummerDat.geometry.to_crs("EPSG:32118")
# %% Save temperature and salinity data
gdf_SpringSummerDat.to_csv('C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/TempSal/Spring_Summer.csv')
# %% Plot Salinity Time series
# NOTE(review): this section was disentangled from a corrupted merge --
# verify against version-control history.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 8))
# Surface-water samples from the two creek source areas only
plotMaskStation = (gdf_tdat.SourceArea == 'Newtown Creek') | (gdf_tdat.SourceArea == 'East Branch of Newtown Creek')
plotMaskWater = (gdf_tdat.SampleMedium == 'Surface Water')
plotMask = plotMaskStation & plotMaskWater
pltDat = axes.scatter(gdf_tdat.loc[plotMask, 'date'],
                      gdf_tdat.loc[plotMask, 'DepthM'], 10,
                      gdf_tdat.loc[plotMask, 'NumericResult'])
axes.set_ylim(10, 0)  # inverted: depth increases downward
axes.set_ylabel('Depth below water surface [m]')
axes.set_title('Surface Water Salinity Samples')
axes.set_xlabel('Date')
axes.legend()
fmt_half_year = mdates.MonthLocator(interval=12)
axes.xaxis.set_major_locator(fmt_half_year)
axes.xaxis.set_major_formatter(mdates.DateFormatter('%Y'))
cbar = fig.colorbar(pltDat, ax=axes, shrink=0.95)
cbar.set_label('Salinity [PSU]')
pltDat.set_clim([5, 40])
fig.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/Salinity.png',
#             bbox_inches='tight', dpi=300)

# NOTE(review): the statements below were recovered from the merge and
# originally concluded the temperature-comparison figure in an earlier cell;
# as placed here they act on the most recent figure -- verify placement.
axes.set_xlabel('Date')
# axes.scatter(met_merged['TDRY'], met_merged['air_temp'])
# axes.scatter(waterTempSouth_merged['water_temp'], waterTempSouth_merged['L5'])
# axes.scatter(waterTempNorth_merged['water_temp'], waterTempSouth_merged['L5'])
plt.show()

#%% Plot Solar Radiation
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 6))
efdc_air_in['SOLSWR'].plot(ax=axes, color='black', label='EFDC Air Temperature')
axes.plot(solar_date_utc[0:-1], solar_rad, color='blue', label='Solar Radiation')
# NOTE(review): recovered from a corrupted merge -- these four statements
# finish the solar-radiation figure created in the previous cell; verify
# placement against version-control history.
axes.set_xlim(pd.to_datetime('2012-08-01'), pd.to_datetime('2012-08-07'))
axes.set_ylabel('Solar Radiation (w/m^2)')
axes.set_xlabel('Date')
axes.legend()

# %% Plot Temperature Time series
## Additional salinity obs are here!
# One panel per half-mile band of distance from the creek mouth (0-3 miles).
fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(6, 8))
fig.tight_layout(pad=2)
for i, miles in enumerate(np.arange(0, 3, 0.5)):
    plotMaskDistance = (gdf_SpringSummerDat.miles_from_NC_mouth > miles) & (
        gdf_SpringSummerDat.miles_from_NC_mouth < miles + 0.5)
    plotMaskStation = (gdf_SpringSummerDat.subfacility_code == 'Newtown Creek')
    plotMaskVar = (gdf_SpringSummerDat.chemical_name == 'Temperature (field)')
    plotMask = plotMaskDistance & plotMaskStation & plotMaskVar
    pltDat = axes[i].scatter(gdf_SpringSummerDat.loc[plotMask, 'location_start_date'],
                             gdf_SpringSummerDat.loc[plotMask, 'elev']*0.3048, 10,
                             gdf_SpringSummerDat.loc[plotMask, 'result_value'])
    axes[i].set_ylim(-10, 0)
    axes[i].set_ylabel('Elevation [m]')
    cbar = fig.colorbar(pltDat, ax=axes[i], shrink=0.95)
    cbar.set_label('Temp [degC]')
    pltDat.set_clim([5, 30])
    fmt_half_year = mdates.MonthLocator(interval=1)
    axes[i].xaxis.set_major_locator(fmt_half_year)
    axes[i].xaxis.set_major_formatter(mdates.DateFormatter('%m-%Y'))
axes[0].set_title('Surface Water Temperature Samples')
axes[i].set_xlabel('Date')
fig.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/Temperature.png',
#             bbox_inches='tight', dpi=300)
# %% Load in ADV data
# Read in locations
df_adv_locs = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/Velocity_Data_Compiled/Phase2/Locations/AQ_LocationsSO_ADCP_ADV_overview_Coords_20150126_AJMR.xlsx',
                            sheet_name='ADV2')
# NY State Plane geometry reprojected to the model CRS
adv_pts = gp.points_from_xy(df_adv_locs.X_NYSPLI, df_adv_locs.Y_NYSPLI)
gdf_adv_locs = gp.GeoDataFrame(df_adv_locs, geometry=adv_pts, crs="EPSG:2263")
gdf_adv_locs = gdf_adv_locs.to_crs("EPSG:32118")
adv_data_path = '//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/Velocity_Data_Compiled/Phase2/Raw'
# Deployment folders D1-D6 (dates encoded MMDDYY in the folder names)
adv_paths = ['NCP2_ADV_D1-D2/ADV_D1_070914_080214', 'NCP2_ADV_D1-D2/ADV_D2_080214_090814',
             'NCP2_ADV_D3-D4/ADV_D3_090914_100614', 'NCP2_ADV_D3-D4/ADV_D4_100714_110214',
             'NCP2_ADV_D5-D6/ADV_D5_110414_120214', 'NCP2_ADV_D5-D6/ADV_D6_120414_010615']
adv_gdfs = dict()
adv_dfs = dict()
# Read every ADV text file, key it by station code and deployment code, and
# merge columns when the same station/deployment appears in multiple files.
for depIDX, adv_path in enumerate(adv_paths):
    adv_files = os.listdir(adv_data_path + '/' + adv_path)  # returns list of files in adv folder
    for adv_file in adv_files:
        if '.txt' in adv_file and 'Readme' not in adv_file:
            df_in = pd.read_csv(adv_data_path + '/' + adv_path + '/' + adv_file, delim_whitespace=True, skipinitialspace=True)
            station_key = adv_file[-9:-6]  # station code embedded in the file name
            dep_key = adv_file[-6:-4]      # deployment code (D1-D6)
            for stationIDX, station in enumerate(gdf_adv_locs['parent_loc_code']):
                if station_key in station:
                    # Build a datetime index from the component columns, then
                    # drop them and any all-NaN columns
                    df_in['date'] = pd.to_datetime(df_in['Year'].astype(str) + '-' + df_in['Month'].astype(
                        str).str.zfill(2) + '-' + df_in['Day'].astype(
                        str).str.zfill(2) + ' ' + df_in['Hour'].astype(
                        str).str.zfill(2) + ':' + df_in['Minute'].astype(
                        str).str.zfill(2) + ':' + df_in['Second'].astype(str).str.zfill(2))
                    df_in.set_index('date', inplace=True)
                    df_in.drop(columns=['Year', 'Month', 'Day', 'Hour', 'Minute', 'Second'], inplace=True)
                    df_in.dropna(how='all', axis=1, inplace=True)
                    colName = df_in.columns
                    df_in[colName] = df_in[colName].astype('float32')
                    # Location
                    if station_key not in adv_gdfs:
                        adv_gdfs[station_key] = dict()
                        adv_dfs[station_key] = dict()
                    # D1-6: the location row is assigned unconditionally (the
                    # original repeated the same assignment in both branches);
                    # only the data table needs the new-vs-append distinction.
                    adv_gdfs[station_key][dep_key] = gdf_adv_locs.iloc[stationIDX]
                    if dep_key not in adv_dfs[station_key]:
                        adv_dfs[station_key][dep_key] = df_in
                    else:
                        adv_dfs[station_key][dep_key].loc[:, colName] = df_in
                    print('ADV:' + station_key +
                          '; ' + dep_key +
                          '; var:' + adv_file[3:6])
# with open('ADV.pickle', 'wb') as f:
#     pickle.dump(adv_dfs, f)
# %% Save ADV to NetCDF
for stationIDX, stat in enumerate(adv_dfs):
    for depIDX, depdat in enumerate(adv_dfs[stat]):
        ncDat = adv_dfs[stat][depdat]
        # Sanitise column names for NetCDF.  The patterns are regexes, so pass
        # regex=True explicitly: the str.replace default changed to
        # regex=False in pandas 2.0, which would leave the names untouched.
        ncDat.columns = ncDat.columns.str.replace(r"[()]", "_", regex=True)
        ncDat.columns = ncDat.columns.str.replace(r"[/]", "_", regex=True)
        ncDat.to_xarray().to_netcdf(
            'C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADV/' + stat + '_' + depdat + '.nc')
gdf_adv_locs.to_csv('C:/Users/arey/files/Projects/Newtown/DataFigs/NetCDF/ADV/ADVlocations.csv')
# %% Plot ADV Data
# One panel per station; 1-second resampled velocity traces per deployment.
fig, axes = plt.subplots(nrows=6, ncols=1, figsize=(9, 8))
fig.tight_layout(pad=2)
for stationIDX, stat in enumerate(adv_dfs):
    for depIDX, depdat in enumerate(adv_dfs[stat]):
        # adv_dfs[stat][depdat]['vel'].iloc[::60, :].plot(ax=axes[stationIDX])
        # Column name differs between source files ('Velocity' vs the
        # sanitised 'Velocity_m_s_')
        if 'Velocity' in adv_dfs[stat][depdat].columns:
            plotingDat = adv_dfs[stat][depdat].loc[:, 'Velocity'].resample('1s').mean()
        else:
            plotingDat = adv_dfs[stat][depdat].loc[:, 'Velocity_m_s_'].resample('1s').mean()
        axes[stationIDX].plot(plotingDat.index, plotingDat)
        # axes[stationIDX].set_ylim(0, 7)
        axes[stationIDX].set_xlim(pd.to_datetime("2014-07-01"), pd.to_datetime('2015-02-01'))
        # axes[stationIDX, depIDX].format_xdata = mdates.DateFormatter('%Y-%m')
        fmt_half_year = mdates.MonthLocator(interval=1)
        axes[stationIDX].xaxis.set_major_locator(fmt_half_year)
        axes[stationIDX].xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m'))
        axes[stationIDX].set_title(str(stat))
        axes[stationIDX].set_ylabel('ADV Velocity [m/s]')
# cbar.set_label('Velocity Magnitude [m/s]')
# pltDat.set_clim([0, 0.2])
plt.show()  # a duplicated plt.show() left over from a bad merge was removed here
fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/ADV_raw.png',
            bbox_inches='tight', dpi=300)
# %% Plot Map
# Overview map of every measurement location on a Mapbox basemap, while also
# accumulating (name, x, y) into locTable* lists for the grid join below.
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(8, 8))
locTableNames = []
locTableX = []
locTableY = []
axes.set_xlim(303500, 306500)
axes.set_ylim(61000, 63750)
# Mobile ADCP transects (gdf is built earlier in the file -- not visible here)
gdf.plot(ax=axes, markersize=10, color='blue', label='Mobile ADCP')
for x, y, label in zip(gdf.drop_duplicates(subset="station", keep='last').geometry.x,
                       gdf.drop_duplicates(subset="station", keep='last').geometry.y,
                       gdf.drop_duplicates(subset="station", keep='last').station):
    axes.annotate(label, xy=(x, y), xytext=(20, 3), textcoords="offset points", color='blue',
                  bbox=dict(boxstyle="square,pad=0.3", fc="white", ec="k", lw=0))
# Temperature & salinity sample locations (collected but not annotated)
gdf_SpringSummerDat.plot(ax=axes, markersize=12, color='magenta', label='Temperature & Salinity')
for x, y, label in zip(gdf_SpringSummerDat.drop_duplicates(subset="loc_name", keep='last').geometry.x,
                       gdf_SpringSummerDat.drop_duplicates(subset="loc_name", keep='last').geometry.y,
                       gdf_SpringSummerDat.drop_duplicates(subset="loc_name", keep='last').loc_name):
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
# 2012 moored ADCP locations
gdf_moored_loc.plot(ax=axes, markersize=20, color='red', label='Moored ADCP 2012')
for x, y, label in zip(gdf_moored_loc.drop_duplicates(subset="depCurr", keep='last').geometry.x,
                       gdf_moored_loc.drop_duplicates(subset="depCurr", keep='last').geometry.y,
                       gdf_moored_loc.drop_duplicates(subset="depCurr", keep='last').depCurr):
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
# 2014 moored ADCP locations (annotated)
gdf_adcp2_locs.plot(ax=axes, markersize=20, color='orange', label='Moored ADCP 2014')
for x, y, label in zip(gdf_adcp2_locs.drop_duplicates(subset="parent_loc_code", keep='last').geometry.x,
                       gdf_adcp2_locs.drop_duplicates(subset="parent_loc_code", keep='last').geometry.y,
                       gdf_adcp2_locs.drop_duplicates(subset="parent_loc_code", keep='last').parent_loc_code):
    axes.annotate(label, xy=(x, y), xytext=(-65, 3), textcoords="offset points", color='orange',
                  bbox=dict(boxstyle="square,pad=0.3", fc="white", ec="k", lw=0))
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
# 2014 moored ADV locations (annotated)
gdf_adv_locs.plot(ax=axes, markersize=20, color='green', label='Moored ADV 2014')
for x, y, label in zip(gdf_adv_locs.geometry.x,
                       gdf_adv_locs.geometry.y,
                       gdf_adv_locs.parent_loc_code):
    axes.annotate(label, xy=(x, y), xytext=(-30, -30), textcoords="offset points", color='green',
                  bbox=dict(boxstyle="square,pad=0.3", fc="white", ec="k", lw=0))
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
# Water level gauges (KML features 2-3)
gdf_gaugeLocUSSP.loc[2:3, 'geometry'].plot(ax=axes, markersize=20, color='yellow', label='Water Level Gauge')
for x, y, label in zip(gdf_gaugeLocUSSP.loc[2:3, 'geometry'].x,
                       gdf_gaugeLocUSSP.loc[2:3, 'geometry'].y,
                       gdf_gaugeLocUSSP.loc[2:3, 'Name']):
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
# Every mobile-ADCP point, labelled station_transect+min+second
for x, y, label in zip(gdf.geometry.x,
                       gdf.geometry.y,
                       gdf.station + '_' + gdf.transect.astype(str) + gdf['min'].astype(str) + gdf.second.astype(str)):
    locTableNames.append(label)
    locTableX.append(x)
    locTableY.append(y)
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32118')
axes.set_xlabel('New York State Plane Easting [m]')
axes.set_ylabel('New York State Plane Northing [m]')
axes.legend()
# axes[1].set_xlim(303500, 306500)
# axes[1].set_ylim(61000, 63750)
# gdf_SpringSummerDat.plot(ax=axes[1], markersize=12, color='magenta', label='Temperature & Salinity')
# axes[1].set_xlabel('New York State Plane Easting [m]')
# axes[1].legend()
# ctx.add_basemap(axes[1], source=mapbox, crs='EPSG:32118')
fig.show()
# fig.savefig('C:/Users/arey/files/Projects/Newtown/DataFigs/DataMap_ADV.png',
#             bbox_inches='tight', dpi=300)
# %% Import grid shapefile and find cells
# Map every observation point collected above onto the Delft3D-FM grid cell
# that contains it, plot the matched cells, and export them.
delftGrid = gp.read_file('C:/Users/arey/files/Projects/Newtown/Topology data of 2D network.shp')
delftGrid = delftGrid.set_crs("EPSG:32118")
delftGrid['centroid'] = delftGrid.geometry.centroid
obsPts = gp.GeoDataFrame(locTableNames, geometry=gp.points_from_xy(locTableX, locTableY), crs="EPSG:32118")
# NOTE(review): 'op=' is deprecated in newer geopandas in favour of
# 'predicate=' -- confirm the pinned geopandas version before upgrading.
joinPTS = gp.sjoin(obsPts, delftGrid, op='within')
uniqueJoinPTS = joinPTS.index_right.unique()
# Collect all observation labels falling in the same grid cell; column 0
# holds the label because obsPts was built from a plain list.
groupdObsLabels = joinPTS.groupby(by='index_right').agg({0:lambda x:list(x)})
uniqueDelftGrid = delftGrid.iloc[uniqueJoinPTS, :]
uniqueDelftGrid['Station Names'] = groupdObsLabels
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(8, 8))
axes.set_xlim(303500, 306500)
axes.set_ylim(61000, 63750)
delftGrid.plot(ax=axes, markersize=10, color='gray', label='Mobile ADCP')
delftGrid.loc[uniqueJoinPTS, 'geometry'].plot(ax=axes, markersize=10, color='blue', label='Mobile ADCP')
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32118')
axes.set_xlabel('New York State Plane Easting [m]')
axes.set_ylabel('New York State Plane Northing [m]')
fig.show()
uniqueDelftGrid.to_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/06_Models/06_Delft3DFM/RectConnect2_Obs_PTS.xlsx')