Azure translate and others

This commit is contained in:
Alexander Rey 2023-01-17 14:36:51 -05:00
parent 704977b42f
commit 91ce9fac30
42 changed files with 3331 additions and 96 deletions

8
Azure/.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

8
Azure/.idea/Azure.iml Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@ -0,0 +1,13 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="bool.*" />
<option value="geopandas.io.file" />
</list>
</option>
</inspection_tool>
</profile>
</component>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

4
Azure/.idea/misc.xml Normal file
View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (geopandas_forge_mustique)" project-jdk-type="Python SDK" />
</project>

8
Azure/.idea/modules.xml Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/Azure.iml" filepath="$PROJECT_DIR$/.idea/Azure.iml" />
</modules>
</component>
</project>

6
Azure/.idea/vcs.xml Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

36
Azure/AzureTranslate.py Normal file
View File

@ -0,0 +1,36 @@
# Sample script to submit a batch document translation job to the Azure
# Cognitive Services Document Translation (batch v1.0) REST API and print
# the service's response status.
#
# NOTE(review): the subscription key was previously hard-coded in this file
# (a leaked credential). It is now read from the AZURE_TRANSLATOR_KEY
# environment variable so the secret is no longer committed to version
# control. The old key should be rotated in the Azure portal.
# NOTE(review): the sourceUrl/targetUrl below still embed SAS tokens in the
# query string -- these are credentials too and should be rotated and
# externalized as well.
import os
import requests

# Batch (asynchronous) translation endpoint for this Cognitive Services
# resource; jobs are submitted by POSTing to <endpoint>/batches.
endpoint = "https://translateazuretesting.cognitiveservices.azure.com/translator/text/batch/v1.0"
key = os.environ.get('AZURE_TRANSLATOR_KEY')
if key is None:
    raise RuntimeError(
        "Set the AZURE_TRANSLATOR_KEY environment variable to the "
        "Cognitive Services subscription key before running this script.")
path = '/batches'
constructed_url = endpoint + path
# One translation job: read French documents from the source blob container
# and write English translations to the output container.
payload= {
    "inputs": [
        {
            "source": {
                "sourceUrl": "https://translatedatastore.blob.core.windows.net/translate-source?sp=rl&st=2023-01-10T20:06:57Z&se=2024-01-11T04:06:57Z&spr=https&sv=2021-06-08&sr=c&sig=payaBXqpPpVZGG4y4h8Bncjlfzf5T3JN2HFbtCCXMeI%3D",
                "storageSource": "AzureBlob",
                "language": "fr"
            },
            "targets": [
                {
                    "targetUrl": "https://translatedatastore.blob.core.windows.net/translate-output?sp=rwl&st=2023-01-10T20:07:41Z&se=2024-01-11T04:07:41Z&spr=https&sv=2021-06-08&sr=c&sig=C8Ivgssa4W5AaeKtE4%2B3xZElNPa%2FJxiXo93897dpBxI%3D",
                    "storageSource": "AzureBlob",
                    "category": "general",
                    "language": "en"
                }
            ]
        }
    ]
}
headers = {
    # Subscription-key auth for Cognitive Services.
    'Ocp-Apim-Subscription-Key': key,
    'Content-Type': 'application/json'
}
# Submit the job; a successful submission returns 202 Accepted with an
# Operation-Location header for polling job status.
response = requests.post(constructed_url, headers=headers, json=payload)
print(f'response status code: {response.status_code}\nresponse status: {response.reason}\nresponse headers: {response.headers}')

View File

@ -2,7 +2,7 @@
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="jdk" jdkName="Python 3.8 (dfm_tools_env)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (D3DFM)" project-jdk-type="Python SDK" />
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (dfm_tools_env)" project-jdk-type="Python SDK" />
</project>

View File

@ -5,12 +5,20 @@
#%% Import
import pandas as pd
import numpy as np
import math
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import geopandas as gp
gp.options.use_pygeos = True
from shapely import geometry, ops
# Map Plotting
import cartopy.crs as ccrs
import contextily as ctx
# Interpolation
import scipy as sp
from scipy.interpolate import griddata
from scipy.interpolate import LinearNDInterpolator, interp1d
@ -18,6 +26,7 @@ from dfm_tools.get_nc import get_netdata, get_ncmodeldata, plot_netmapdata
from dfm_tools.get_nc_helpers import get_timesfromnc, get_ncfilelist, get_hisstationlist, get_ncvardimlist, get_timesfromnc
import pathlib as pl
import datetime
#%% Read Model Log
# runLog = pd.read_excel("Y:/12828.101 English Wabigoon River/06_Models/00_Delft3D/ModelRuns.xlsx", "Sensitivity")
@ -49,13 +58,205 @@ river_centerline_merge_gpd2['RiverKM'] = river_centerline_merge_gpd2['DistanceFr
river_centerline_merge_gpd2.iloc[0, 1] = 0
river_centerline_merge_gpd2.iloc[0, 2] = 0
#%% Import Validation Data
# Read in combined dataset
obs_IN = pd.read_csv("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/output/combined dataset-SGJ.csv")
# Add Datetime
obs_IN['DateTime'] = pd.to_datetime(obs_IN.Sampledate_x)
# Convert to geodataframe
obs_gdf = gp.GeoDataFrame(obs_IN, geometry=gp.points_from_xy(
obs_IN.loc[:, 'Longitude'], obs_IN.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
# Add centerline and reset index
obs_gdf = gp.sjoin_nearest(river_centerline_merge_gpd2, obs_gdf, how='right', max_distance=250).reset_index()
# Sediment Hg GeoDataFrame where media is Sediment
obsMask = (((obs_gdf.Media_x == 'SED') | (obs_gdf.Media_x == 'SOIL')) &
((obs_gdf.Parameter == 'Mercury') | (obs_gdf.Parameter == 'Mercury (Hg) Total')
| (obs_gdf.Parameter == 'Total Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)')) &
(obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())
obs_HgSed = obs_gdf.loc[obsMask, :]
# Adjust Units
obs_HgSed.loc[obs_HgSed.Unit == 'NG/G', 'Sample_NG/G'] = obs_HgSed.Samplevalue
obs_HgSed.loc[obs_HgSed.Unit == 'ng/g', 'Sample_NG/G'] = obs_HgSed.Samplevalue
obs_HgSed.loc[obs_HgSed.Unit == 'UG/G', 'Sample_NG/G'] = obs_HgSed.Samplevalue * 1000
obs_HgSed.loc[obs_HgSed.Unit == 'ug/g', 'Sample_NG/G'] = obs_HgSed.Samplevalue * 1000
obs_HgSed.loc[obs_HgSed.Unit == 'MG/KG', 'Sample_NG/G'] = obs_HgSed.Samplevalue * 0.001
obs_HgSed.loc[obs_HgSed.Unit == 'mg/kg', 'Sample_NG/G'] = obs_HgSed.Samplevalue * 0.001
obs_HgSed.to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/HgSedDB2.geojson', driver="GeoJSON")
# Group sediment core data to take average/min/max
obs_HgSed_Avg = obs_HgSed.groupby(['Sitecode', 'DateTime']).agg({'Sample_NG/G': ['mean', 'min', 'max'],
'RiverKM': ['mean'],
'Longitude': ['mean'],
'Latitude': ['mean'],
'DateTime': ['mean']}).reset_index()
# Merge Index
obs_HgSed_Avg.columns = obs_HgSed_Avg.columns.map('|'.join).str.strip('|')
# Fix Names
obs_HgSed_Avg.rename(columns={"RiverKM|mean": "RiverKM", "Longitude|mean": "Longitude",
"Latitude|mean": "Latitude", "DateTime|mean": "DateTime"}, inplace=True)
# Create new GeoDataFrame
obs_HgSed_Avg = gp.GeoDataFrame(obs_HgSed_Avg, geometry=gp.points_from_xy(
obs_HgSed_Avg.loc[:, 'Longitude'], obs_HgSed_Avg.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
# Water Hg GeoDataFrame where media is SurfaceWater (SW)
obsMask = (((obs_gdf.Media_x == 'SW')) &
((obs_gdf.Parameter == 'Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)-Total')
| (obs_gdf.Parameter == 'Total Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)')) &
(obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())
obs_HgWater = obs_gdf.loc[obsMask, :]
# Adjust Units
obs_HgWater.loc[obs_HgWater.Unit == 'NG/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue
obs_HgWater.loc[obs_HgWater.Unit == 'ng/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue
obs_HgWater.loc[obs_HgWater.Unit == 'UG/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue * 1000
obs_HgWater.loc[obs_HgWater.Unit == 'ug/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue * 1000
obs_HgWater.loc[obs_HgWater.Unit == 'MG/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue * 1000000
obs_HgWater.loc[obs_HgWater.Unit == 'mg/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue * 1000000
# Import Sediment MeHg
# Sediment MeHg GeoDataFrame
obsMask = (((obs_gdf.Media_x == 'SED') | (obs_gdf.Media_x == 'SOIL')) &
((obs_gdf.Parameter == 'Methylmercury (as MeHg)') | (obs_gdf.Parameter == 'Methyl mercury')) &
(obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())
obs_MeHgSed = obs_gdf.loc[obsMask, :]
# Adjust Units
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'NG/G', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'ng/g', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'UG/G', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue * 1000
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'ug/g', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue * 1000
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'MG/KG', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue * 0.001
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'mg/kg', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue * 0.001
# Group sediment core data to take average/min/max
obs_MeHgSed_Avg = obs_MeHgSed.groupby(['Sitecode', 'DateTime']).agg({'Sample_NG/G': ['mean', 'min', 'max'],
'RiverKM': ['mean'],
'Longitude': ['mean'],
'Latitude': ['mean'],
'DateTime': ['mean']}).reset_index()
# Merge Index
obs_MeHgSed_Avg.columns = obs_MeHgSed_Avg.columns.map('|'.join).str.strip('|')
# Fix Names
obs_MeHgSed_Avg.rename(columns={"RiverKM|mean": "RiverKM", "Longitude|mean": "Longitude",
"Latitude|mean": "Latitude", "DateTime|mean": "DateTime"}, inplace=True)
# Create new GeoDataFrame
obs_MeHgSed_Avg = gp.GeoDataFrame(obs_MeHgSed_Avg, geometry=gp.points_from_xy(
obs_MeHgSed_Avg.loc[:, 'Longitude'], obs_MeHgSed_Avg.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
# MeHg in Water
obsMask = (((obs_gdf.Media_x == 'SW')) &
((obs_gdf.Parameter == 'Methylmercury (as MeHg)') | (obs_gdf.Parameter == 'Methyl mercury')) &
(obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())
obs_MeHgWater = obs_gdf.loc[obsMask, :]
# Adjust Units
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'NG/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'ng/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'UG/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue * 1000
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'ug/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue * 1000
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'MG/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue * 1000000
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'mg/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue * 1000000
#%% Plot Observations
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(30, 15))
axes.set_xlim(494649, 513647)
axes.set_ylim(5514653, 5525256)
# Add basemap
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
ctx.add_basemap(axes, source=mapbox, crs='EPSG:26915')
obs_HgSed_Avg.plot(column='Sample_NG/G|mean', axes=axes, vmin=0, vmax=10000)
plt.show()
#%% Interpolate Hg to grid
# Read in model points and roughness
bathy_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Bed_Level.xyz',
names=['x', 'y', 'z'], header=0, delim_whitespace=True)
rough_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Roughness.xyz',
names=['x', 'y', 'z'], header=0, delim_whitespace=True)
# Loop through parameters
for i in range(0, 5):
if i == 0:
dataOUT = obs_HgSed_Avg['Sample_NG/G|mean']
dataOutx = obs_HgSed_Avg.geometry.x
dataOuty = obs_HgSed_Avg.geometry.y
dateFileName = 'HgSed_meanDB'
elif i == 1:
dataOUT = obs_MeHgSed_Avg['Sample_NG/G|mean']
dataOutx = obs_MeHgSed_Avg.geometry.x
dataOuty = obs_MeHgSed_Avg.geometry.y
dateFileName = 'MeHgSed_meanDB'
elif i == 2:
dataOUT = obs_HgWater['Sample_NG/L']
dataOutx = obs_HgWater.geometry.x
dataOuty = obs_HgWater.geometry.y
dateFileName = 'HgWater_allDB'
elif i == 3:
dataOUT = obs_MeHgWater['Sample_NG/L']
dataOutx = obs_MeHgWater.geometry.x
dataOuty = obs_MeHgWater.geometry.y
dateFileName = 'MeHgWater_allDB'
elif i == 4:
# Synthetic from Reed's Sheet
dataOUT = ((10 - 1) * np.exp(-0.08 * np.array(river_centerline_merge_gpd2.loc[:, 'RiverKM']) / 1000) + 1) * 1000
dataOutx = river_centerline_merge_gpd2.geometry.x
dataOuty = river_centerline_merge_gpd2.geometry.y
dateFileName = 'ReedHgSed'
gridInterp = griddata(np.array([dataOutx, dataOuty]).T, dataOUT, np.array([bathy_xyz.x, bathy_xyz.y]).T,
method='nearest')
gridInterpOut = np.vstack((bathy_xyz.x, bathy_xyz.y, gridInterp))
gridInterpOut = np.transpose(gridInterpOut)
# Set Wetlands to zero
# Interpolate Roughness
RoughInterp = sp.interpolate.griddata(np.transpose(np.array([rough_xyz.x, rough_xyz.y])), rough_xyz.z, np.transpose(np.array([bathy_xyz.x, bathy_xyz.y])),
method='nearest')
gridInterpOut[RoughInterp==0.05, 2] = 0
np.savetxt('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/' + dateFileName + '.xyz',
gridInterpOut, delimiter=" ")
#%% Import using DFM Functions
modelPlot = 27
## Degine import path
# file_nc_map = pl.Path(runLog['Run Location'][modelPlot], 'Water_Quality', 'output', 'deltashell_map.nc')
file_nc_map = pl.Path("//ott-diomedes.baird.com/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_42_WAQ.dsproj_data/Water_Quality/output/deltashell_map.nc")
# file_nc_map = pl.Path("//ott-diomedes.baird.com/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_42_WAQ.dsproj_data/Water_Quality/output/deltashell_map.nc")
# file_nc_map = pl.Path("//ott-diomedes.baird.com/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_42B_WAQ_Coverage.dsproj_data/Water_Quality/output/deltashell_map.nc")
# file_nc_map = pl.Path("//ott-diomedes.baird.com/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_42C_WAQ_Coverage2.dsproj_data/Water_Quality/output/deltashell_map.nc")
#file_nc_map = pl.Path("//ott-diomedes.baird.com/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_42E_WAQ_DIDO.dsproj_data/Water_Quality/output/deltashell_map.nc")
# file_nc_map = pl.Path("//ott-diomedes/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_43_OceanMesh5_B.dsproj_data/Water_Quality/output/deltashell_map.nc")
file_nc_map = pl.Path("//ott-diomedes.baird.com/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_43_OceanMesh5_C.dsproj_data/Water_Quality/output/deltashell_map.nc")
tSteps = get_timesfromnc(file_nc=file_nc_map, varname='time')
@ -79,14 +280,14 @@ dataVars = ['FrHgIM1', 'FrHgIM2', 'FrHgIM3', 'FrHgDOC', 'FrHgPOC', 'FrHg
'HgHgS1O', 'MmMmS1O',
'HgMmO', 'HgS1MmS1O', 'MmHgO', 'MmS1HgS1O',
'oPhoDeg', 'oPhoOxy', 'oPhoRed',
'f_minPOC1', 'MnODCS1']
'f_minPOC1', 'MnODCS1', 'Continuity']
# Create Pandas Output Array
dataIN_x = get_ncmodeldata(file_nc=file_nc_map.as_posix(),
varname='mesh2d_face_x',
varname='mesh2d_agg_face_x',
silent=True)
dataIN_y = get_ncmodeldata(file_nc=file_nc_map.as_posix(),
varname='mesh2d_face_y',
varname='mesh2d_agg_face_y',
silent=True)
dataIN_gp = gp.GeoDataFrame(geometry=gp.points_from_xy(dataIN_x, dataIN_y, crs="EPSG:32615"))
@ -96,15 +297,16 @@ dataIN_gp = gp.GeoDataFrame(geometry=gp.points_from_xy(dataIN_x, dataIN_y, crs="
for vIDX, v in enumerate(dataVars):
# Extract data from NetCDF file
dataIN = get_ncmodeldata(file_nc=file_nc_map.as_posix(),
varname='mesh2d_' + v, timestep=len(tSteps)-1,
silent=True, layer=0)
varname='mesh2d_agg_' + v, timestep=len(tSteps)-5,
silent=True) #, layer=0
dataIN_gp[v] = np.array(dataIN[0][0][:])
print(v)
#
#%% Create Dataframe along river centerline for plotting
dataOUT = gp.sjoin_nearest(river_centerline_merge_gpd2, dataIN_gp, how='left', max_distance=100)
dataOUT.set_index('RiverKM', inplace=True)
#%% Plot Partition Data
@ -188,15 +390,21 @@ ax = axes.flat
#Hg in Water
ax[0].set_title('Hg in Water')
ax[0].plot(dataOUT.index, dataOUT['Hg'] * 1000000, linewidth=3, label='Hg')
ax[0].scatter(obs_HgWater.RiverKM, obs_HgWater['Sample_NG/L'], 2, label='Hg Obs', color='y')
ax[0].set_xlim([0, 100000])
ax[0].set_ylim([0, 100])
ax[0].legend()
ax[0].set_ylabel('Hg [ng/L]')
ax[0].set_xlabel('Distance Along Flume [m]')
# Mm in Water
# MeHg in Water
ax[1].set_title('MeHg in Water')
ax[1].plot(dataOUT.index, dataOUT['Mm'] * 1000000, linewidth=3, label='MeHg')
ax[1].scatter(obs_MeHgWater.RiverKM, obs_MeHgWater['Sample_NG/L'], 2, label='Hg Obs', color='y')
ax[1].set_xlim([0, 100000])
ax[1].set_ylim([0, 2])
ax[1].legend()
ax[1].set_ylabel('MeHg [ng/L]')
@ -206,24 +414,42 @@ ax[1].set_xlabel('Distance Along Flume [m]')
ax[2].set_title('Hg in Sediment')
ax[2].plot(dataOUT.index, dataOUT['HgS1'] * 1*10 ** 9 / (dataOUT['IM1S1'] + dataOUT['IM2S1'] + dataOUT['IM3S1'] + dataOUT['DetCS1'] + dataOUT['OOCS1']),
linewidth=3, label='HgS1')
# ax[2].scatter(obs_HgSed.RiverKM, obs_HgSed['Sample_NG/G'], 2, label='Hg Obs', color='y')
ax[2].scatter(obs_HgSed_Avg.RiverKM, obs_HgSed_Avg['Sample_NG/G|mean'], 2, label='Hg Obs', color='y')
ax[2].set_xlim([0, 100000])
ax[2].set_ylim([0, 20000])
ax[2].legend()
ax[2].set_ylabel('HgS1 [ng/g]')
ax[2].set_xlabel('Distance Along Flume [m]')
# Mm in Sediment
# MeHg in Sediment
ax[3].set_title('MeHg in Sediment')
ax[3].plot(dataOUT.index, dataOUT['MmS1'] * 1*10 ** 9 / (dataOUT['IM1S1'] + dataOUT['IM2S1'] + dataOUT['IM3S1'] + dataOUT['DetCS1'] + dataOUT['OOCS1']),
linewidth=3, label='MeHgS1')
ax[3].scatter(obs_MeHgSed_Avg.RiverKM, obs_MeHgSed_Avg['Sample_NG/G|mean'], 2, label='MeHg Obs', color='y')
ax[3].set_ylim([0, 60])
ax[3].set_xlim([0, 100000])
ax[3].legend()
ax[3].set_ylabel('MeHg [ng/g]')
ax[3].set_xlabel('Distance Along Flume [m]')
# ax[3].set_title('Continuity')
# ax[3].plot(dataOUT.index, dataOUT['Continuity'],
# linewidth=3, label='Continuity')
# ax[3].set_ylim([0, 1])
#
# ax[3].legend()
# ax[3].set_ylabel('Continuity [-]')
# ax[3].set_xlabel('Distance Along Flume [m]')
# ax[3].set_ylim([0, 2])
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/HgTotals.png',
bbox_inches='tight', dpi=200)
# fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/HgTotals.png',
# bbox_inches='tight', dpi=200)
#%% Plot POC total Data
@ -272,8 +498,8 @@ ax[3].set_xlabel('Distance Along Flume [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/POCTotals.png',
bbox_inches='tight', dpi=200)
# fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/POCTotals.png',
# bbox_inches='tight', dpi=200)
#%% More Total Data
@ -295,7 +521,7 @@ ax[0].set_xlabel('Distance Along Flume [m]')
# IM1 in Water
ax[1].set_title('IM1 (Sand) in Water')
ax[1].plot(dataOUT.index, dataOUT['IM1'], linewidth=3, label='IM1')
ax[1].set_ylim([0, 50])
ax[1].legend()
ax[1].set_ylabel('IM1 [g/m3]')
ax[1].set_xlabel('Distance Along Flume [m]')
@ -318,8 +544,8 @@ ax[3].set_xlabel('Distance Along Flume [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/InorganicTotals.png',
bbox_inches='tight', dpi=200)
# fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/InorganicTotals.png',
# bbox_inches='tight', dpi=200)
#%% Plot Fluxes
@ -332,6 +558,7 @@ ax = axes.flat
ax[0].set_title('IM1 (Sand) Resuspension Flux')
ax[0].plot(dataOUT.index, dataOUT['fResS1IM1'], linewidth=3, label='IM1 Resuspension')
ax[0].plot(dataOUT.index, dataOUT['fSedIM1'], linewidth=3, label='IM1 Sedimentation')
ax[0].set_ylim([0, 2000])
ax[0].legend()
ax[0].set_ylabel('IM1 (Sand) Sedimentation and Resuspension [g/m2/d]')
@ -343,6 +570,7 @@ ax[1].plot(dataOUT.index, dataOUT['fResS1DetC'], linewidth=3, label='DetC Resusp
ax[1].plot(dataOUT.index, dataOUT['fResS1OOC'], linewidth=3, label='OOC Resuspension')
ax[1].plot(dataOUT.index, dataOUT['fSedPOC1'], linewidth=3, label='POC1 (Fast) Sedimentation')
ax[1].plot(dataOUT.index, dataOUT['fSedPOC2'], linewidth=3, label='POC2 (Slow) Sedimentation')
ax[1].set_ylim([0, 100])
ax[1].legend()
ax[1].set_ylabel('POC Sedimentation and Resuspension [g/m2/d]')
@ -352,6 +580,7 @@ ax[1].set_xlabel('Distance Along Flume [m]')
ax[2].set_title('Hg Resuspension Flux')
ax[2].plot(dataOUT.index, dataOUT['fResS1Hg'], linewidth=3, label='Hg Resuspension')
ax[2].plot(dataOUT.index, dataOUT['fSedHg'], linewidth=3, label='Hg Sedimentation')
ax[2].set_ylim([0, 0.01])
ax[2].legend()
ax[2].set_ylabel('Hg Sedimentation and Resuspension [g/m2/d]')
@ -368,8 +597,8 @@ ax[3].set_xlabel('Distance Along Flume [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/HgFluxes.png',
bbox_inches='tight', dpi=200)
# fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/HgFluxes.png',
# bbox_inches='tight', dpi=200)
#%% Plot More Fluxes
@ -635,4 +864,53 @@ dataTableOut['Hg Diffusion [g/m3/d]'] = dataOUT['oPhoRed'].mean()
dataTableOut_df = pd.DataFrame(data=dataTableOut, index=[0])
dataTableOut_df = (dataTableOut_df.T)
dataTableOut_df.to_excel('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ModelOutputs_Sept26.xlsx')
dataTableOut_df.to_excel('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ModelOutputs_Sept26.xlsx')
#%% Save as geotiff
tiffPlot = dataIN_gp['HgS1']
# Blank zero sed
# tiffPlot[tiffPlot == 0] = np.nan
# Blank outside of hull
# sedPlot[~regularMask2[i]] = np.nan
# Convert to xarray dataset
tiffPlot_xr = xr.DataArray(data=tiffPlot.T,
dims=["x", "y"],
coords=dict(
x=(["x"], X2[i][0, :]),
y=(["y"], Y2[i][:, 0])))
# Transpose for geotiff writing
tiffPlot_xr = tiffPlot_xr.transpose('y', 'x')
tiffPlot_xr.rio.write_crs("epsg:32615", inplace=True)
tiffPlot_xr.rio.to_raster('C:/Users/arey/files/Projects/Grassy Narrows/Slides/' +
runLog['Run Name'][i] + '_mag.tif')
tiffPlot = shear2[i]
# Blank zero sed
tiffPlot[tiffPlot == 0] = np.nan
# Blank outside of hull
# sedPlot[~regularMask2[i]] = np.nan
# Convert to xarray dataset
tiffPlot_xr = xr.DataArray(data=tiffPlot.T,
dims=["x", "y"],
coords=dict(
x=(["x"], X2[i][0, :]),
y=(["y"], Y2[i][:, 0])))
# Transpose for geotiff writing
tiffPlot_xr = tiffPlot_xr.transpose('y', 'x')
tiffPlot_xr.rio.write_crs("epsg:32615", inplace=True)
tiffPlot_xr.rio.to_raster('C:/Users/arey/files/Projects/Grassy Narrows/Slides/' +
runLog['Run Name'][i] + '_shear.tif')

View File

@ -36,7 +36,8 @@ runLog = pd.read_excel(pth.as_posix(), "Test runs", skiprows=1)
dataPath = "FlowFM_map.nc"
#%% Import using DFM functions
modelPlot = [24]
modelPlot = [18, 35]
# stormTime = [datetime.datetime(2005, 8, 7, 0, 0, 0)]
@ -57,8 +58,8 @@ mag2 = [None] * (max(modelPlot)+1)
shear2 = [None] * (max(modelPlot)+1)
for i in modelPlot:
file_nc_map = os.path.join(runLog['Run Location'][i], runLog['Run Name'][i], runLog['Run Name'][i][0:4] + '.dsproj_data',
'FlowFM', 'output', 'FlowFM_map.nc')
file_nc_map = os.path.join(runLog['Run Location'][i],
'output','FlowFM_map.nc')
tSteps = get_timesfromnc(file_nc=file_nc_map, varname='time')
@ -102,8 +103,8 @@ for i in modelPlot:
# Create a list of grid points where the water depth is greater than zero
wd_mask = (wd[i][:] > 0)
alphaPoints = list(map(tuple, np.column_stack((facex[i][wd_mask.tolist()],
facey[i][wd_mask.tolist()])).data))
alphaPoints = list(map(tuple, np.column_stack((facex[i][wd_mask[0].tolist()],
facey[i][wd_mask[0].tolist()])).data))
# Find the concave hull
alphaPoly = alphashape.alphashape(alphaPoints, 0.02)
@ -120,14 +121,14 @@ for i in modelPlot:
# maskland_dist=50, method='nearest')
X2[i], Y2[i], mag2[i] = scatter_to_regulargrid(xcoords=facex[i], ycoords=facey[i],
ncellx=8000, ncelly=6000, values=mag_mean[i][0, :],
ncellx=4000, ncelly=3000, values=mag_mean[i][0, :],
maskland_dist=50, method='nearest')
X2[i], Y2[i], shear2[i] = scatter_to_regulargrid(xcoords=facex[i], ycoords=facey[i],
ncellx=8000, ncelly=6000, values=shear[i][0, :],
ncellx=4000, ncelly=3000, values=shear[i][0, :],
maskland_dist=50, method='nearest')
#%% Save as geotiff
modelPlot = [24]
modelPlot = [18, 35]
sedIDX = 0
@ -151,7 +152,7 @@ for i in modelPlot:
tiffPlot_xr = tiffPlot_xr.transpose('y', 'x')
tiffPlot_xr.rio.write_crs("epsg:32615", inplace=True)
tiffPlot_xr.rio.to_raster('C:/Users/arey/files/Projects/Grassy Narrows/Slides/' +
runLog['Run Name'][i] + '_mag.tif')
runLog['Run Title'][i] + '_mag.tif')
tiffPlot = shear2[i]
@ -173,7 +174,7 @@ for i in modelPlot:
tiffPlot_xr = tiffPlot_xr.transpose('y', 'x')
tiffPlot_xr.rio.write_crs("epsg:32615", inplace=True)
tiffPlot_xr.rio.to_raster('C:/Users/arey/files/Projects/Grassy Narrows/Slides/' +
runLog['Run Name'][i] + '_shear.tif')
runLog['Run Title'][i] + '_shear.tif')
#%% Plot using DFM functions
modelPlot = [24]
@ -230,4 +231,6 @@ for i in modelPlot:
# fig.savefig(
# 'C:/Users/arey/files/Projects/LSU/Modelling/Figures/' +
# 'StormVelDisWide.png', bbox_inches='tight', dpi=400)
# 'StormVelDisWide.png', bbox_inches='tight', dpi=400)

View File

@ -0,0 +1,305 @@
#%% Plotting script for LSU D3D data
# Alexander Rey, 2022
import os
import pandas as pd
import geopandas as gp
import netCDF4 as nc
import math
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import matplotlib.cm as cm
import datetime as datetime
from scipy.interpolate import LinearNDInterpolator, interp1d
from dfm_tools.get_nc import get_netdata, get_ncmodeldata, plot_netmapdata
from dfm_tools.get_nc_helpers import get_timesfromnc, get_ncfilelist, get_hisstationlist, get_ncvardimlist, get_timesfromnc
import cartopy.crs as ccrs
import contextily as ctx
from dfm_tools.regulargrid import scatter_to_regulargrid
import pathlib as pl
import alphashape
import rioxarray
import xarray as xr
from shapely import geometry, ops
#%% Read in centerline shapefile
river_centerline = gp.read_file('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/03_Data/02_Physical/16_Waterline/Centerline_for_Modelling_UTMZ15.shp')
river_centerlineExploded = river_centerline.explode(ignore_index=True)
river_centerlineExploded.reset_index(inplace=True)
tempMulti = river_centerlineExploded.iloc[[5,0,1,2,3,4,6,7,9], 4]
# Put the sub-line coordinates into a list of sublists
outcoords = [list(i.coords) for i in tempMulti]
# Flatten the list of sublists and use it to make a new line
river_centerline_merge = geometry.LineString([i for sublist in outcoords for i in sublist])
river_centerline_merge_gpd = gp.GeoSeries(river_centerline_merge)
river_centerline_merge_gpd2 =\
gp.GeoDataFrame(geometry=gp.points_from_xy(
river_centerline_merge.xy[0], river_centerline_merge.xy[1], crs="EPSG:32615"))
# Add distance along centerline
river_centerline_merge_gpd2['DistanceFromPrevious'] = river_centerline_merge_gpd2.distance(river_centerline_merge_gpd2.shift(1))
river_centerline_merge_gpd2['RiverKM'] = river_centerline_merge_gpd2['DistanceFromPrevious'].cumsum()
river_centerline_merge_gpd2.iloc[0, 1] = 0
river_centerline_merge_gpd2.iloc[0, 2] = 0
#%% Save River Centerline as Shapefile
river_centerline_merge_gpd2.to_file('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/03_Data/02_Physical/16_Waterline/Centerline_for_Modelling_UTMZ15_RiverKM.shp')
#%% Read Model Log
pth = pl.Path("//srv-ott3.baird.com/", "Projects", "12828.101 English Wabigoon River", "06_Models", "00_Delft3D", "ModelRuns.xlsx")
runLog = pd.read_excel(pth.as_posix(), "Test runs", skiprows=1)
dataPath = "FlowFM_map.nc"
#%% Import using DFM functions
#modelPlot = [35, 36, 37, 39, 40, 41, 42, 43, 44, 45]
modelPlot = [18, 35]
dataIN_gp = [None] * (max(modelPlot)+1)
dataOUT = [None] * (max(modelPlot)+1)
for i in modelPlot:
if (i == 39) or (i == 40) or (i == 41) or (i == 42) or (i == 43) or (i == 44) or (i == 45):
Flume = True
else:
Flume = False
# Find Map File in Output Folder
outputFiles = os.listdir(os.path.join(runLog['Run Location'][i],
'output'))
for file in outputFiles:
if file.endswith("map.nc"):
mapFile = file
file_nc_map = os.path.join(runLog['Run Location'][i],
'output', mapFile)
tSteps = get_timesfromnc(file_nc=file_nc_map, varname='time')
# Find nearest time step to desired time
# tStep = []
# for s in stormTime:
# abs_deltas_from_target_date = np.absolute(tSteps - s)
# tStep.append(np.argmin(abs_deltas_from_target_date))
# Otherwise, define a timestep
tStep = len(tSteps)-2
# Get Var info
vars_pd, dims_pd = get_ncvardimlist(file_nc=file_nc_map)
# Define extraction variables
# if (i == 37) or (i == 39):
# dataVars = ['s1', 'waterdepth', 'taus', 'ucmaga']
# else:
dataVars = ['s1', 'waterdepth', 'taus', 'ucmag']
# Create Pandas Output Array
dataIN_x = get_ncmodeldata(file_nc=file_nc_map,
varname='mesh2d_face_x',
silent=True)
dataIN_y = get_ncmodeldata(file_nc=file_nc_map,
varname='mesh2d_face_y',
silent=True)
dataIN_gp[i] = gp.GeoDataFrame(geometry=gp.points_from_xy(dataIN_x, dataIN_y, crs="EPSG:32615"))
# Extract variables
for vIDX, v in enumerate(dataVars):
# Extract data from NetCDF file
if len(vars_pd.loc['mesh2d_' + v, 'shape']) == 3:
dataIN = get_ncmodeldata(file_nc=file_nc_map,
varname='mesh2d_' + v, timestep=len(tSteps) - 1,
silent=True, layer='all')
dataIN = dataIN.mean(axis=2)
else:
dataIN = get_ncmodeldata(file_nc=file_nc_map,
varname='mesh2d_' + v, timestep=len(tSteps) - 1,
silent=True) # , layer=0
dataIN_gp[i][v] = np.array(dataIN[0][:])
print(v)
# Extract Bed Level
dataIN = get_ncmodeldata(file_nc=file_nc_map,
varname='mesh2d_flowelem_bl',
silent=True) # , layer=0
dataIN_gp[i]['mesh2d_flowelem_bl'] = np.array(dataIN[:])
print('mesh2d_flowelem_bl')
# Create Dataframe along river centerline for plotting
if Flume == True:
dataOUT[i] = dataIN_gp[i]
dataOUT[i]['RiverKM'] = dataIN_x
else:
dataOUT[i] = gp.sjoin_nearest(river_centerline_merge_gpd2, dataIN_gp[i], how='left', max_distance=100)
dataOUT[i].set_index('RiverKM', inplace=True)
#%% Key Profile Sites
KeySites = dict()
KeySites['Dryden'] = 0
KeySites['Rugby Creek'] = 2650
KeySites['Eagle River'] = 44200
KeySites['Beaver Creek'] = 38600
KeySites['Clay Lake'] = 90000
KeySites['Gullwing River'] = 20100
KeySites['Cowamula'] = 57400
# KeySites['Buller Creek'] = 66000
# Structure Sites
StructureSites = dict()
StructureSites['Mutrie Lake'] = 54214
StructureSites['Highway 105'] = 64998
StructureSites['Quibell'] = 70000
StructureSites['Wainwright Dam'] = 6013
#%% Plot Data along centerline
fig, axes = plt.subplots(nrows=5, ncols=1, figsize=(9, 9), sharex=True)
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
modelPlot = [35, 37, 40, 41, 42, 43, 44]
modelPlot = [40, 41, 42, 43, 44]
modelPlot = [35, 37, 40, 45]
modelPlot = [18, 35]
for i in modelPlot:
# S1
ax[0].set_title('Water Level')
ax[0].plot(dataOUT[i].index, dataOUT[i]['s1'], linewidth=3, label=runLog['Run Title'][i])
ax[0].set_ylim([330, 345])
# ax[0].set_xlim([0, 10000])
ax[0].set_xlim([0, 90000])
ax[0].legend(loc='upper right')
ax[0].set_ylabel('Water Level [m]')
# Shear
ax[1].set_title('Shear Stress')
ax[1].plot(dataOUT[i].index, dataOUT[i]['taus'], linewidth=3, label=runLog['Run Title'][i])
ax[1].set_ylim([0, 2])
ax[1].legend(loc='upper right')
ax[1].set_ylabel('Shear Stress [N/m^2]')
# Water Depth
ax[2].set_title('Water Depth')
ax[2].plot(dataOUT[i].index, dataOUT[i]['waterdepth'], linewidth=3, label=runLog['Run Title'][i])
ax[2].set_ylim([0, 20])
ax[2].legend(loc='upper right')
ax[2].set_ylabel('Water Depth [m]')
# Bed Level
ax[3].set_title('Bed Level')
ax[3].plot(dataOUT[i].index, dataOUT[i]['mesh2d_flowelem_bl'], linewidth=3, label=runLog['Run Title'][i])
# ax[3].set_ylim([320, 340])
ax[3].legend(loc='upper right')
ax[3].set_ylabel('Bed Level [m]')
# Velocity
ax[4].set_title('Depth Averaged Velocity')
ax[4].plot(dataOUT[i].index, dataOUT[i]['ucmag'], linewidth=3, label=runLog['Run Title'][i])
ax[4].set_ylim([0, 2])
ax[4].legend(loc='upper right')
ax[4].set_ylabel('Velocity [m/s]')
ax[4].set_xlabel('Distance Along River [m]')
plt.show()
# fig.savefig('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/12_WAQ/FullDomainFigures/DomainHDCompare.png',
# bbox_inches='tight', dpi=200)
fig.savefig('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/07_PostProcessing/HD_Profile_Compare.png',
bbox_inches='tight', dpi=200)
#%% Plot select data along centerline
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(9, 6), sharex=True)
ax = axes.flat
# fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(9, 4), sharex=True)
# ax[0] = axes
fig.patch.set_facecolor('white')
# Expand plot to allow space for legend
# fig.subplots_adjust(left=-2)
modelPlot = [18, 35]
for i in modelPlot:
# Velocity
ax[0].set_title('Depth Averaged Velocity Along River Centerline')
ax[0].plot(dataOUT[i].index / 1000, dataOUT[i]['ucmag'], linewidth=3, label=runLog['Run Title'][i], zorder=1)
ax[0].set_ylim([0, 2])
ax[0].set_xlim([0, 90])
ax[0].set_ylabel('Velocity [m/s]')
# Shear
ax[1].set_title('Shear Stress Along River Centerline')
ax[1].plot(dataOUT[i].index/1000, dataOUT[i]['taus'], linewidth=3, label=runLog['Run Title'][i], zorder=1)
ax[1].set_ylim([0, 2])
ax[1].set_xlim([0, 90])
ax[1].set_ylabel('Shear Stress [N/$\mathregular{m^2}$]')
ax[1].set_xlabel('Distance Along River [km]')
# Add in structure sites and key sites
for site in KeySites.keys():
for axID in range(0, 2):
if site == 'Dryden':
ax[axID].scatter(KeySites[site]/1000, 1, marker='^', color='k', s=30, label='Inflow', zorder=2)
else:
ax[axID].scatter(KeySites[site]/1000, 1, marker='^', color='k', s=30, zorder=10)
ax[axID].text(KeySites[site]/1000, 1.1, site, rotation=45)
for structureSite in StructureSites.keys():
for axID in range(0, 2):
if structureSite == 'Mutrie Lake':
ax[axID].scatter(StructureSites[structureSite]/1000, 1, marker='s', color='k', label='Hydraulic Structure',
s=30, zorder=10)
else:
ax[axID].scatter(StructureSites[structureSite]/1000, 1, marker='s', color='k', s=30, zorder=10)
ax[axID].text(StructureSites[structureSite] / 1000, 1.1, structureSite, rotation=45)
# Add remobilization threshold lines
ax[0].plot([0, 90000], [0.1, 0.1], linestyle='--', linewidth=1, label='Silt Remobilization Threshold', zorder=5)
ax[0].plot([0, 90000], [1.3, 1.3], linestyle='--', linewidth=1, label='Sand Remobilization Threshold', zorder=5)
ax[0].legend(bbox_to_anchor=(1.01, 0.5), loc="upper left")
fig.tight_layout(pad=1)
plt.show()
# fig.savefig('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/07_PostProcessing/HD_Profile_Compare_DAV.png',
# bbox_inches='tight', dpi=300)
fig.savefig('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/07_PostProcessing/HD_Profile_Compare_RevD.png',
bbox_inches='tight', dpi=300)

999
EWR_D3D/EWR_Plotting.py Normal file
View File

@ -0,0 +1,999 @@
#%% Plotting EWR Flume Tests
# Alexander Rey, 2022
#%% Import
import pandas as pd
import numpy as np
import math
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import geopandas as gp
gp.options.use_pygeos = True
from shapely import geometry, ops
# Map Plotting
import cartopy.crs as ccrs
import contextily as ctx
# Interpolation
import scipy as sp
from scipy.interpolate import griddata
from scipy.interpolate import LinearNDInterpolator, interp1d
from dfm_tools.get_nc import get_netdata, get_ncmodeldata, plot_netmapdata
from dfm_tools.get_nc_helpers import get_timesfromnc, get_ncfilelist, get_hisstationlist, get_ncvardimlist, get_timesfromnc
import pathlib as pl
import datetime
#%% Read Model Log
# Load the model-run registry ("Sensitivity" sheet) from the project share; each row
# describes one Delft3D run (title, location, ...) referenced by index elsewhere.
# runLog = pd.read_excel("Y:/12828.101 English Wabigoon River/06_Models/00_Delft3D/ModelRuns.xlsx", "Sensitivity")
pth = pl.Path("//srv-ott3.baird.com/", "Projects", "12828.101 English Wabigoon River", "06_Models", "00_Delft3D", "ModelRuns.xlsx")
runLog = pd.read_excel(pth.as_posix(), "Sensitivity")
# Filename of the map output inside each run directory
dataPath = "FlowFM_map.nc"
#%% Read in centerline shapefile
# Build a single merged river centerline (UTM zone 15N) and a per-vertex
# GeoDataFrame with cumulative distance along the river ('RiverKM', in metres).
river_centerline = gp.read_file('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/03_Data/02_Physical/16_Waterline/Centerline_for_Modelling_UTMZ15.shp')
river_centerlineExploded = river_centerline.explode(ignore_index=True)
river_centerlineExploded.reset_index(inplace=True)
# Re-order the exploded parts into downstream order; column 4 is the geometry
# column after reset_index. Part 8 is skipped -- presumably not part of the main
# stem; TODO confirm against the shapefile.
tempMulti = river_centerlineExploded.iloc[[5,0,1,2,3,4,6,7,9], 4]
# Put the sub-line coordinates into a list of sublists
outcoords = [list(i.coords) for i in tempMulti]
# Flatten the list of sublists and use it to make a new line
river_centerline_merge = geometry.LineString([i for sublist in outcoords for i in sublist])
# NOTE(review): this GeoSeries does not appear to be used below -- kept as-is
river_centerline_merge_gpd = gp.GeoSeries(river_centerline_merge)
# One point per centerline vertex
river_centerline_merge_gpd2 =\
    gp.GeoDataFrame(geometry=gp.points_from_xy(
        river_centerline_merge.xy[0], river_centerline_merge.xy[1], crs="EPSG:32615"))
# Add distance along centerline
river_centerline_merge_gpd2['DistanceFromPrevious'] = river_centerline_merge_gpd2.distance(river_centerline_merge_gpd2.shift(1))
river_centerline_merge_gpd2['RiverKM'] = river_centerline_merge_gpd2['DistanceFromPrevious'].cumsum()
# First vertex has no predecessor: zero its distance and cumulative distance
river_centerline_merge_gpd2.iloc[0, 1] = 0
river_centerline_merge_gpd2.iloc[0, 2] = 0
#%% Import Validation Data
# Read the combined field-observation dataset, georeference it, attach the
# nearest centerline distance, then extract sediment total-Hg samples in ng/g.
obs_IN = pd.read_csv("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/output/combined dataset-SGJ.csv")
# Add Datetime
obs_IN['DateTime'] = pd.to_datetime(obs_IN.Sampledate_x)
# Convert to geodataframe (WGS84 lon/lat -> UTM zone 15N)
obs_gdf = gp.GeoDataFrame(obs_IN, geometry=gp.points_from_xy(
    obs_IN.loc[:, 'Longitude'], obs_IN.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
# Attach 'RiverKM' from the nearest centerline vertex (within 250 m) and reset index
obs_gdf = gp.sjoin_nearest(river_centerline_merge_gpd2, obs_gdf, how='right', max_distance=250).reset_index()
# Sediment Hg GeoDataFrame where media is Sediment
obsMask = (((obs_gdf.Media_x == 'SED') | (obs_gdf.Media_x == 'SOIL')) &
           ((obs_gdf.Parameter == 'Mercury') | (obs_gdf.Parameter == 'Mercury (Hg) Total')
            | (obs_gdf.Parameter == 'Total Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)')) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())
# .copy() so the unit-conversion assignments below write to an independent frame
# rather than a view of obs_gdf (avoids SettingWithCopyWarning / silently lost writes)
obs_HgSed = obs_gdf.loc[obsMask, :].copy()
# Adjust Units -> ng/g
obs_HgSed.loc[obs_HgSed.Unit == 'NG/G', 'Sample_NG/G'] = obs_HgSed.Samplevalue
obs_HgSed.loc[obs_HgSed.Unit == 'ng/g', 'Sample_NG/G'] = obs_HgSed.Samplevalue
obs_HgSed.loc[obs_HgSed.Unit == 'UG/G', 'Sample_NG/G'] = obs_HgSed.Samplevalue * 1000
obs_HgSed.loc[obs_HgSed.Unit == 'ug/g', 'Sample_NG/G'] = obs_HgSed.Samplevalue * 1000
# Fix: 1 mg/kg = 1 ug/g = 1000 ng/g, so mg/kg converts with *1000 (was *0.001)
obs_HgSed.loc[obs_HgSed.Unit == 'MG/KG', 'Sample_NG/G'] = obs_HgSed.Samplevalue * 1000
obs_HgSed.loc[obs_HgSed.Unit == 'mg/kg', 'Sample_NG/G'] = obs_HgSed.Samplevalue * 1000
# obs_HgSed.to_file('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/HgSedDB2.geojson', driver="GeoJSON")
# Group sediment core data to take average/min/max per site and sampling date
obs_HgSed_Avg = obs_HgSed.groupby(['Sitecode', 'DateTime']).agg({'Sample_NG/G': ['mean', 'min', 'max'],
                                                                'RiverKM': ['mean'],
                                                                'Longitude': ['mean'],
                                                                'Latitude': ['mean'],
                                                                'DateTime': ['mean']}).reset_index()
# Flatten the MultiIndex columns: ('Sample_NG/G', 'mean') -> 'Sample_NG/G|mean'
obs_HgSed_Avg.columns = obs_HgSed_Avg.columns.map('|'.join).str.strip('|')
# Fix Names
obs_HgSed_Avg.rename(columns={"RiverKM|mean": "RiverKM", "Longitude|mean": "Longitude",
                              "Latitude|mean": "Latitude", "DateTime|mean": "DateTime"}, inplace=True)
# Create new GeoDataFrame
obs_HgSed_Avg = gp.GeoDataFrame(obs_HgSed_Avg, geometry=gp.points_from_xy(
    obs_HgSed_Avg.loc[:, 'Longitude'], obs_HgSed_Avg.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
# Water Hg GeoDataFrame where media is SurfaceWater (SW)
obsMask = (((obs_gdf.Media_x == 'SW')) &
           ((obs_gdf.Parameter == 'Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)-Total')
            | (obs_gdf.Parameter == 'Total Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)')) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())
# .copy() so the unit conversions below write to an independent frame rather than
# a view of obs_gdf (avoids SettingWithCopyWarning / silently lost writes)
obs_HgWater = obs_gdf.loc[obsMask, :].copy()
# Adjust Units -> ng/L (ug/L * 1e3, mg/L * 1e6)
obs_HgWater.loc[obs_HgWater.Unit == 'NG/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue
obs_HgWater.loc[obs_HgWater.Unit == 'ng/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue
obs_HgWater.loc[obs_HgWater.Unit == 'UG/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue * 1000
obs_HgWater.loc[obs_HgWater.Unit == 'ug/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue * 1000
obs_HgWater.loc[obs_HgWater.Unit == 'MG/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue * 1000000
obs_HgWater.loc[obs_HgWater.Unit == 'mg/L', 'Sample_NG/L'] = obs_HgWater.Samplevalue * 1000000
# Import Sediment MeHg
# Sediment MeHg GeoDataFrame
obsMask = (((obs_gdf.Media_x == 'SED') | (obs_gdf.Media_x == 'SOIL')) &
           ((obs_gdf.Parameter == 'Methylmercury (as MeHg)') | (obs_gdf.Parameter == 'Methyl mercury')) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())
# .copy() so the unit conversions below write to an independent frame rather than
# a view of obs_gdf (avoids SettingWithCopyWarning / silently lost writes)
obs_MeHgSed = obs_gdf.loc[obsMask, :].copy()
# Adjust Units -> ng/g
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'NG/G', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'ng/g', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'UG/G', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue * 1000
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'ug/g', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue * 1000
# Fix: 1 mg/kg = 1 ug/g = 1000 ng/g, so mg/kg converts with *1000 (was *0.001)
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'MG/KG', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue * 1000
obs_MeHgSed.loc[obs_MeHgSed.Unit == 'mg/kg', 'Sample_NG/G'] = obs_MeHgSed.Samplevalue * 1000
# Group sediment core data to take average/min/max per site and sampling date
obs_MeHgSed_Avg = obs_MeHgSed.groupby(['Sitecode', 'DateTime']).agg({'Sample_NG/G': ['mean', 'min', 'max'],
                                                                    'RiverKM': ['mean'],
                                                                    'Longitude': ['mean'],
                                                                    'Latitude': ['mean'],
                                                                    'DateTime': ['mean']}).reset_index()
# Flatten the MultiIndex columns: ('Sample_NG/G', 'mean') -> 'Sample_NG/G|mean'
obs_MeHgSed_Avg.columns = obs_MeHgSed_Avg.columns.map('|'.join).str.strip('|')
# Fix Names
obs_MeHgSed_Avg.rename(columns={"RiverKM|mean": "RiverKM", "Longitude|mean": "Longitude",
                                "Latitude|mean": "Latitude", "DateTime|mean": "DateTime"}, inplace=True)
# Create new GeoDataFrame
obs_MeHgSed_Avg = gp.GeoDataFrame(obs_MeHgSed_Avg, geometry=gp.points_from_xy(
    obs_MeHgSed_Avg.loc[:, 'Longitude'], obs_MeHgSed_Avg.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
# MeHg in Water (surface-water samples)
obsMask = (((obs_gdf.Media_x == 'SW')) &
           ((obs_gdf.Parameter == 'Methylmercury (as MeHg)') | (obs_gdf.Parameter == 'Methyl mercury')) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())
# .copy() so the unit conversions below write to an independent frame rather than
# a view of obs_gdf (avoids SettingWithCopyWarning / silently lost writes)
obs_MeHgWater = obs_gdf.loc[obsMask, :].copy()
# Adjust Units -> ng/L (ug/L * 1e3, mg/L * 1e6)
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'NG/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'ng/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'UG/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue * 1000
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'ug/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue * 1000
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'MG/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue * 1000000
obs_MeHgWater.loc[obs_MeHgWater.Unit == 'mg/L', 'Sample_NG/L'] = obs_MeHgWater.Samplevalue * 1000000
#%% Plot Observations
# Map of site-averaged sediment Hg on a Mapbox basemap (UTM coordinates).
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(30, 15))
axes.set_xlim(494649, 513647)
axes.set_ylim(5514653, 5525256)
# Add basemap
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
# NOTE(review): the basemap is drawn in EPSG:26915 (NAD83 UTM 15N) while the data
# are EPSG:32615 (WGS84 UTM 15N); the grids differ slightly -- confirm intended CRS.
ctx.add_basemap(axes, source=mapbox, crs='EPSG:26915')
# Fix: GeoDataFrame.plot takes the target axes via `ax=`, not `axes=`; with the old
# keyword the points were not drawn on the basemap axes.
obs_HgSed_Avg.plot(column='Sample_NG/G|mean', ax=axes, vmin=0, vmax=10000)
plt.show()
#%% Plot Profiles
# Sediment Hg vs distance along river and sample depth; marker colour is the
# concentration in ng/g (clipped at vmax=20000 for contrast).
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(7, 7))
obs_HgSed.plot.scatter('RiverKM', 'TopDepth_x', 12, 'Sample_NG/G', ax=axes, vmax=20000)
# Depth increases downward on the y axis
axes.invert_yaxis()
axes.set_xlabel('Distance Along River [m]')
axes.set_ylabel('Sample Depth [m]')
axes.set_xlim([0, 100000])
plt.show()
# fig.savefig("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/Figures/HgDepthWide.png",
#             bbox_inches='tight', dpi=200)
#%% Interpolate Hg to grid
# Nearest-neighbour interpolate observed (and one synthetic) Hg/MeHg fields onto
# the model bathymetry points and write xyz files for use as model coverages.
# Read in model points and roughness
bathy_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Bed_Level.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)
rough_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Roughness.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)
# Interpolate roughness onto the bathymetry points once: it does not depend on the
# loop variable, so hoisting avoids recomputing the same interpolation five times.
RoughInterp = griddata(np.array([rough_xyz.x, rough_xyz.y]).T, rough_xyz.z,
                       np.array([bathy_xyz.x, bathy_xyz.y]).T, method='nearest')
# Loop through parameters: 0-3 are observed datasets, 4 is a synthetic profile
for i in range(0, 5):
    if i == 0:
        dataOUT = obs_HgSed_Avg['Sample_NG/G|mean']
        dataOutx = obs_HgSed_Avg.geometry.x
        dataOuty = obs_HgSed_Avg.geometry.y
        dateFileName = 'HgSed_meanDB'
    elif i == 1:
        dataOUT = obs_MeHgSed_Avg['Sample_NG/G|mean']
        dataOutx = obs_MeHgSed_Avg.geometry.x
        dataOuty = obs_MeHgSed_Avg.geometry.y
        dateFileName = 'MeHgSed_meanDB'
    elif i == 2:
        dataOUT = obs_HgWater['Sample_NG/L']
        dataOutx = obs_HgWater.geometry.x
        dataOuty = obs_HgWater.geometry.y
        dateFileName = 'HgWater_allDB'
    elif i == 3:
        dataOUT = obs_MeHgWater['Sample_NG/L']
        dataOutx = obs_MeHgWater.geometry.x
        dataOuty = obs_MeHgWater.geometry.y
        dateFileName = 'MeHgWater_allDB'
    elif i == 4:
        # Synthetic exponential decay profile from Reed's Sheet: 10 -> 1 ug/g over ~RiverKM
        dataOUT = ((10 - 1) * np.exp(-0.08 * np.array(river_centerline_merge_gpd2.loc[:, 'RiverKM']) / 1000) + 1) * 1000
        dataOutx = river_centerline_merge_gpd2.geometry.x
        dataOuty = river_centerline_merge_gpd2.geometry.y
        dateFileName = 'ReedHgSed'
    # Nearest-neighbour interpolation of the selected dataset onto the model points
    gridInterp = griddata(np.array([dataOutx, dataOuty]).T, dataOUT, np.array([bathy_xyz.x, bathy_xyz.y]).T,
                          method='nearest')
    gridInterpOut = np.vstack((bathy_xyz.x, bathy_xyz.y, gridInterp))
    gridInterpOut = np.transpose(gridInterpOut)
    # Set wetland cells to zero. Roughness == 0.05 flags wetlands; nearest-neighbour
    # interpolation returns the exact source values, so exact float equality is safe here.
    gridInterpOut[RoughInterp == 0.05, 2] = 0
    np.savetxt('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/' + dateFileName + '.xyz',
               gridInterpOut, delimiter=" ")
#%% Import using DFM Functions
# Index into runLog for the run being plotted (used by the commented path below)
modelPlot = 27
## Define import path -- earlier run locations kept for reference
# file_nc_map = pl.Path(runLog['Run Location'][modelPlot], 'Water_Quality', 'output', 'deltashell_map.nc')
# file_nc_map = pl.Path("//ott-diomedes.baird.com/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_42_WAQ.dsproj_data/Water_Quality/output/deltashell_map.nc")
# file_nc_map = pl.Path("//ott-diomedes.baird.com/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_42B_WAQ_Coverage.dsproj_data/Water_Quality/output/deltashell_map.nc")
# file_nc_map = pl.Path("//ott-diomedes.baird.com/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_42C_WAQ_Coverage2.dsproj_data/Water_Quality/output/deltashell_map.nc")
#file_nc_map = pl.Path("//ott-diomedes.baird.com/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_42E_WAQ_DIDO.dsproj_data/Water_Quality/output/deltashell_map.nc")
# file_nc_map = pl.Path("//ott-diomedes/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_43_OceanMesh5_B.dsproj_data/Water_Quality/output/deltashell_map.nc")
# file_nc_map = pl.Path("D:/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_43_OceanMesh5_D.dsproj_data/Water_Quality/output/deltashell_map.nc")
#file_nc_map = pl.Path("D:/Projects/12828.101_EWR_Delft3FM/TR_42_WAQ/TR_43_OceanMesh5_E.dsproj_data/Water_Quality/output/deltashell_map.nc")
# file_nc_map = pl.Path("//oak-inundation/D/Projects/12828.101_EWR_Delft3FM/TR_44_OceanMesh5.dsproj_data/Water_Quality/output/deltashell_map.nc")
# file_nc_map = pl.Path("//oak-inundation/D/Projects/12828.101_EWR_Delft3FM/Flume/RiverFlumeHD50.dsproj_data/Water_Quality/output/deltashell_map.nc")
file_nc_map = pl.Path("//oak-inundation/D/Projects/12828.101_EWR_Delft3FM/Flume/RiverFlumeArg_Test2/deltashell_map.nc")
# Output time steps of the WAQ map file
tSteps = get_timesfromnc(file_nc=file_nc_map, varname='time')
# Print file variables
A = get_ncvardimlist(file_nc_map.as_posix())
# print(A)
# Define extraction variables (D-WAQ substance / process-output names).
# Naming appears to follow: Fr* = mass fraction, f* = flux, IM1-3 = inorganic
# sediment classes, Hg/Mm = total and methyl mercury, S1/S2 suffix = sediment
# layers, O suffix = process rate output -- TODO confirm against the WAQ setup.
dataVars = ['FrHgIM1', 'FrHgIM2', 'FrHgIM3', 'FrHgDOC', 'FrHgPOC', 'FrHgPHYT', 'FrHgDis',
            'FrHgIM1S1', 'FrHgIM2S1', 'FrHgIM3S1', 'FrHgDOCS1', 'FrHgPOCS1', 'FrHgPHYTS1', 'FrHgDisS1',
            'FrHgIM1S2', 'FrHgIM2S2', 'FrHgIM3S2', 'FrHgDOCS2', 'FrHgPOCS2', 'FrHgPHYTS2', 'FrHgDisS2',
            'FrMmIM1', 'FrMmIM2', 'FrMmIM3', 'FrMmDOC', 'FrMmPOC', 'FrMmPHYT', 'FrMmDis',
            'FrMmIM1S1', 'FrMmIM2S1', 'FrMmIM3S1', 'FrMmDOCS1', 'FrMmPOCS1', 'FrMmPHYTS1', 'FrMmDisS1',
            'FrMmIM1S2', 'FrMmIM2S2', 'FrMmIM3S2', 'FrMmDOCS2', 'FrMmPOCS2', 'FrMmPHYTS2', 'FrMmDisS2',
            'IM1', 'IM2', 'IM3', 'Hg', 'Mm', 'H0', 'POC1', 'POC2',
            'IM1S1', 'IM2S1', 'IM3S1', 'HgS1', 'MmS1', 'DetCS1', 'OOCS1',
            'IM1S2', 'IM2S2', 'IM3S2', 'HgS2', 'MmS2', 'DetCS2', 'OOCS2',
            'fResS1IM1', 'fSedIM1', 'fResS1IM2', 'fSedIM2',
            'fResS1DetC', 'fSedPOC1', 'fResS1OOC', 'fSedPOC2',
            'fSedDM', 'fResS1DM', 'fDigS1DM', 'fBurS1DM', 'fDigS2DM', 'fBurS2DM',
            'fSedHg', 'fResS1Hg', 'fSedMm', 'fResS1Mm',
            'HgHgS1O', 'MmMmS1O',
            'HgMmO', 'HgS1MmS1O', 'MmHgO', 'MmS1HgS1O',
            'fBurS1Hg', 'fDigS1Hg', 'fBurS1Mm', 'fDigS1Mm',
            'oPhoDeg', 'oPhoOxy', 'oPhoRed',
            'f_minPOC1', 'MnODCS1', 'Continuity',
            'volume', 'Depth']
# Define if sediment or water variable:
# variables whose name contains one of these tokens are read as sediment variables
# Read depth average for water and bottom layer for sediment
sedTerms = ['S1', 'S2', 'Sed', 'Res', 'Bur', 'Dig']
# Create Pandas Output Array: one GeoDataFrame row per mesh face, one column per variable
dataIN_x = get_ncmodeldata(file_nc=file_nc_map.as_posix(),
                           varname='mesh2d_face_x',
                           silent=True)
dataIN_y = get_ncmodeldata(file_nc=file_nc_map.as_posix(),
                           varname='mesh2d_face_y',
                           silent=True)
dataIN_gp = gp.GeoDataFrame(geometry=gp.points_from_xy(dataIN_x, dataIN_y, crs="EPSG:32615"))
# Extract variables at the second-to-last output time step
for vIDX, v in enumerate(dataVars):
    # Extract data from NetCDF file
    # If sediment variable read full 3d variable and then use the values from the bottom layer
    if any(s in v for s in sedTerms):
        dataIN = get_ncmodeldata(file_nc=file_nc_map.as_posix(),
                                 varname='mesh2d_' + v, timestep=len(tSteps) - 2,
                                 silent=True)  # , #mesh2d_agg_2d_
        # NOTE(review): index [5] appears to select a fixed (bottom?) layer of the
        # 3d variable -- confirm against the model's layer ordering
        dataIN_gp[v] = np.array(dataIN[0][5][:])
    # Else (Water) read 2d variable
    else:
        dataIN = get_ncmodeldata(file_nc=file_nc_map.as_posix(),
                                 varname='mesh2d_2d_' + v, timestep=len(tSteps) - 2,
                                 silent=True)  # , #mesh2d_agg_2d_
        dataIN_gp[v] = np.array(dataIN[0][:])
    # Add data to Pandas Array -- alternative indexing kept for reference:
    # For 2D Runs
    # dataIN_gp[v] = np.array(dataIN[0][0][:])
    # For 3D Runs
    # dataIN_gp[v] = np.array(dataIN[0][:])
    print(v)
#
#%% Create Dataframe along river centerline for plotting
# (full-domain runs) nearest model cell within 100 m of each centerline vertex,
# indexed by distance along the river
dataOUT = gp.sjoin_nearest(river_centerline_merge_gpd2, dataIN_gp, how='left', max_distance=100)
dataOUT.set_index('RiverKM', inplace=True)
#%% Create Dataframe along Flume for plotting
# NOTE(review): this cell OVERWRITES dataOUT from the previous cell -- run one or
# the other depending on whether the output is the full domain or the flume.
# For the flume the x coordinate is used directly as the along-flume distance.
dataOUT = dataIN_gp
dataOUT['RiverKM'] = dataIN_x
dataOUT.set_index('RiverKM', inplace=True)
#%% Read in river sections shapefile
# Polygons used to aggregate fluxes by river reach
river_sections_shp = 'C:/Users/arey/files/Projects/Grassy Narrows/GIS/RiverSegmentsModel.shp'
river_sections = gp.read_file(river_sections_shp)
# Names parallel the shapefile's feature order
river_sections_names = ['1_Dryden', '2_RugbyCreek', 'Eagle_River', 'Full_Domain']
#%% Sum data over area
# Area-integrate the per-m2 (2d) flux variables within each river section.
# Identify 2d flux variables
fluxVars2d = ['fSedHg', 'fResS1Hg', 'fSedMm', 'fResS1Mm',
              'fBurS1Hg', 'fDigS1Hg', 'fBurS1Mm', 'fDigS1Mm',
              'HgS1MmS1O', 'MmS1HgS1O',
              'fSedDM', 'fResS1DM',
              'fDigS1DM', 'fBurS1DM', 'fDigS2DM', 'fBurS2DM']
# Create geoDataFrame for summing
dataSum_2d_gp = gp.GeoDataFrame(geometry=gp.points_from_xy(dataIN_x, dataIN_y, crs="EPSG:32615"))
# Cell area = volume / depth
dataSum_2d_gp['Area'] = dataIN_gp['volume'] / dataIN_gp['Depth']
# Create dict for summing
dataSum_2d = dict()
# Loop through river sections shapefile
for rIDX, rSHP in enumerate(river_sections.geometry):
    dataSum_2d[river_sections_names[rIDX]] = dict()
    # Hoisted: the point-in-polygon test and matching areas do not depend on the
    # variable, and within() is expensive -- compute them once per section
    inSection = dataIN_gp.within(rSHP)
    sectionArea = dataSum_2d_gp.loc[inSection, 'Area']
    for vIDX, v in enumerate(fluxVars2d):
        # Flux [g/m2/day] * Area [m2], summed over cells; change from g/day to ng/day
        dataSum_2d[river_sections_names[rIDX]][v] = sum(dataIN_gp.loc[inSection, v] *
                                                        sectionArea) * 1e9
# Convert to Pandas DataFrame
dataSum_2d_pd = pd.DataFrame(dataSum_2d)
# dataSum_2d_pd.to_excel('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ModelOutput/GrassyNarrows_2dFluxes.xlsx')
#%% Sum data over water volume
# Aggregate the per-m3 (3d) flux variables within each river section.
# Identify 3d flux variables
fluxVars3d = ['oPhoOxy', 'oPhoRed', 'oPhoDeg',
              'HgMmO', 'MmHgO']
# Create dict for summing
dataSum_3d = dict()
# Loop through river sections shapefile
for rIDX, rSHP in enumerate(river_sections.geometry):
    dataSum_3d[river_sections_names[rIDX]] = dict()
    # Hoisted: the point-in-polygon test and matching volumes do not depend on the
    # variable -- compute them once per section instead of twice per variable
    inSection = dataIN_gp.within(rSHP)
    sectionVolume = dataIN_gp.loc[inSection, 'volume']
    for vIDX, v in enumerate(fluxVars3d):
        # Change from g/day to ng/day
        # NOTE(review): this DIVIDES the per-m3 rate by cell volume; a volume
        # integral (cf. the 2d case, which multiplies by area) would multiply
        # by volume instead -- confirm intent.
        dataSum_3d[river_sections_names[rIDX]][v] = sum(dataIN_gp.loc[inSection, v] /
                                                        sectionVolume) * 1e9
# Convert to Pandas DataFrame
dataSum_3d_pd = pd.DataFrame(dataSum_3d)
# dataSum_3d_pd.to_excel('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ModelOutput/GrassyNarrows_3dFluxes.xlsx')
#%% Plot Partition Data
# 2x2 panel: mass-fraction partitioning of Hg and MeHg over carriers (IM1-3,
# POC, DOC, phytoplankton, dissolved) in the water column and sediment layer S1.
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# Hg Partition in Water
ax[0].set_title('Hg Fraction in Water')
ax[0].plot(dataOUT.index, dataOUT['FrHgIM1'], linewidth=3, label='IM1')
ax[0].plot(dataOUT.index, dataOUT['FrHgIM2'], linewidth=3, label='IM2')
ax[0].plot(dataOUT.index, dataOUT['FrHgIM3'], linewidth=3, label='IM3')
ax[0].plot(dataOUT.index, dataOUT['FrHgPOC'], linewidth=3, label='POC')
ax[0].plot(dataOUT.index, dataOUT['FrHgDOC'], linewidth=3, label='DOC')
ax[0].plot(dataOUT.index, dataOUT['FrHgPHYT'], linewidth=3, label='PHYT')
ax[0].plot(dataOUT.index, dataOUT['FrHgDis'], linewidth=3, label='Dissolved')
ax[0].legend()
ax[0].set_ylabel('Percentage Hg')
ax[0].set_xlabel('Distance Along River [m]')
ax[0].set_ylim([0, 1])
# Mm Partition in Water
ax[1].set_title('MeHg Fraction in Water')
ax[1].plot(dataOUT.index, dataOUT['FrMmIM1'], linewidth=3, label='IM1')
ax[1].plot(dataOUT.index, dataOUT['FrMmIM2'], linewidth=3, label='IM2')
ax[1].plot(dataOUT.index, dataOUT['FrMmIM3'], linewidth=3, label='IM3')
ax[1].plot(dataOUT.index, dataOUT['FrMmPOC'], linewidth=3, label='POC')
ax[1].plot(dataOUT.index, dataOUT['FrMmDOC'], linewidth=3, label='DOC')
ax[1].plot(dataOUT.index, dataOUT['FrMmPHYT'], linewidth=3, label='PHYT')
ax[1].plot(dataOUT.index, dataOUT['FrMmDis'], linewidth=3, label='Dissolved')
ax[1].legend()
ax[1].set_ylabel('Percentage MeHg')
ax[1].set_xlabel('Distance Along River [m]')
ax[1].set_ylim([0, 1])
# Hg Partition in S1
ax[2].set_title('Hg Fraction in Sediment')
ax[2].plot(dataOUT.index, dataOUT['FrHgIM1S1'], linewidth=3, label='IM1')
ax[2].plot(dataOUT.index, dataOUT['FrHgIM2S1'], linewidth=3, label='IM2')
ax[2].plot(dataOUT.index, dataOUT['FrHgIM3S1'], linewidth=3, label='IM3')
ax[2].plot(dataOUT.index, dataOUT['FrHgPOCS1'], linewidth=3, label='POC')
ax[2].plot(dataOUT.index, dataOUT['FrHgDOCS1'], linewidth=3, label='DOC')
ax[2].plot(dataOUT.index, dataOUT['FrHgPHYTS1'], linewidth=3, label='PHYT')
ax[2].plot(dataOUT.index, dataOUT['FrHgDisS1'], linewidth=3, label='Dissolved')
ax[2].legend()
ax[2].set_ylabel('Percentage Hg')
ax[2].set_xlabel('Distance Along River [m]')
ax[2].set_ylim([0, 1])
# Mm Partition in S1
ax[3].set_title('MeHg Fraction in Sediment')
ax[3].plot(dataOUT.index, dataOUT['FrMmIM1S1'], linewidth=3, label='IM1')
ax[3].plot(dataOUT.index, dataOUT['FrMmIM2S1'], linewidth=3, label='IM2')
ax[3].plot(dataOUT.index, dataOUT['FrMmIM3S1'], linewidth=3, label='IM3')
ax[3].plot(dataOUT.index, dataOUT['FrMmPOCS1'], linewidth=3, label='POC')
ax[3].plot(dataOUT.index, dataOUT['FrMmDOCS1'], linewidth=3, label='DOC')
ax[3].plot(dataOUT.index, dataOUT['FrMmPHYTS1'], linewidth=3, label='PHYT')
ax[3].plot(dataOUT.index, dataOUT['FrMmDisS1'], linewidth=3, label='Dissolved')
ax[3].legend()
ax[3].set_ylabel('Percentage MeHg')
ax[3].set_xlabel('Distance Along River [m]')
ax[3].set_ylim([0, 1])
plt.show()
# fig.savefig('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/12_WAQ/FullDomainFigures/Partitioning.png',
#             bbox_inches='tight', dpi=200)
#%% Plot Total Data
# 2x2 panel: modelled Hg/MeHg totals along the profile against observations.
# Water concentrations are converted g/m3 -> ng/L (* 1e6); sediment Hg is
# normalised by total sediment mass (IM1-3 + DetC + OOC) and scaled to ng/g.
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# Hg in Water
ax[0].set_title('Hg in Water')
ax[0].plot(dataOUT.index, dataOUT['Hg'] * 1000000, linewidth=3, label='Hg')
ax[0].scatter(obs_HgWater.RiverKM, obs_HgWater['Sample_NG/L'], 5, label='Hg Obs', color='k')
ax[0].set_xlim([0, 100000])
ax[0].set_ylim([0, 100])
ax[0].legend()
ax[0].set_ylabel('Hg [ng/L]')
ax[0].set_xlabel('Distance Along River [m]')
# MeHg in Water
ax[1].set_title('MeHg in Water')
ax[1].plot(dataOUT.index, dataOUT['Mm'] * 1000000, linewidth=3, label='MeHg')
# Fix: these are MeHg observations -- the label was copy-pasted as 'Hg Obs'
ax[1].scatter(obs_MeHgWater.RiverKM, obs_MeHgWater['Sample_NG/L'], 5, label='MeHg Obs', color='k')
ax[1].set_xlim([0, 100000])
ax[1].set_ylim([0, 3])
ax[1].legend()
ax[1].set_ylabel('MeHg [ng/L]')
ax[1].set_xlabel('Distance Along River [m]')
# Hg in Sediment
ax[2].set_title('Hg in Sediment')
ax[2].plot(dataOUT.index, dataOUT['HgS1'] * 1*10 ** 9 / (dataOUT['IM1S1'] + dataOUT['IM2S1'] + dataOUT['IM3S1'] + dataOUT['DetCS1'] + dataOUT['OOCS1']),
           linewidth=3, label='HgS1')
ax[2].scatter(obs_HgSed_Avg.RiverKM, obs_HgSed_Avg['Sample_NG/G|mean'], 5, label='Hg Obs', color='k')
ax[2].set_xlim([0, 100000])
ax[2].set_ylim([0, 20000])
ax[2].legend()
ax[2].set_ylabel('HgS1 [ng/g]')
ax[2].set_xlabel('Distance Along River [m]')
# MeHg in Sediment
ax[3].set_title('MeHg in Sediment')
ax[3].plot(dataOUT.index, dataOUT['MmS1'] * 1*10 ** 9 / (dataOUT['IM1S1'] + dataOUT['IM2S1'] + dataOUT['IM3S1'] + dataOUT['DetCS1'] + dataOUT['OOCS1']),
           linewidth=3, label='MeHgS1')
ax[3].scatter(obs_MeHgSed_Avg.RiverKM, obs_MeHgSed_Avg['Sample_NG/G|mean'], 5, label='MeHg Obs', color='k')
ax[3].set_ylim([0, 60])
ax[3].set_xlim([0, 100000])
ax[3].legend()
ax[3].set_ylabel('MeHg [ng/g]')
ax[3].set_xlabel('Distance Along River [m]')
# Alternative fourth panel (mass-continuity check) kept for reference:
# ax[3].set_title('Continuity')
# ax[3].plot(dataOUT.index, dataOUT['Continuity'],
#            linewidth=3, label='Continuity')
# ax[3].set_ylim([0, 1])
#
# ax[3].legend()
# ax[3].set_ylabel('Continuity [-]')
# ax[3].set_xlabel('Distance Along Flume [m]')
# ax[3].set_ylim([0, 2])
plt.show()
# fig.savefig('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/12_WAQ/FullDomainFigures/HgTotals.png',
#             bbox_inches='tight', dpi=200)
#%% Plot POC total Data
# 2x2 panel: POC pools in water [g/m3] and, for sediment, as a mass fraction of
# the total sediment mass (IM1-3 + DetC + OOC) [g/g].
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# POC1 in Water
ax[0].set_title('POC1 (Fast) in Water')
ax[0].plot(dataOUT.index, dataOUT['POC1'], linewidth=3, label='POC1')
ax[0].legend()
ax[0].set_ylabel('POC1 [g/m3]')
ax[0].set_xlabel('Distance Along River [m]')
ax[0].set_ylim([0, 1])
ax[0].set_xlim([0, 100000])
# POC2 in Water
ax[1].set_title('POC2 (Slow) in Water')
ax[1].plot(dataOUT.index, dataOUT['POC2'], linewidth=3, label='POC2')
ax[1].legend()
ax[1].set_ylabel('POC2 [g/m3]')
ax[1].set_xlabel('Distance Along River [m]')
ax[1].set_ylim([0, 1])
ax[1].set_xlim([0, 100000])
# DetC in sediment
ax[2].set_title('DetC (Fast) in Sediment')
ax[2].plot(dataOUT.index, dataOUT['DetCS1']
           / (dataOUT['IM1S1'] + dataOUT['IM2S1'] + dataOUT['IM3S1'] + dataOUT['DetCS1'] + dataOUT['OOCS1'])
           , linewidth=3, label='DetCS1')
ax[2].legend()
ax[2].set_ylabel('DetCS1 [g/g]')
ax[2].set_xlabel('Distance Along River [m]')
ax[2].set_ylim([0, 0.02])
ax[2].set_xlim([0, 100000])
# OOC in Sediment
ax[3].set_title('OOC (Slow) in Sediment')
ax[3].plot(dataOUT.index, dataOUT['OOCS1']
           / (dataOUT['IM1S1'] + dataOUT['IM2S1'] + dataOUT['IM3S1'] + dataOUT['DetCS1'] + dataOUT['OOCS1']),
           linewidth=3, label='OOCS1')
ax[3].legend()
ax[3].set_ylabel('OOCS1 [g/g]')
ax[3].set_xlabel('Distance Along River [m]')
ax[3].set_ylim([0, 0.02])
ax[3].set_xlim([0, 100000])
plt.show()
# fig.savefig('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/12_WAQ/FullDomainFigures/POCTotals.png',
#             bbox_inches='tight', dpi=200)
#%% More Total Data
# 2x2 panel: elemental mercury Hg(0) and the inorganic sediment classes in water.
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# H0 in Water
ax[0].set_title('Hg(0) in Water')
# g/m3 -> ng/L
ax[0].plot(dataOUT.index, dataOUT['H0'] * 1000000, linewidth=3, label='Hg(0)')
ax[0].legend()
ax[0].set_ylabel('Hg(0) [ng/L]')
ax[0].set_xlabel('Distance Along River [m]')
ax[0].set_ylim([0, 0.35])
ax[0].set_xlim([0, 100000])
# IM1 in Water
ax[1].set_title('IM1 (Sand) in Water')
ax[1].plot(dataOUT.index, dataOUT['IM1'], linewidth=3, label='IM1')
ax[1].legend()
ax[1].set_ylabel('IM1 [g/m3]')
ax[1].set_xlabel('Distance Along River [m]')
ax[1].set_ylim([0, 20])
ax[1].set_xlim([0, 100000])
# IM2 in Water
ax[2].set_title('IM2 (Silt) in Water')
ax[2].plot(dataOUT.index, dataOUT['IM2'], linewidth=3, label='IM2')
ax[2].legend()
ax[2].set_ylabel('IM2 [g/m3]')
ax[2].set_xlabel('Distance Along Flume [m]')
ax[2].set_ylim([0, 10])
ax[2].set_xlim([0, 100000])
# IM3 in Water
ax[3].set_title('IM3 (Woodchips) in Water')
ax[3].plot(dataOUT.index, dataOUT['IM3'], linewidth=3, label='IM3 (Woodchips)')
ax[3].legend()
ax[3].set_ylabel('IM3 [g/m3]')
ax[3].set_xlabel('Distance Along River [m]')
# ax[3].set_ylim([0, 0.02])
ax[3].set_xlim([0, 100000])
plt.show()
# fig.savefig('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/12_WAQ/FullDomainFigures/InorganicTotals.png',
#             bbox_inches='tight', dpi=200)
#%% Plot Fluxes
# 2x2 panel: sedimentation vs resuspension fluxes [g/m2/d] for sand, POC, Hg, MeHg.
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=3)
ax = axes.flat
# IM1 Sediment Flux
ax[0].set_title('IM1 (Sand) Resuspension Flux')
ax[0].plot(dataOUT.index, dataOUT['fResS1IM1'], linewidth=3, label='IM1 Resuspension')
ax[0].plot(dataOUT.index, dataOUT['fSedIM1'], linewidth=3, label='IM1 Sedimentation')
ax[0].set_ylim([0, 200])
ax[0].set_xlim([0, 100000])
ax[0].legend()
ax[0].set_ylabel('IM1 (Sand) Sedimentation and Resuspension [g/m2/d]')
ax[0].set_xlabel('Distance Along River [m]')
# POC Sediment Flux
ax[1].set_title('POC Resuspension Flux')
ax[1].plot(dataOUT.index, dataOUT['fResS1DetC'], linewidth=3, label='DetC Resuspension')
ax[1].plot(dataOUT.index, dataOUT['fResS1OOC'], linewidth=3, label='OOC Resuspension')
ax[1].plot(dataOUT.index, dataOUT['fSedPOC1'], linewidth=3, label='POC1 (Fast) Sedimentation')
ax[1].plot(dataOUT.index, dataOUT['fSedPOC2'], linewidth=3, label='POC2 (Slow) Sedimentation')
ax[1].set_ylim([0, 2])
ax[1].set_xlim([0, 100000])
ax[1].legend()
ax[1].set_ylabel('POC Sedimentation and Resuspension [g/m2/d]')
ax[1].set_xlabel('Distance Along River [m]')
# Hg Sediment Flux
ax[2].set_title('Hg Resuspension Flux')
ax[2].plot(dataOUT.index, dataOUT['fResS1Hg'], linewidth=3, label='Hg Resuspension')
ax[2].plot(dataOUT.index, dataOUT['fSedHg'], linewidth=3, label='Hg Sedimentation')
ax[2].set_ylim([0, 0.002])
ax[2].set_xlim([0, 100000])
ax[2].legend()
ax[2].set_ylabel('Hg Sedimentation and Resuspension [g/m2/d]')
ax[2].set_xlabel('Distance Along River [m]')
# Mm Sediment Flux
ax[3].set_title('MeHg Resuspension Flux')
ax[3].plot(dataOUT.index, dataOUT['fResS1Mm'], linewidth=3, label='MeHg Resuspension')
ax[3].plot(dataOUT.index, dataOUT['fSedMm'], linewidth=3, label='MeHg Sedimentation')
ax[3].legend()
ax[3].set_ylabel('MeHg Sedimentation and Resuspension [g/m2/d]')
ax[3].set_xlabel('Distance Along River [m]')
ax[3].set_xlim([0, 100000])
ax[3].set_ylim([0, 0.0001])
plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/HgFluxes.png',
#             bbox_inches='tight', dpi=200)
# fig.savefig('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/12_WAQ/FullDomainFigures/HgFluxes.png',
#             bbox_inches='tight', dpi=200)
#%% Plot More Fluxes
# 2x2 panel: (de)methylation fluxes in water/sediment and POC degradation fluxes.
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=4)
ax = axes.flat
# Methylation Flux
ax[0].set_title('Methylation Flux in Water')
# Fix: labels previously misspelled 'Mehtylation'/'Demehtylation'
ax[0].plot(dataOUT.index, dataOUT['HgMmO'], linewidth=3, label='Methylation')
ax[0].plot(dataOUT.index, dataOUT['MmHgO'], linewidth=3, label='Demethylation')
ax[0].legend()
ax[0].set_ylabel('Methylation Flux in Water [g/m3/d]')
ax[0].set_xlabel('Distance Along River [m]')
# Methylation Flux in Sediment (g -> ug)
ax[1].set_title('Methylation Flux in Sediment')
ax[1].plot(dataOUT.index, dataOUT['HgS1MmS1O'] * 1000000,
           linewidth=3, label='Methylation')
ax[1].plot(dataOUT.index, dataOUT['MmS1HgS1O'] * 1000000,
           linewidth=3, label='Demethylation')
ax[1].legend()
ax[1].set_ylabel('Methylation Flux in Sediment [ug/m2/d]')
ax[1].set_xlabel('Distance Along River [m]')
# Hg Diffusion Flux -- kept for reference; plotted in the Photo Fluxes cell instead
# ax[2].set_title('Diffusion Flux')
# ax[2].plot(dataOUT.index, dataOUT['HgHgS1O'], linewidth=3, label='Hg Diffusion')
# ax[2].plot(dataOUT.index, dataOUT['MmMmS1O'], linewidth=3, label='MeHg Diffusion')
#
# ax[2].legend()
# ax[2].set_ylabel('Diffusion [g/m2/d]')
# ax[2].set_xlabel('Distance Along River [m]')
# POC Degradation Flux in water
ax[2].set_title('POC Degradation Flux in Water')
ax[2].plot(dataOUT.index, dataOUT['f_minPOC1'], linewidth=3, label='POC Degradation Water')
ax[2].legend()
ax[2].set_ylabel('POC Degradation [g/m3/d]')
ax[2].set_xlabel('Distance Along River [m]')
# POC Degradation Flux in sediment, normalised by the POC pool
ax[3].set_title('POC Degradation Flux in Sediment')
ax[3].plot(dataOUT.index, dataOUT['MnODCS1']
           / (dataOUT['DetCS1'] + dataOUT['OOCS1']),
           linewidth=3, label='POC Degradation Sediment')
ax[3].legend()
# Fix: closing bracket was missing in the unit label
ax[3].set_ylabel('POC Degradation [g/gPOC/d]')
ax[3].set_xlabel('Distance Along River [m]')
plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/POCFluxes.png',
#             bbox_inches='tight', dpi=200)
# fig.savefig('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/06_Models/00_Delft3D/12_WAQ/FullDomainFigures/POCFluxes.png',
#             bbox_inches='tight', dpi=200)
#%% Photo Fluxes
# 2x2 panel: sediment-water diffusion fluxes and photochemical transformation rates.
fig, axes = plt.subplots(nrows=2, ncols=2, figsize=(9, 9))
fig.patch.set_facecolor('white')
fig.tight_layout(pad=4)
ax = axes.flat
# Diffusion Flux
ax[0].set_title('Diffusion Flux')
ax[0].plot(dataOUT.index, dataOUT['HgHgS1O'], linewidth=3, label='Hg Diffusion')
ax[0].plot(dataOUT.index, dataOUT['MmMmS1O'], linewidth=3, label='MeHg Diffusion')
ax[0].legend()
ax[0].set_ylabel('Diffusion [g/m2/d]')
ax[0].set_xlabel('Distance Along Flume [m]')
# Photo Oxidation Flux
ax[1].set_title('Photo Oxidation Flux')
ax[1].plot(dataOUT.index, dataOUT['oPhoOxy'], linewidth=3, label='Photo Oxidation Flux')
ax[1].legend()
ax[1].set_ylabel('Photo Oxidation Flux [g/m3/d]')
ax[1].set_xlabel('Distance Along Flume [m]')
# Photo Degradation Flux
ax[2].set_title('Photo Degradation Flux')
ax[2].plot(dataOUT.index, dataOUT['oPhoDeg'], linewidth=3, label='Photo Degradation Flux')
ax[2].legend()
ax[2].set_ylabel('Photo Degradation Flux [g/m3/d]')
ax[2].set_xlabel('Distance Along Flume [m]')
# Photo Reduction Flux
ax[3].set_title('Photo Reduction Flux')
ax[3].plot(dataOUT.index, dataOUT['oPhoRed'], linewidth=3, label='Photo Reduction Flux')
ax[3].legend()
ax[3].set_ylabel('Photo Reduction Flux [g/m3/d]')
ax[3].set_xlabel('Distance Along Flume [m]')
plt.show()
# fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ProcessFigures/H0Fluxes.png',
#             bbox_inches='tight', dpi=200)
#%% Make summary table
# Collect transect-averaged model outputs into a flat {label: value} dict;
# it is written out to Excel in the save cell at the end of the script.
dataTableOut = dict()
# Hg partitioning in the water column
dataTableOut['Hg:IM1 Fraction'] = dataOUT['FrHgIM1'].mean()
dataTableOut['Hg:IM2 Fraction'] = dataOUT['FrHgIM2'].mean()
dataTableOut['Hg:IM3 Fraction'] = dataOUT['FrHgIM3'].mean()
dataTableOut['Hg:POC Fraction'] = dataOUT['FrHgPOC'].mean()
dataTableOut['Hg:DOC Fraction'] = dataOUT['FrHgDOC'].mean()
dataTableOut['Hg:Phyt Fraction'] = dataOUT['FrHgPHYT'].mean()
dataTableOut['Hg:Dissolved Fraction'] = dataOUT['FrHgDis'].mean()
# MeHg partitioning in the water column
dataTableOut['MeHg:IM1 Fraction'] = dataOUT['FrMmIM1'].mean()
dataTableOut['MeHg:IM2 Fraction'] = dataOUT['FrMmIM2'].mean()
dataTableOut['MeHg:IM3 Fraction'] = dataOUT['FrMmIM3'].mean()
dataTableOut['MeHg:POC Fraction'] = dataOUT['FrMmPOC'].mean()
dataTableOut['MeHg:DOC Fraction'] = dataOUT['FrMmDOC'].mean()
dataTableOut['MeHg:Phyt Fraction'] = dataOUT['FrMmPHYT'].mean()
dataTableOut['MeHg:Dissolved Fraction'] = dataOUT['FrMmDis'].mean()
# Hg partitioning in sediment layer S1
dataTableOut['Hg:IM1 Fraction S1'] = dataOUT['FrHgIM1S1'].mean()
dataTableOut['Hg:IM2 Fraction S1'] = dataOUT['FrHgIM2S1'].mean()
dataTableOut['Hg:IM3 Fraction S1'] = dataOUT['FrHgIM3S1'].mean()
dataTableOut['Hg:POC Fraction S1'] = dataOUT['FrHgPOCS1'].mean()
dataTableOut['Hg:DOC Fraction S1'] = dataOUT['FrHgDOCS1'].mean()
dataTableOut['Hg:Phyt Fraction S1'] = dataOUT['FrHgPHYTS1'].mean()
dataTableOut['Hg:Dissolved Fraction S1'] = dataOUT['FrHgDisS1'].mean()
# MeHg partitioning in sediment layer S1
dataTableOut['MeHg:IM1 Fraction S1'] = dataOUT['FrMmIM1S1'].mean()
dataTableOut['MeHg:IM2 Fraction S1'] = dataOUT['FrMmIM2S1'].mean()
dataTableOut['MeHg:IM3 Fraction S1'] = dataOUT['FrMmIM3S1'].mean()
dataTableOut['MeHg:POC Fraction S1'] = dataOUT['FrMmPOCS1'].mean()
dataTableOut['MeHg:DOC Fraction S1'] = dataOUT['FrMmDOCS1'].mean()
# Fixed suspected copy-paste bug: this S1 row previously read the
# water-column column 'FrMmPHYT'; every other S1 row uses the *S1 column.
# Assumes 'FrMmPHYTS1' exists like 'FrHgPHYTS1' above — TODO confirm.
dataTableOut['MeHg:Phyt Fraction S1'] = dataOUT['FrMmPHYTS1'].mean()
dataTableOut['MeHg:Dissolved Fraction S1'] = dataOUT['FrMmDisS1'].mean()
# Total transect-averaged dry mass of sediment-layer solids (inorganic IM1-3
# plus organic DetC/OOC fractions); shared denominator for per-gram-of-solids
# sediment concentrations below.
sedSolidsS1 = (dataOUT['IM1S1'].mean() + dataOUT['IM2S1'].mean()
               + dataOUT['IM3S1'].mean() + dataOUT['DetCS1'].mean()
               + dataOUT['OOCS1'].mean())
# Hg in Water (x1e6 converts to ng/L — assumes model output is g/m3; confirm)
dataTableOut['Hg in Water [ng/L]'] = dataOUT['Hg'].mean() * 1000000
# MeHg in Water
dataTableOut['MeHg in Water [ng/L]'] = dataOUT['Mm'].mean() * 1000000
# Hg in Sediment, expressed as ng per g of sediment solids
dataTableOut['Hg in Sediment [ng/g]'] = dataOUT['HgS1'].mean() * 1e9 / sedSolidsS1
# MeHg in Sediment
dataTableOut['MeHg in Sediment [ng/g]'] = dataOUT['MmS1'].mean() * 1e9 / sedSolidsS1
# Hg(0) in Water
dataTableOut['Hg(0) in Water [ng/L]'] = dataOUT['H0'].mean() * 1000000
# IM1 in Water
# (removed a stray `ax[1].set_title('IM1 (Sand) in Water')` left over from a
# plotting cell: it targeted a figure that had already been shown and did not
# belong in this table-building section)
dataTableOut['IM1 (Sand) in Water [g/m3]'] = dataOUT['IM1'].mean()
# IM2 in Water
dataTableOut['IM2 (Silt) in Water [g/m3]'] = dataOUT['IM2'].mean()
# IM3 in Water
dataTableOut['IM3 (Woodchips) in Water [g/m3]'] = dataOUT['IM3'].mean()
# POC1 in Water
dataTableOut['POC1 (Fast) in Water [g/m3]'] = dataOUT['POC1'].mean()
# POC2 in Water (label previously said 'POC1 (Slow)' — fixed to match the
# POC2 column it actually stores)
dataTableOut['POC2 (Slow) in Water [g/m3]'] = dataOUT['POC2'].mean()
# DetC in sediment, as a fraction of total sediment solids
dataTableOut['DetC (Fast) in Sediment [g/g]'] = dataOUT['DetCS1'].mean() / sedSolidsS1
# OOC in sediment
dataTableOut['OOC (Slow) in Sediment [g/g]'] = dataOUT['OOCS1'].mean() / sedSolidsS1
# Sediment-water exchange fluxes (resuspension out of / sedimentation into S1).
dataTableOut['IM1 (Sand) Resuspension Flux [g/m2/d]'] = dataOUT['fResS1IM1'].mean()
dataTableOut['IM1 (Sand) Sedimentation Flux [g/m2/d]'] = dataOUT['fSedIM1'].mean()
dataTableOut['DetC Resuspension Flux [g/m2/d]'] = dataOUT['fResS1DetC'].mean()
dataTableOut['OOC Resuspension Flux [g/m2/d]'] = dataOUT['fResS1OOC'].mean()
dataTableOut['POC1 (Fast) Sedimentation [g/m2/d]'] = dataOUT['fSedPOC1'].mean()
dataTableOut['POC2 (Slow) Sedimentation [g/m2/d]'] = dataOUT['fSedPOC2'].mean()
dataTableOut['Hg Resuspension Flux [g/m2/d]'] = dataOUT['fResS1Hg'].mean()
dataTableOut['Hg Sedimentation Flux [g/m2/d]'] = dataOUT['fSedHg'].mean()
dataTableOut['MeHg Resuspension Flux [g/m2/d]'] = dataOUT['fResS1Mm'].mean()
dataTableOut['MeHg Sedimentation Flux [g/m2/d]'] = dataOUT['fSedMm'].mean()
# Methylation flux in water
dataTableOut['Methylation Flux in Water [g/m3/d]'] = dataOUT['HgMmO'].mean()
# Demethylation flux in water ('Demehtylation' typo fixed in the output label)
dataTableOut['Demethylation Flux in Water [g/m3/d]'] = dataOUT['MmHgO'].mean()
# Methylation flux in sediment, normalised per gram of sediment Hg
dataTableOut['Methylation Flux in Sediment [g/gHg/d]'] = (
    dataOUT['HgS1MmS1O'].mean() / dataOUT['HgS1'].mean())
# Demethylation flux in sediment, per gram of sediment MeHg
dataTableOut['Demethylation Flux in Sediment [g/gMm/d]'] = (
    dataOUT['MmS1HgS1O'].mean() / dataOUT['MmS1'].mean())
# POC degradation flux in water
dataTableOut['POC Degradation Flux in Water [g/m3/d]'] = dataOUT['f_minPOC1'].mean()
# POC degradation flux in sediment, per gram of sediment POC
# (label previously said 'Demehtylation Flux in Water' — fixed to describe
# the value it actually stores)
dataTableOut['POC Degradation Flux in Sediment [g/gPOC/d]'] = (
    dataOUT['MnODCS1'].mean()
    / (dataOUT['DetCS1'].mean() + dataOUT['OOCS1'].mean()))
# Hg sediment-water diffusion flux
dataTableOut['Hg Diffusion [g/m2/d]'] = dataOUT['HgHgS1O'].mean()
# MeHg sediment-water diffusion flux
dataTableOut['MeHg Diffusion [g/m2/d]'] = dataOUT['MmMmS1O'].mean()
# Photochemical fluxes. Previously all three rows were labelled
# '(Me)Hg Diffusion [g/m3/d]', so photo oxidation and photo reduction shared
# the SAME dict key and the oxidation value was silently overwritten before
# the table was exported. Each flux now gets its own label.
dataTableOut['Photo Oxidation Flux [g/m3/d]'] = dataOUT['oPhoOxy'].mean()
dataTableOut['Photo Degradation Flux [g/m3/d]'] = dataOUT['oPhoDeg'].mean()
dataTableOut['Photo Reduction Flux [g/m3/d]'] = dataOUT['oPhoRed'].mean()
#%% Save Data Table as Excel
# Build a one-row frame from the summary dict, then transpose so each metric
# becomes a labelled row in the output workbook.
# NOTE(review): output path is hard-coded to a user-specific directory.
dataTableOut_df = pd.DataFrame(data=dataTableOut, index=[0]).T
dataTableOut_df.to_excel(
    'C:/Users/arey/files/Projects/Grassy Narrows/Modelling/ModelOutputs_Sept26.xlsx')

207
EWR_D3D/dfm-tools-ajmr.txt Normal file
View File

@ -0,0 +1,207 @@
# This file may be used to create an environment using:
# $ conda create --name <env> --file <this file>
# platform: win-64
@EXPLICIT
https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2022.9.24-h5b45459_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2022.1.0-h57928b3_3787.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.11-hd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.20348.0-h57928b3_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.29.30139-h890b9b1_7.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/vc-14.2-hb210afc_7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h8ffe710_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/expat-2.4.9-h1537add_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/geos-3.11.0-h39d44d4_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/icu-70.1-h0e60522_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/jpeg-9e-h8ffe710_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.0.9-h8ffe710_7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.14-hcfcfb64_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libiconv-1.17-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libogg-1.3.4-h8ffe710_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libspatialindex-1.9.3-h39d44d4_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.39.3-hcfcfb64_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.2.4-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libzlib-1.2.12-hcfcfb64_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.9.3-h8ffe710_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/openssl-1.1.1q-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pcre-8.45-h0e60522_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pixman-0.40.0-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/snappy-1.1.9-h82413e6_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tbb-2021.6.0-h91493d7_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tk-8.6.12-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.3-h0e60522_5.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/freexl-1.0.6-h67ca5e6_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/gettext-0.19.8.1-h5728263_1009.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/hdf4-4.2.15-h0e5069d_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/krb5-1.19.3-h1176d77_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.0.9-h8ffe710_7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.0.9-h8ffe710_7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libclang13-14.0.6-default_h77d9078_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.38-h19919ed_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-h2842628_11.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libssh2-1.10.0-h680486a_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libvorbis-1.3.7-h0e60522_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libxml2-2.10.2-h99b13fb_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libzip-1.9.2-hfed4ece_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/mkl-2022.1.0-h6a75c08_874.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pcre2-10.37-hdfff0fc_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/sqlite-3.39.3-hcfcfb64_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/zlib-1.2.12-hcfcfb64_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.2-h7755175_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.1-h74325e0_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/boost-cpp-1.78.0-h9f4b32c_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.0.9-h8ffe710_7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/freetype-2.12.1-h546665d_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-16_win64_mkl.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libclang-14.0.6-default_h77d9078_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libcurl-7.83.1-h789b8ee_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libglib-2.74.0-h79619a9_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libpq-14.5-hfcc5ef8_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libtiff-4.4.0-h8e97e67_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-hcd874cb_1001.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/python-3.8.13-h9a09f29_0_cpython.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.9-hcd874cb_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.3-hcd874cb_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/affine-2.3.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/attrs-22.1.0-pyh71513ae_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/brotli-1.0.9-h8ffe710_7.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/certifi-2022.9.24-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.1.0-h5a969a9_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.5-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/curl-7.83.1-h789b8ee_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.0.1-py_1001.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/fontconfig-2.14.0-h720f74d_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.8.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/geographiclib-1.52-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/glib-tools-2.74.0-h12be248_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/hdf5-1.12.2-nompi_h2a0e4a3_100.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lcms2-2.12-h2a16943_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-16_win64_mkl.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-hf2ab4e4_1015.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-16_win64_mkl.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libxcb-1.13-hcd874cb_1004.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/networkx-2.8.6-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.0-hc9384bd_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/postgresql-14.5-h1c22c4f_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/proj-9.0.1-h1cfcee9_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/python_abi-3.8-2_cp38.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pytz-2022.2.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/setuptools-65.4.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.1.0-pyh8a188c0_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.3.0-pyha770c72_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/xyzservices-2022.9.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cairo-1.16.0-hd694305_1014.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cffi-1.15.1-py38hd8c33c5_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/click-8.1.3-py38haa244fe_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cytoolz-0.12.0-py38h294d835_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/geopy-2.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.1-h714bc5f_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/glib-2.74.0-h12be248_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/joblib-1.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/kealib-1.4.15-hdf81f3a_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.4-py38hbd9d945_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.8.1-nompi_h85765be_104.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.0.1-hd9530bf_19.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lz4-4.0.0-py38he18b7d8_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.1-py38h294d835_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.0.4-py38hbd9d945_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/munch-2.5.0-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/numpy-1.23.3-py38h8503d5b_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.0.10-py38h91455d4_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pillow-9.2.0-py38h37aa274_2.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pip-22.2.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/psutil-5.9.2-py38h91455d4_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pyproj-3.4.0-py38h5a72d38_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0-py38h294d835_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/rtree-1.0.0-py38h8b54edf_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tiledb-2.11.3-h5689973_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tornado-6.1-py38h294d835_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/unicodedata2-14.0.0-py38h294d835_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/win_inet_pton-1.1.0-py38haa244fe_4.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/zict-2.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/bottleneck-1.3.5-py38hbdcd294_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/brotlipy-0.7.0-py38h294d835_1004.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cftime-1.6.2-py38hbaf524b_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/click-log-0.4.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/contourpy-1.0.5-py38hb1fd069_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cryptography-37.0.4-py38hb7941b4_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.9.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/fonttools-4.37.3-py38h91455d4_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/gstreamer-1.20.3-h6b5321d_2.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pandas-1.5.0-py38h5846ac1_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/poppler-22.04.0-hb57f792_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pygeos-0.13-py38h8678f91_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/scipy-1.9.1-py38h91810f7_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/shapely-1.8.4-py38h91759cc_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/sip-6.6.2-py38h885f38d_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/trimesh-3.15.2-pyh1a96a4e_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/bokeh-2.4.3-pyhd8ed1ab_3.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/branca-0.5.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/geopandas-base-0.11.1-pyha770c72_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/gst-plugins-base-1.20.3-h001b923_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libgdal-3.5.2-hc386656_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.5.3-py38h3268a40_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/netcdf4-1.6.1-nompi_py38h78680c8_100.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pyqt5-sip-12.11.0-py38h885f38d_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.1.2-py38hc27f28a_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/xarray-2022.6.0-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cartopy-0.21.0-py38h395e5b8_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/gdal-3.5.2-py38h658b046_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.4.3-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/qt-main-5.15.6-hf0cf448_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.2-py38h8b19869_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.11-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/distributed-2022.9.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/fiona-1.8.21-py38h4ea64ce_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pyqt-5.15.7-py38h75e37d8_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.12.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/contextily-1.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/dask-2022.9.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/folium-0.12.1.post1-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.5.3-py38haa244fe_2.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyepsg-0.4.0-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/geopandas-0.11.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/alphashape-1.3.1-pyh44b312d_0.tar.bz2

207
EWR_D3D/dfm_tools_env.txt Normal file
View File

@ -0,0 +1,207 @@
# This file may be used to create an environment using:
# $ conda create --name <env> --file <this file>
# platform: win-64
@EXPLICIT
https://conda.anaconda.org/conda-forge/win-64/ca-certificates-2022.9.24-h5b45459_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/intel-openmp-2022.1.0-h57928b3_3787.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/msys2-conda-epoch-20160418-1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.11-hd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/ucrt-10.0.20348.0-h57928b3_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gmp-6.1.0-2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-libwinpthread-git-5.0.0.4634.697f757-2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/vs2015_runtime-14.29.30139-h890b9b1_7.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-core-5.3.0-7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/vc-14.2-hb210afc_7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/bzip2-1.0.8-h8ffe710_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/expat-2.4.9-h1537add_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/geos-3.11.0-h39d44d4_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/icu-70.1-h0e60522_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/jpeg-9e-h8ffe710_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lerc-4.0.0-h63175ca_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libbrotlicommon-1.0.9-h8ffe710_7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libdeflate-1.14-hcfcfb64_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libffi-3.4.2-h8ffe710_5.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libiconv-1.17-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libogg-1.3.4-h8ffe710_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libspatialindex-1.9.3-h39d44d4_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libsqlite-3.39.3-hcfcfb64_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libwebp-base-1.2.4-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libzlib-1.2.12-hcfcfb64_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lz4-c-1.9.3-h8ffe710_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libgfortran-5.3.0-6.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/openssl-1.1.1q-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pcre-8.45-h0e60522_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pixman-0.40.0-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/snappy-1.1.9-h82413e6_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tbb-2021.6.0-h91493d7_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tk-8.6.12-h8ffe710_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xerces-c-3.2.3-h0e60522_5.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xz-5.2.6-h8d14728_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/yaml-0.2.5-h8ffe710_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/freexl-1.0.6-h67ca5e6_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/gettext-0.19.8.1-h5728263_1009.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/hdf4-4.2.15-h0e5069d_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/krb5-1.19.3-h1176d77_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libbrotlidec-1.0.9-h8ffe710_7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libbrotlienc-1.0.9-h8ffe710_7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libclang13-14.0.6-default_h77d9078_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libpng-1.6.38-h19919ed_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/librttopo-1.1.0-h2842628_11.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libssh2-1.10.0-h680486a_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libvorbis-1.3.7-h0e60522_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libxml2-2.10.2-h99b13fb_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libzip-1.9.2-hfed4ece_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/m2w64-gcc-libs-5.3.0-7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/mkl-2022.1.0-h6a75c08_874.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pcre2-10.37-hdfff0fc_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/sqlite-3.39.3-hcfcfb64_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/zlib-1.2.12-hcfcfb64_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/zstd-1.5.2-h7755175_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/blosc-1.21.1-h74325e0_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/boost-cpp-1.78.0-h9f4b32c_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/brotli-bin-1.0.9-h8ffe710_7.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/freetype-2.12.1-h546665d_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libblas-3.9.0-16_win64_mkl.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libclang-14.0.6-default_h77d9078_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libcurl-7.83.1-h789b8ee_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libglib-2.74.0-h79619a9_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libpq-14.5-hfcc5ef8_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libtiff-4.4.0-h8e97e67_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pthread-stubs-0.4-hcd874cb_1001.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/python-3.8.13-h9a09f29_0_cpython.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xorg-libxau-1.0.9-hcd874cb_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/xorg-libxdmcp-1.1.3-hcd874cb_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/affine-2.3.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/attrs-22.1.0-pyh71513ae_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/brotli-1.0.9-h8ffe710_7.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/certifi-2022.9.24-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cfitsio-4.1.0-h5a969a9_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-2.1.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/cloudpickle-2.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.5-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/curl-7.83.1-h789b8ee_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/cycler-0.11.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.0.1-py_1001.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/fontconfig-2.14.0-h720f74d_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/fsspec-2022.8.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/geographiclib-1.52-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/glib-tools-2.74.0-h12be248_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/hdf5-1.12.2-nompi_h2a0e4a3_100.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/heapdict-1.0.1-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lcms2-2.12-h2a16943_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libcblas-3.9.0-16_win64_mkl.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libkml-1.3.0-hf2ab4e4_1015.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/liblapack-3.9.0-16_win64_mkl.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libxcb-1.13-hcd874cb_1004.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/networkx-2.8.6-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/openjpeg-2.5.0-hc9384bd_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/ply-3.11-py_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/postgresql-14.5-h1c22c4f_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/proj-9.0.1-h1cfcee9_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pycparser-2.21-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.0.9-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/python_abi-3.8-2_cp38.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pytz-2022.2.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/setuptools-65.4.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/tblib-1.7.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.1.0-pyh8a188c0_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.3.0-pyha770c72_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/wheel-0.37.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/xyzservices-2022.9.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cairo-1.16.0-hd694305_1014.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cffi-1.15.1-py38hd8c33c5_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/click-8.1.3-py38haa244fe_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cytoolz-0.12.0-py38h294d835_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/geopy-2.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/geotiff-1.7.1-h714bc5f_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/glib-2.74.0-h12be248_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/joblib-1.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/kealib-1.4.15-hdf81f3a_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/kiwisolver-1.4.4-py38hbd9d945_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libnetcdf-4.8.1-nompi_h85765be_104.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libspatialite-5.0.1-hd9530bf_19.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/lz4-4.0.0-py38he18b7d8_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/markupsafe-2.1.1-py38h294d835_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/msgpack-python-1.0.4-py38hbd9d945_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/munch-2.5.0-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/numpy-1.23.3-py38h8503d5b_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/openpyxl-3.0.10-py38h91455d4_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/packaging-21.3-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/partd-1.3.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pillow-9.2.0-py38h37aa274_2.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pip-22.2.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/psutil-5.9.2-py38h91455d4_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pyproj-3.4.0-py38h5a72d38_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.8.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pyyaml-6.0-py38h294d835_4.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/rtree-1.0.0-py38h8b54edf_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tiledb-2.11.3-h5689973_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/tornado-6.1-py38h294d835_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/unicodedata2-14.0.0-py38h294d835_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/win_inet_pton-1.1.0-py38haa244fe_4.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/zict-2.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/bottleneck-1.3.5-py38hbdcd294_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/brotlipy-0.7.0-py38h294d835_1004.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cftime-1.6.2-py38hbaf524b_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/click-log-0.4.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/contourpy-1.0.5-py38hb1fd069_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cryptography-37.0.4-py38hb7941b4_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/dask-core-2022.9.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/fonttools-4.37.3-py38h91455d4_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/gstreamer-1.20.3-h6b5321d_2.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.2-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/mercantile-1.2.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pandas-1.5.0-py38h5846ac1_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/poppler-22.04.0-hb57f792_3.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pygeos-0.13-py38h8678f91_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyh0701188_6.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/scipy-1.9.1-py38h91810f7_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/shapely-1.8.4-py38h91759cc_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/sip-6.6.2-py38h885f38d_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/trimesh-3.15.2-pyh1a96a4e_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/bokeh-2.4.3-pyhd8ed1ab_3.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/branca-0.5.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/geopandas-base-0.11.1-pyha770c72_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/gst-plugins-base-1.20.3-h001b923_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/libgdal-3.5.2-hc386656_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/matplotlib-base-3.5.3-py38h3268a40_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/netcdf4-1.6.1-nompi_py38h78680c8_100.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyopenssl-22.0.0-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pyqt5-sip-12.11.0-py38h885f38d_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/scikit-learn-1.1.2-py38hc27f28a_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/xarray-2022.6.0-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/cartopy-0.21.0-py38h395e5b8_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/gdal-3.5.2-py38h658b046_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/mapclassify-2.4.3-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/qt-main-5.15.6-hf0cf448_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/rasterio-1.3.2-py38h8b19869_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/urllib3-1.26.11-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/distributed-2022.9.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/fiona-1.8.21-py38h4ea64ce_2.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/pyqt-5.15.7-py38h75e37d8_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/requests-2.28.1-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/rioxarray-0.12.2-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/contextily-1.2.0-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/dask-2022.9.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/folium-0.12.1.post1-pyhd8ed1ab_1.tar.bz2
https://conda.anaconda.org/conda-forge/win-64/matplotlib-3.5.3-py38haa244fe_2.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/pyepsg-0.4.0-py_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/geopandas-0.11.1-pyhd8ed1ab_0.tar.bz2
https://conda.anaconda.org/conda-forge/noarch/alphashape-1.3.1-pyh44b312d_0.tar.bz2

View File

@ -2,7 +2,7 @@
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="jdk" jdkName="Python 3.9 (py39)" jdkType="Python SDK" />
<orderEntry type="jdk" jdkName="Python 3.8 (geopandas_forge_mustique)" jdkType="Python SDK" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">

View File

@ -1,4 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (py39)" project-jdk-type="Python SDK" />
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (geopandas_forge_mustique)" project-jdk-type="Python SDK" />
</project>

45
EWR_Data/CFSR_Wind.py Normal file
View File

@ -0,0 +1,45 @@
## Process CFSv2 Wind NetCDF Data
# AJMR: December 2022
#
# Flat script: loads a directory of CFSv2 10 m wind NetCDF files as one
# aggregated multi-file dataset, decodes the packed timestamp variable into
# datetimes, and produces a quick-look plot. All paths are hard-coded to a
# local download location.
import xarray as xr
import netCDF4 as nc
import numpy as np
import os
import matplotlib.pyplot as plt
from datetime import datetime
#%% Setup paths
# List every file in the (hard-coded) download directory.
cdfv2Paths = os.listdir('C:/Users/arey/Downloads/wnd10m.cdas1.201104-gdas.201103grb2.nc/')
# Add path to each file (os.listdir returns bare names, not full paths)
cdfv2Paths = ['C:/Users/arey/Downloads/wnd10m.cdas1.201104-gdas.201103grb2.nc/' + i for i in cdfv2Paths]
#%% Set up multiple file dataset
# MFDataset presents the individual files as a single dataset concatenated
# along the unlimited (time) dimension.
cfsv2_mf = nc.MFDataset(cdfv2Paths)
#%% Read in NC data
# U/V wind component variables; 'L103' presumably denotes the 10 m
# above-ground level — TODO confirm against the file metadata.
cfsv2_windU = cfsv2_mf.variables['U_GRD_L103']
cfsv2_windV = cfsv2_mf.variables['V_GRD_L103']
#%% Process time from NetCDF file
nctimes = cfsv2_mf.variables['valid_date_time'][:] # get values (char array, one row per timestep)
# Convert numpy char array to list of strings
nctimesStr = [''.join(row) for row in nctimes.astype(str)]
# Parse the "YYYYMMDDHH" strings into datetime objects.
ncDatetime = [datetime.strptime(date, "%Y%m%d%H") for date in nctimesStr]
#%% Plot
# Set up figure
fig, ax = plt.subplots(1, 1, figsize=(10, 10))
# Plot U and V wind speeds
# ax.plot(ncDatetime, np.sqrt(cfsv2_windU[:, 0, 0]**2 + cfsv2_windV[:, 0, 0]**2), label='CFSv2 Wind Magnitude')
# NOTE(review): with a single argument this plots the timestamps themselves
# against their index (a sanity check of the decoded time axis), not wind
# speed — the wind-magnitude plot above is left commented out.
ax.plot(ncDatetime)
plt.show()

View File

@ -2,14 +2,23 @@
"cells": [
{
"cell_type": "code",
"execution_count": 10,
"execution_count": 1,
"metadata": {
"collapsed": true,
"pycharm": {
"name": "#%% Setup\n"
}
},
"outputs": [],
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"C:\\Users\\arey\\Anaconda3\\envs\\py39\\lib\\site-packages\\geopandas\\_compat.py:111: UserWarning: The Shapely GEOS version (3.10.2-CAPI-1.16.0) is incompatible with the GEOS version PyGEOS was compiled with (3.10.3-CAPI-1.16.1). Conversions between both will be slow.\n",
" warnings.warn(\n"
]
}
],
"source": [
"#import jupyter\n",
"import pandas as pd\n",
@ -702,11 +711,7 @@
{
"cell_type": "code",
"execution_count": 31,
"metadata": {
"pycharm": {
"name": "#%%\n"
}
},
"metadata": {},
"outputs": [],
"source": [
"# sc = axes.scatter(EWR_gdf_filt_UTM.loc[(EWR_gdf_filt_UTM['param_name'].isin(plotvars)) &\n",
@ -949,8 +954,6 @@
},
"outputs": [],
"source": [
"\n",
"\n",
"mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'\n",
"\n",
"fig, axes = plt.subplots(nrows=3, ncols=1, figsize=(10, 14))\n",
@ -1038,4 +1041,4 @@
},
"nbformat": 4,
"nbformat_minor": 1
}
}

150
EWR_Data/EWR_Hg_Interp.py Normal file
View File

@ -0,0 +1,150 @@
## Process EWR Hg Data into a surface
# AJMR: September 2022
#
# Flat script: attaches an along-river distance to mercury sediment samples
# by snapping them to a merged river centerline, takes the maximum value per
# sample site, plots profiles/maps, and nearest-neighbour interpolates Hg
# onto the model bathymetry grid (zeroing wetland cells). Input/output paths
# are hard-coded network and local locations.
#%% Setup
import pandas as pd
import geopandas as gp
import gpxpy
import gpxpy.gpx
import numpy as np
import matplotlib.pyplot as plt
import contextily as ctx
import datetime
import scipy as sp
from scipy.interpolate import griddata
from shapely import geometry, ops
#%% Read in consolidated dataset
df_in = pd.read_csv('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/output/mercury sediment subset-SGJ.csv')
# Convert to geopandas (build point geometry from lon/lat columns, WGS84)
dataIN_gp = gp.GeoDataFrame(df_in, geometry=gp.points_from_xy(df_in.Longitude, df_in.Latitude, crs="EPSG:4326"))
# Convert to UTM zone 15N so distances below are in metres
dataIN_gp.to_crs(crs='EPSG:32615', inplace=True)
#%% Read in centerline shapefile
river_centerline = gp.read_file('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/03_Data/02_Physical/16_Waterline/Centerline_for_Modelling_UTMZ15.shp')
# Explode multi-part geometries into one row per line segment.
river_centerlineExploded = river_centerline.explode(ignore_index=True)
river_centerlineExploded.reset_index(inplace=True)
# Hand-picked segment ordering so the parts chain end-to-end along the
# river; column 4 is presumably the geometry column — TODO confirm.
tempMulti = river_centerlineExploded.iloc[[5,0,1,2,3,4,6,7,9], 4]
# Put the sub-line coordinates into a list of sublists
outcoords = [list(i.coords) for i in tempMulti]
# Flatten the list of sublists and use it to make a new line
river_centerline_merge = geometry.LineString([i for sublist in outcoords for i in sublist])
river_centerline_merge_gpd = gp.GeoSeries(river_centerline_merge)
# Also build a point GeoDataFrame with one point per centerline vertex.
river_centerline_merge_gpd2 =\
gp.GeoDataFrame(geometry=gp.points_from_xy(
river_centerline_merge.xy[0], river_centerline_merge.xy[1], crs="EPSG:32615"))
# Add distance along centerline
river_centerline_merge_gpd2['DistanceFromPrevious'] = river_centerline_merge_gpd2.distance(river_centerline_merge_gpd2.shift(1))
# NOTE(review): despite the name, 'RiverKM' is a cumulative distance in the
# CRS units (metres for EPSG:32615), not kilometres — see the 0-100000
# plot limits used elsewhere.
river_centerline_merge_gpd2['RiverKM'] = river_centerline_merge_gpd2['DistanceFromPrevious'].cumsum()
# First vertex has no predecessor (shift(1) yields NaN) — force both the
# step distance and the cumulative distance to 0.
river_centerline_merge_gpd2.iloc[0, 1] = 0
river_centerline_merge_gpd2.iloc[0, 2] = 0
#%% Add riverkm distances to dataset
# Attach each centerline vertex to its nearest sample within 100 m.
dataOUT = gp.sjoin_nearest(river_centerline_merge_gpd2, dataIN_gp, how='left', max_distance=100)
#%% Take only surface values
# Maximum sample value per site (one row per Samplesiteid).
dataOUTmax = dataOUT.groupby(['Samplesiteid'], as_index=False)['Samplevalue'].max()
# Merge geometry and river KM
dataOUTmax = pd.merge(dataOUTmax, dataOUT[['Samplesiteid', 'RiverKM']], on='Samplesiteid', how='left')
dataOUTmax = pd.merge(dataOUTmax, dataOUT[['Samplesiteid', 'geometry']], on='Samplesiteid', how='left')
#%% Set River KM as index
dataOUT.set_index('RiverKM', inplace=True)
dataOUTmax.set_index('RiverKM', inplace=True)
#%% River Profile Plotting
# Hg value versus distance along river: all snapped samples plus the
# per-site maxima.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 6))
axes.scatter(dataOUT.index, dataOUT.Samplevalue)
axes.scatter(dataOUTmax.index, dataOUTmax.Samplevalue)
plt.show()
#%% Hg Map Plotting
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(10, 7))
fig.patch.set_facecolor('white')
# Full system limits
# axes.set_xlim(350000, 520000)
# axes.set_ylim(5510000, 5580000)
# # Clay Lake Limits
# axes.set_xlim(466418, 515846)
# axes.set_ylim(5513437, 5545362)
# Dryden Limits
# NOTE(review): these 'Dryden' limits are identical to the commented Clay
# Lake limits above — confirm the intended extent.
axes.set_xlim(466418, 515846)
axes.set_ylim(5513437, 5545362)
# Add Basemap
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32615')
# Plot Hg
# dataOUTmax.plot(ax=axes, column='Samplevalue', legend=True, vmin=0, vmax=20000)
# Plot Hg At surface
# Keep only samples with TopDepth 0 (or missing, assumed surface).
dataOUT.loc[((dataOUT.TopDepth == 0) | (dataOUT.TopDepth.isna()))].plot(
ax=axes, column='Samplevalue', legend=True, vmin=0, vmax=10000)
plt.show()
#%% Interpolate Hg
# header=0 together with names= means the first row of each .xyz file is
# consumed as a header row and replaced by these column names.
bathy_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Bed_Level.xyz',
names=['x', 'y', 'z'], header=0, delim_whitespace=True)
rough_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Roughness.xyz',
names=['x', 'y', 'z'], header=0, delim_whitespace=True)
# Nearest-neighbour interpolation of the per-site maximum Hg onto the
# bathymetry grid points.
HgInterp = griddata(np.array([dataOUTmax.geometry.x, dataOUTmax.geometry.y]).T,
np.array(dataOUTmax.Samplevalue).T, np.array([bathy_xyz.x, bathy_xyz.y]).T,
method='nearest')
HgInterpOut = np.vstack((bathy_xyz.x, bathy_xyz.y, HgInterp))
HgInterpOut = np.transpose(HgInterpOut)
# Set Wetlands to zero
# Interpolate Roughness
RoughInterp = sp.interpolate.griddata(np.transpose(np.array([rough_xyz.x, rough_xyz.y])), rough_xyz.z, np.transpose(np.array([bathy_xyz.x, bathy_xyz.y])),
method='nearest')
# Zero the Hg column wherever the interpolated roughness equals 0.05,
# which presumably flags wetland cells. NOTE(review): exact float equality
# is fine for a nearest-neighbour passthrough of a constant class value,
# but fragile if roughness is ever computed rather than assigned.
HgInterpOut[RoughInterp==0.05, 2] = 0
np.savetxt('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/SurfaceInterFiltDB.xyz',
HgInterpOut, delimiter=" ")
#%% Hg Interp Plotting
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(10, 7))
fig.patch.set_facecolor('white')
# Clay Lake Limits
axes.set_xlim(466418, 515846)
axes.set_ylim(5513437, 5545362)
# Add Basemap
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32615')
# Plot Hg
axes.scatter(HgInterpOut[:, 0], HgInterpOut[:, 1], 5, HgInterpOut[:, 2],
vmin=0, vmax=5000)
plt.show()

View File

@ -0,0 +1,189 @@
## Process EWR Hg Data into a surface
# AJMR: September 2022
#
# Flat script: builds an along-river distance axis from a merged centerline,
# snaps the combined observation dataset to it, filters to recent surface
# sediment/soil mercury samples, normalizes all values to ng/g, plots
# profiles/maps, and nearest-neighbour interpolates surface Hg onto the
# model bathymetry grid (zeroing wetland cells). Input/output paths are
# hard-coded network and local locations.
#%% Setup
import pandas as pd
import geopandas as gp
import gpxpy
import gpxpy.gpx
import numpy as np
import matplotlib.pyplot as plt
import contextily as ctx
import datetime
import scipy as sp
from scipy.interpolate import griddata
from shapely import geometry, ops
#%% Read in centerline shapefile
river_centerline = gp.read_file('//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/03_Data/02_Physical/16_Waterline/Centerline_for_Modelling_UTMZ15.shp')
# Explode multi-part geometries into one row per line segment.
river_centerlineExploded = river_centerline.explode(ignore_index=True)
river_centerlineExploded.reset_index(inplace=True)
# Hand-picked segment ordering so the parts chain end-to-end along the
# river; column 4 is presumably the geometry column — TODO confirm.
tempMulti = river_centerlineExploded.iloc[[5,0,1,2,3,4,6,7,9], 4]
# Put the sub-line coordinates into a list of sublists
outcoords = [list(i.coords) for i in tempMulti]
# Flatten the list of sublists and use it to make a new line
river_centerline_merge = geometry.LineString([i for sublist in outcoords for i in sublist])
river_centerline_merge_gpd = gp.GeoSeries(river_centerline_merge)
# Also build a point GeoDataFrame with one point per centerline vertex.
river_centerline_merge_gpd2 =\
    gp.GeoDataFrame(geometry=gp.points_from_xy(
        river_centerline_merge.xy[0], river_centerline_merge.xy[1], crs="EPSG:32615"))
# Add distance along centerline.
# NOTE(review): despite the name, 'RiverKM' is a cumulative distance in the
# CRS units (metres for EPSG:32615), not kilometres — the axis labels below
# say [m].
river_centerline_merge_gpd2['DistanceFromPrevious'] = river_centerline_merge_gpd2.distance(river_centerline_merge_gpd2.shift(1))
river_centerline_merge_gpd2['RiverKM'] = river_centerline_merge_gpd2['DistanceFromPrevious'].cumsum()
# First vertex has no predecessor (shift(1) yields NaN) — force both the
# step distance and the cumulative distance to 0.
river_centerline_merge_gpd2.iloc[0, 1] = 0
river_centerline_merge_gpd2.iloc[0, 2] = 0
#%% Read in combined dataset
obs_IN = pd.read_csv("//srv-ott3.baird.com/Projects/12828.101 English Wabigoon River/05_Analyses/02_Data Analysis/output/combined dataset-SGJ.csv")
# Add Datetime
obs_IN['DateTime'] = pd.to_datetime(obs_IN.Sampledate_x)
#%% Process combined dataset
# Convert to geodataframe (WGS84 lon/lat -> UTM zone 15N)
obs_gdf = gp.GeoDataFrame(obs_IN, geometry=gp.points_from_xy(
    obs_IN.loc[:, 'Longitude'], obs_IN.loc[:, 'Latitude'], crs="EPSG:4326")).to_crs(crs="EPSG:32615")
# Attach each observation to the nearest centerline vertex within 250 m and
# reset the index.
obs_gdf = gp.sjoin_nearest(river_centerline_merge_gpd2, obs_gdf, how='right', max_distance=250).reset_index()
# Surface sediment/soil Hg mask: sediment or soil media, any of the four
# mercury parameter spellings, sampled after 2000, snapped to the
# centerline, and at the surface (TopDepth 0 or missing, assumed surface).
obsMask = (((obs_gdf.Media_x == 'SED') | (obs_gdf.Media_x == 'SOIL')) &
           ((obs_gdf.Parameter == 'Mercury') | (obs_gdf.Parameter == 'Mercury (Hg) Total')
            | (obs_gdf.Parameter == 'Total Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)')) &
           (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna() &
           ((obs_gdf.TopDepth_x == 0) | (obs_gdf.TopDepth_x.isna())))
# Alternative mask without the surface-depth restriction (kept for reference):
# obsMask = (((obs_gdf.Media_x == 'SED') | (obs_gdf.Media_x == 'SOIL')) &
# ((obs_gdf.Parameter == 'Mercury') | (obs_gdf.Parameter == 'Mercury (Hg) Total')
# | (obs_gdf.Parameter == 'Total Mercury') | (obs_gdf.Parameter == 'Mercury (Hg)')) &
# (obs_gdf.DateTime > datetime.datetime(2000, 1, 1)) & obs_gdf.RiverKM.notna())
obs_HgSed = obs_gdf.loc[obsMask, :].copy()
# Adjust Units: normalize every sample to ng/g in 'Sample_NG/G'.
# 1 ug/g = 1000 ng/g and 1 mg/kg = 1 ug/g = 1000 ng/g.
obs_HgSed.loc[obs_HgSed.Unit == 'NG/G', 'Sample_NG/G'] = obs_HgSed.loc[obs_HgSed.Unit == 'NG/G', 'Samplevalue']
obs_HgSed.loc[obs_HgSed.Unit == 'ng/g', 'Sample_NG/G'] = obs_HgSed.loc[obs_HgSed.Unit == 'ng/g', 'Samplevalue']
obs_HgSed.loc[obs_HgSed.Unit == 'UG/G', 'Sample_NG/G'] = obs_HgSed.loc[obs_HgSed.Unit == 'UG/G', 'Samplevalue'] * 1000
obs_HgSed.loc[obs_HgSed.Unit == 'ug/g', 'Sample_NG/G'] = obs_HgSed.loc[obs_HgSed.Unit == 'ug/g', 'Samplevalue'] * 1000
# BUGFIX: the mg/kg rows previously used a 0.001 factor, understating those
# samples by a factor of 1e6 — 1 mg/kg equals 1000 ng/g (same as 1 ug/g).
# Confirm that 'MG/KG' in the source data really denotes mg/kg.
obs_HgSed.loc[obs_HgSed.Unit == 'MG/KG', 'Sample_NG/G'] = obs_HgSed.loc[obs_HgSed.Unit == 'MG/KG', 'Samplevalue'] * 1000
obs_HgSed.loc[obs_HgSed.Unit == 'mg/kg', 'Sample_NG/G'] = obs_HgSed.loc[obs_HgSed.Unit == 'mg/kg', 'Samplevalue'] * 1000
#%% River Profile Plotting
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(6, 6))
# BUGFIX: plot the normalized ng/g values so they match the axis label
# (previously the raw mixed-unit 'Samplevalue' was plotted, twice).
axes.scatter(obs_HgSed.RiverKM, obs_HgSed['Sample_NG/G'])
axes.set_ylim(0, 20000)
axes.set_xlim(0, 100000)
axes.set_xlabel('Distance Along River [m]')
axes.set_ylabel('Surface Sediment Hg [ng/g]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Hg_SurfaceProfile.png',
            bbox_inches='tight', dpi=300)
#%% Plot Vertical Profiles
# Sample depth vs along-river distance, colored by Hg concentration.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(7, 7))
obs_HgSed.plot.scatter('RiverKM', 'TopDepth_x', 12, 'Sample_NG/G',
                       ax=axes, vmax=20000, cmap='viridis')
# Depth increases downward on the plot.
axes.invert_yaxis()
axes.set_xlabel('Distance Along River [m]')
axes.set_ylabel('Sample Depth [cm]')
axes.set_xlim([0, 100000])
# Get colorbar axis to set a legend
cax = fig.get_axes()[1]
# ...and we can modify it, i.e.:
cax.set_ylabel('Sediment Hg (ng/g)')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Hg_Vertical.png',
            bbox_inches='tight', dpi=300)
#%% Hg Map Plotting
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(10, 7))
fig.patch.set_facecolor('white')
# Full system limits
# axes.set_xlim(350000, 520000)
# axes.set_ylim(5510000, 5580000)
# # Clay Lake Limits
# axes.set_xlim(466418, 515846)
# axes.set_ylim(5513437, 5545362)
# Dryden Limits
axes.set_xlim(497697, 515846)
axes.set_ylim(5513437, 5525166)
# Add Basemap
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32615')
# Plot Hg at surface.
# BUGFIX: color by the normalized ng/g column so the legend label is
# accurate (previously used raw mixed-unit 'Samplevalue').
obs_HgSed.plot(ax=axes, column='Sample_NG/G', legend=True,
               vmin=0, vmax=5000, markersize=2, cmap='viridis',
               legend_kwds={'label': 'Surface Sediment Hg (ng/g)'})
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/SurfaceHgMap.png',
            bbox_inches='tight', dpi=300)
#%% Interpolate Hg
# header=0 together with names= means the first row of each .xyz file is
# consumed as a header row and replaced by these column names.
bathy_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Bed_Level.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)
rough_xyz = pd.read_csv('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/Roughness.xyz',
                        names=['x', 'y', 'z'], header=0, delim_whitespace=True)
# Nearest-neighbour interpolation of surface sediment Hg (ng/g) onto the
# bathymetry grid points.
# BUGFIX: this section previously referenced `dataOUTmax`, which is defined
# only in EWR_Hg_Interp.py — running this file standalone raised NameError.
# Interpolate this script's own filtered surface dataset instead; if the
# per-site maxima from the other script were intended, run that script
# first and restore the original references.
HgInterp = griddata(np.array([obs_HgSed.geometry.x, obs_HgSed.geometry.y]).T,
                    np.array(obs_HgSed['Sample_NG/G']).T, np.array([bathy_xyz.x, bathy_xyz.y]).T,
                    method='nearest')
HgInterpOut = np.vstack((bathy_xyz.x, bathy_xyz.y, HgInterp))
HgInterpOut = np.transpose(HgInterpOut)
# Set Wetlands to zero
# Interpolate Roughness onto the same grid; roughness 0.05 presumably flags
# wetland cells (exact float equality is fine for a nearest-neighbour
# passthrough of a constant class value).
RoughInterp = sp.interpolate.griddata(np.transpose(np.array([rough_xyz.x, rough_xyz.y])), rough_xyz.z, np.transpose(np.array([bathy_xyz.x, bathy_xyz.y])),
                                      method='nearest')
HgInterpOut[RoughInterp==0.05, 2] = 0
np.savetxt('C:/Users/arey/files/Projects/Grassy Narrows/LocalData/SurfaceInterFiltDB.xyz',
           HgInterpOut, delimiter=" ")
#%% Hg Interp Plotting
mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb1e/tiles/256/{z}/{x}/{y}@2x?access_token=pk.eyJ1IjoiYWxleGFuZGVyMDA0MiIsImEiOiJjazVmdG4zbncwMHY4M2VrcThwZGUzZDFhIn0.w6oDHoo1eCeRlSBpwzwVtw'
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(10, 7))
fig.patch.set_facecolor('white')
# Clay Lake Limits
axes.set_xlim(466418, 515846)
axes.set_ylim(5513437, 5545362)
# Add Basemap
ctx.add_basemap(axes, source=mapbox, crs='EPSG:32615')
# Plot interpolated Hg surface as a point cloud.
axes.scatter(HgInterpOut[:, 0], HgInterpOut[:, 1], 5, HgInterpOut[:, 2],
             vmin=0, vmax=5000)
plt.show()

View File

@ -24,11 +24,11 @@ import matplotlib.font_manager as fm
import os
import sys
sys.path.append('C:\\Users\\arey\\Repo\\Python_Baird\\Mustique\\adcptool')
sys.path.append('C:/Users/arey/Repo/Python_Baird/Mustique')
sys.path.append('C:/Users/arey/Repo/Python_Baird/Mustique/adcptool')
from adcploader import *
# %% Read Model Log
pth = pl.Path("//srv-ott3.baird.com/", "Projects", "13509.101 NCC New Edinburgh Ottawa",
@ -128,7 +128,7 @@ plt.show()
# %% Read Model Results at point
modelPlot = [1]
modelPlot = [5]
ds_list = [None] * (max(modelPlot) + 1)
dsT = [None] * (15)
@ -167,23 +167,23 @@ readPoints = readPoints[~np.isnan(readPoints).any(axis=1)]
for m in modelPlot:
dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'Run01_2DOut.dfsu').as_posix())
dfsIN = Dfsu(pl.Path(runLog['Run Location'][m], 'NCC_2Dout.dfsu').as_posix())
dfs2d_list[m] = dfsIN
# # Read Map
ds_list[m] = dfs2d_list[m].read()
# Read in transects
dsT[4] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T4.dfsu').as_posix())
dsT[5] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T5.dfsu').as_posix())
dsT[6] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T6.dfsu').as_posix())
dsT[7] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T7.dfsu').as_posix())
dsT[8] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T8.dfsu').as_posix(), time=90)
dsT[9] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T9.dfsu').as_posix())
dsT[10] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T10.dfsu').as_posix())
dsT[11] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T11.dfsu').as_posix())
dsT[13] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T13.dfsu').as_posix())
dsT[14] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T14.dfsu').as_posix())
# dsT[4] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T4.dfsu').as_posix())
# dsT[5] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T5.dfsu').as_posix())
# dsT[6] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T6.dfsu').as_posix())
# dsT[7] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T7.dfsu').as_posix())
# dsT[8] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T8.dfsu').as_posix(), time=90)
# dsT[9] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T9.dfsu').as_posix())
# dsT[10] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T10.dfsu').as_posix())
# dsT[11] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T11.dfsu').as_posix())
# dsT[13] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T13.dfsu').as_posix())
# dsT[14] = mikeio.read(pl.Path(runLog['Run Location'][m], 'Run01_T14.dfsu').as_posix())
# ## Read specific points in 2D
# # Find nearest elements
@ -235,7 +235,7 @@ mapbox = 'https://api.mapbox.com/styles/v1/alexander0042/ckemxgtk51fgp19nybfmdcb
x, y, arrow_length = 0.93, 0.95, 0.12
fontprops = fm.FontProperties(size=12)
modelPlot = [1]
modelPlot = [5]
# Set thinning factor
thin_factor = 10
@ -246,12 +246,17 @@ for m in modelPlot:
fig, axes = plt.subplots(figsize=(8, 8))
# Plot Velocity
# Select salinity data at last time step
# Plot all selected values at a given later and not nan
axDHI = ds_list[m]['Current speed'][-1, :].plot(plot_type='contourf',
# Filter Out Land Values (no velocity)
dhiFilt = ds_list[m]['Current speed'].sel(time="2018-01-01 02:30:00").values > 0
dhiFiltIDX = [i for i, x in enumerate(dhiFilt) if x]
# Plot Velocity at last time step for elements greater than zero
axDHI = ds_list[m]['Current speed'].sel(time="2018-01-01 02:30:00").isel(element=dhiFiltIDX).plot.contourf(
show_mesh=False, cmap='inferno', ax=axes,
vmin=vmin, vmax=vmax, label='Current Speed (m/s)')
# Add ADCP data
for adcp_to_plot in range(0, len(adcpDat)):
# position of ensembles

View File

@ -69,7 +69,14 @@ importPaths = ['//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Resear
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220530 WQ sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220709 WQ sampling/Great House',
None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220709 WQ sampling/Old Queens Fort']
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220709 WQ sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220812 WQ sampling/Great House',
None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20220812 WQ sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221115 WQ sampling/Great House',
None,
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221115 WQ sampling/Old Queens Fort',
'//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/20221127 WQ Sampling/Crane']
siteNames = ['Great House',
'Greensleeves',
@ -97,7 +104,14 @@ siteNames = ['Great House',
'Old Queens Fort',
'Great House',
None,
'Old Queens Fort']
'Old Queens Fort',
'Great House',
None,
'Old Queens Fort',
'Great House',
None,
'Old Queens Fort',
'Crane']
timeLabels= ['Before Construction',
'Before Construction',
@ -125,7 +139,14 @@ timeLabels= ['Before Construction',
'May',
'July',
None,
'July']
'July',
'August',
None,
'August',
'November',
None,
'November',
'November']
wave_bts_file = [
'T:/Alexander/WestCoast/Barbados Nowcast 2021-09-15 to 2021-11-15/spawnee_mid_27m.bts',
@ -154,11 +175,20 @@ wave_bts_file = [
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None]
# %% Read in site shapefile
siteShp = gp.read_file('//srv-ott3.baird.com/Projects/INNOVATION/Phase97 Barbados Research/05_Analyses/SitePolygons_Crane.shp')
siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")
for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
for s in [33]: #range(15, 27):
## Define master import path
importPath = importPaths[s]
siteName = siteNames[s]
@ -187,7 +217,7 @@ for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
Obs_dat =Obs_dat[Obs_dat['CH1:Temperature(degC)'].notna()]
# Set Time Zone for Sensor. Sometimes it is set to UTC, sometimes it is set to EST
if s < 15:
if s < 15 or s == 30 or s == 32 or s == 33:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('America/Barbados').dt.tz_convert('UTC')
else:
Obs_dat['DateTime'] = pd.to_datetime(Obs_dat['Timestamp(Standard)']).dt.tz_localize('UTC')
@ -203,7 +233,10 @@ for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
#convert GPS data to geodataframe
GPS_gdf = gp.GeoDataFrame(GPS, geometry=gp.points_from_xy(-GPS.Easting, GPS.Northing, crs="EPSG:4326"))
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))
if s == 30 or s == 32 or s == 33:
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date1'].astype(str) + ' ' + GPS_gdf['Time1'].astype(str))
else:
GPS_gdf['DateTime'] = pd.to_datetime(GPS_gdf['Date2'].astype(str) + ' ' + GPS_gdf['Time2'].astype(str))
elif GPS_Type == 'xls':
@ -223,11 +256,6 @@ for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
# Convert to UTM
GPS_gdf.geometry = GPS_gdf.geometry.to_crs("EPSG:32621")
#%% Read in site shapefile
siteShp = gp.read_file('C:/Users/arey/files/Projects/West Coast/SitePolygons.shp')
siteShp.geometry = siteShp.geometry.to_crs("EPSG:32621")
#%% Merge GPS to RBR
# Process RBR into datetime
if OBS_File is not None:
@ -261,6 +289,12 @@ for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
GFS_Lon = -59.6419
GFS_Lat = 13.1960
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[0, 1])
elif siteName == 'Crane':
axXlim = (234835, 235507)
axYlim = (1449966, 1450580)
GFS_Lon = -59.442
GFS_Lat = 13.1075
Obs_geo['inArea'] = Obs_geo.within(siteShp.iloc[3, 1])
# Set min and max times using conductivity
@ -295,6 +329,9 @@ for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
# Set times to download
startdate = metDate - timedelta(hours=18)
enddate = metDate + timedelta(hours=6)
# startdate = metDate - timedelta(hours=36)
# enddate = metDate + timedelta(hours=-12)
date_list = pd.date_range(startdate, enddate, freq='6H')
# Loop through dates
@ -309,27 +346,36 @@ for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
# Format the query parameters
ncss = best_ds.subset()
query = ncss.query()
ncss_precp = best_ds_precp.subset()
query_precp = ncss_precp.query()
# Extract data from specific point
query.lonlat_point(GFS_Lon, GFS_Lat).time(date)
query.accept('netcdf')
query.variables(var[0], var[1], var[2], var[3])
query.vertical_level(10)
# Loop through variables and request
data = dict()
for v in var:
# Setup query
query = ncss.query()
data = ncss.get_data(query)
data = xr.open_dataset(NetCDF4DataStore(data), drop_variables='height_above_ground4')
# Extract data from specific point
query.lonlat_point(GFS_Lon, GFS_Lat).time(date)
query.accept('netcdf')
query.variables(v)
# IF there are multiple vertical levels, select the desired one
if v == 'u-component_of_wind_height_above_ground' or v == 'v-component_of_wind_height_above_ground':
query.vertical_level(10)
dataIN = ncss.get_data(query)
data[v] = xr.open_dataset(NetCDF4DataStore(dataIN), drop_variables='height_above_ground4')
query_precp.lonlat_point(GFS_Lon, GFS_Lat).time(date + timedelta(hours=6))
query_precp.accept('netcdf')
query_precp.variables(var_precp[0])
data_precp = ncss_precp.get_data(query_precp)
data_precp = xr.open_dataset(NetCDF4DataStore(data_precp))
dataIN = ncss_precp.get_data(query_precp)
data_precp = dict()
data_precp[var_precp[0]] = xr.open_dataset(NetCDF4DataStore(dataIN))
temp_3d = data[var[0]]
gust_3d = data[var[1]]
@ -338,11 +384,12 @@ for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
prep_3d = data_precp[var_precp[0]]
# Read the individual point (with units) and append to the list
temp_1d.append(temp_3d.metpy.unit_array.squeeze())
gust_1d.append(gust_3d.metpy.unit_array.squeeze())
wndu_1d.append(wndu_3d.metpy.unit_array.squeeze())
wndv_1d.append(wndv_3d.metpy.unit_array.squeeze())
prep_1d.append(prep_3d.metpy.unit_array.squeeze())
temp_1d.append(temp_3d.metpy.quantify().Temperature_surface.data.squeeze())
gust_1d.append(gust_3d.metpy.quantify().Wind_speed_gust_surface.data.squeeze())
wndu_1d.append(wndu_3d.metpy.quantify()['u-component_of_wind_height_above_ground'].data.squeeze())
wndv_1d.append(wndv_3d.metpy.quantify()['v-component_of_wind_height_above_ground'].data.squeeze())
prep_1d.append(prep_3d.metpy.quantify()['Total_precipitation_surface_6_Hour_Accumulation'].data.squeeze())
time_1d.append(find_time_var(temp_3d))
#%% Process Met Data
@ -411,7 +458,7 @@ for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
# paramMin = [0.0, 34.0, 32.5, 25.0, 0, 0]
# paramMax = [1.0, 36.0, 34.0, 31.0, 20.0, 1.0]
paramMin = [0.0, 32.0, 100, 26.0, 0, 0]
paramMax = [1.0, 36.0, 130, 34.0, 75.0, 12000]
paramMax = [6, 36.0, 130, 34.0, 150.0, 12000]
fig.patch.set_facecolor('white')
@ -444,13 +491,13 @@ for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
12, win_type='nuttall',center=True).mean()
OBS_smoothed.plot(
ax=axes[paramIDX], label='1 Minute Average', color='black',
ax=axes[paramIDX], label='10 Second Average', color='black',
linewidth=3)
# Find the local maximums for Turbidity
if param == 'CH6:Turbidity(NTU)':
ilocs_max.append(argrelextrema(OBS_smoothed.values,
np.greater_equal, order=6, mode='wrap')[0])
np.greater_equal, order=8, mode='wrap')[0])
# Add start and end points?
# ilocs_max = np.insert(ilocs_max, 0, 10)
@ -478,7 +525,7 @@ for s in [15, 16, 17, 18, 19, 20, 21, 23, 24, 26]: #range(15, 27):
axes[paramIDX].set_title(paramName[paramIDX])
axes[paramIDX].set_xlabel('')
axes[paramIDX].set_ylim(paramMin[paramIDX], paramMax[paramIDX])
axes[paramIDX].legend(loc='upper left')
axes[paramIDX].legend(loc='upper right')
# axes[paramIDX].set_xlabel(minTime.strftime('%B %#d, %Y'))
@ -664,8 +711,8 @@ plotvars = ['Air Temperature [degC]', 'Wind Speed [m/s]', 'Wind Direction [deg]'
for i in range(2, 3):
summTable = None
# plotIDXs = np.arange(i, 27, 3)
plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26]
#plotIDXs = [0, 3, 6, 12, 15, 18, 21, 24]
#plotIDXs = [2, 5, 8, 14, 17, 20, 23, 26, 29, 32]
plotIDXs = [0, 3, 6, 12, 15, 18, 21, 24, 27, 30]
plotDates = []
plotTable = np.empty([10, len(plotIDXs)])

1
NTC_DFM/.idea/.name Normal file
View File

@ -0,0 +1 @@
EFDC_compare.ipynb

424
NTC_DFM/EFDC_compare.py Normal file
View File

@ -0,0 +1,424 @@
#%%
# -*- coding: utf-8 -*-
"""
@author: AJMR
Script to compare EFDC results against observations
"""
#%% Import
import os
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
import geopandas as gp
gp.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'
from shapely.ops import nearest_points
from shapely.geometry import LineString
from shapely.geometry import Point, MultiPoint
from shapely.ops import nearest_points
from datetime import datetime, timedelta
from dfm_tools.get_nc import get_netdata, get_ncmodeldata, plot_netmapdata
from dfm_tools.get_nc_helpers import get_timesfromnc
from dfm_tools.regulargrid import scatter_to_regulargrid
from pathlib import Path
import netCDF4 as nc
import xarray as xr
import pytz
import pathlib as pl
#%% Function to find nearest point
#%% Function to find nearest point
def get_nearest_values(row, other_gdf, point_column='geometry', value_column="geometry"):
    """Find the nearest point and return the corresponding value from specified value column.

    Parameters
    ----------
    row : pandas.Series / GeoDataFrame row holding the query geometry in ``point_column``.
    other_gdf : geopandas.GeoDataFrame searched for the nearest geometry.
    point_column : name of the column in ``row`` containing the query geometry.
    value_column : column of ``other_gdf`` whose value at the nearest geometry is returned.

    Returns
    -------
    The ``value_column`` entry of the row in ``other_gdf`` whose geometry is
    closest to ``row[point_column]``.
    """
    # Create an union of the other GeoDataFrame's geometries:
    other_points = other_gdf["geometry"].unary_union
    # Find the nearest points (nearest_geoms[1] is the point within other_points)
    nearest_geoms = nearest_points(row[point_column], other_points)
    # Get corresponding values from the other df
    nearest_data = other_gdf.loc[other_gdf["geometry"] == nearest_geoms[1]]
    # BUG FIX: Series.get_values() was removed in pandas 1.0 — use .iloc[0]
    # to take the first matching row's value instead.
    nearest_value = nearest_data[value_column].iloc[0]
    return nearest_value
#%% Load in Water Level Data
# Gauge Locations from map (KML of the NTC gauge sites; KML driver enabled at import)
# gdf_gaugeLoc = gp.read_file('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/NTC6.kml')
gdf_gaugeLoc = gp.read_file('//srv-ott3.baird.com/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/NTC6.kml')
# Reproject gauge locations to NY Long Island State Plane metres (EPSG:32118)
gdf_gaugeLocUSSP = gdf_gaugeLoc.to_crs("EPSG:32118")
# All in Feet NAVD88
# df_wl_FFG = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP1_Gauge_Elev_Data_20130521/NC_Gauge_Elev_Data_20130521.xlsx',
df_wl_FFG = pd.read_excel('//srv-ott3.baird.com/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP1_Gauge_Elev_Data_20130521/NC_Gauge_Elev_Data_20130521.xlsx',
                          sheet_name='Field_Facility_Gauge')
df_wl_FFG.set_index('Date_time', inplace=True)
# Gauge timestamps are local clock time; localize to America/New_York then convert to UTC.
# NOTE(review): no `ambiguous=` argument here (unlike NGG1 below) — a DST fall-back
# timestamp in this sheet would raise AmbiguousTimeError; confirm the record range.
df_wl_FFG.index = df_wl_FFG.index.tz_localize(
    pytz.timezone('America/New_York')).tz_convert(pytz.utc) #Convert to UTC
# Stamp every FFG record with the (single) gauge location from the KML.
# NOTE(review): assumes KML row 2 is the Field Facility gauge — confirm ordering.
gdf_wl_FFG = gp.GeoDataFrame(df_wl_FFG,
    geometry=gp.points_from_xy(np.ones([len(df_wl_FFG), 1]) * gdf_gaugeLoc['geometry'].x[2],
                               np.ones([len(df_wl_FFG), 1]) * gdf_gaugeLoc['geometry'].y[2]), crs="EPSG:4326")
gdf_wl_FFG.geometry = gdf_wl_FFG.geometry.to_crs("EPSG:32118")
df_wl_NGG1 = pd.read_excel('//srv-ott3.baird.com/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP1_Gauge_Elev_Data_20130521/NC_Gauge_Elev_Data_20130521.xlsx',
                           sheet_name='National_Grid_Gauge')
df_wl_NGG1.set_index('Date_time', inplace=True)
# ambiguous=True treats DST fall-back duplicates as DST times
df_wl_NGG1.index = df_wl_NGG1.index.tz_localize(
    pytz.timezone('America/New_York'), ambiguous=True).tz_convert(pytz.utc) #Convert to UTC
# NOTE(review): assumes KML row 3 is the National Grid gauge — confirm ordering.
gdf_wl_NGG1 = gp.GeoDataFrame(df_wl_NGG1,
    geometry=gp.points_from_xy(np.ones([len(df_wl_NGG1), 1]) * gdf_gaugeLoc['geometry'].x[3],
                               np.ones([len(df_wl_NGG1), 1]) * gdf_gaugeLoc['geometry'].y[3]), crs="EPSG:4326")
gdf_wl_NGG1.geometry = gdf_wl_NGG1.geometry.to_crs("EPSG:32118")
# Also includes temperature
# NOTE(review): unlike the sheets above, NGG2 is not datetime-indexed or
# tz-converted here — confirm whether that is intentional.
df_wl_NGG2 = pd.read_excel('//srv-ott3.baird.com/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/04 Gauge Data/NCP2_NG_TideGauge_20160113/NCP2_NG_TideGauge_20160113.xlsx',
                           sheet_name='Sheet1')
gdf_wl_NGG2 = gp.GeoDataFrame(df_wl_NGG2,
    geometry=gp.points_from_xy(np.ones([len(df_wl_NGG2), 1]) * gdf_gaugeLoc['geometry'].x[3],
                               np.ones([len(df_wl_NGG2), 1]) * gdf_gaugeLoc['geometry'].y[3]), crs="EPSG:4326")
gdf_wl_NGG2.geometry = gdf_wl_NGG2.geometry.to_crs("EPSG:32118")
#%% Read in EFDC Data to dataframe
# Monthly EFDC surface-output CSVs.
# NOTE(review): the June–September reads are commented out, but efdc6_df..efdc9_df
# are still consumed by the merge cells below — re-enable these reads (or skip
# those cells) before running top-to-bottom, otherwise they NameError.
# efdc6_df = pd.read_csv('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/2012_Mon6/READ_FROM_EFDC_GRAPHICS_OUT_BIN.CSV')
# efdc7_df = pd.read_csv('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/2012_Mon7/READ_FROM_EFDC_GRAPHICS_OUT_BIN.CSV')
# efdc8_df = pd.read_csv('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/2012_Mon8/READ_FROM_EFDC_GRAPHICS_OUT_BIN.CSV')
# efdc9_df = pd.read_csv('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/2012_Mon9/READ_FROM_EFDC_GRAPHICS_OUT_BIN.CSV')
efdc12_df = pd.read_csv('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/2012_Mon12/READ_FROM_EFDC_GRAPHICS_OUT_BIN.CSV')
#%% Read in EFDC grid
# Whitespace-delimited lxly grid file: cell indices (I, J), centre coordinates
# (X, Y) and rotation cosines (CUE/CVE/CUN/CVN); 4 header rows skipped.
efdc_grid_df = pd.read_csv('//Ott-flavius/j/INPUT_FILES/GRID_FILES/NC_ER_lxly_20140129.inp',
                           delim_whitespace=True, skiprows=4,
                           names=['I', 'J', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
# Grid coordinates are NY Long Island State Plane metres (EPSG:32118)
efdc_grid_gdf = gp.GeoDataFrame(
    efdc_grid_df, geometry=gp.points_from_xy(efdc_grid_df.X, efdc_grid_df.Y), crs="EPSG:32118")
#%% Merge EFDC outputs with grid locations and add datetimes
efdc6_merged = pd.merge(efdc6_df, efdc_grid_df, how='left', left_on=['I_MOD','J_MOD'],
right_on = ['I','J']).drop(columns=[
' DUMPID', 'END_hr', 'I_MOD', 'J_MOD', 'I_MOD', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
efdc6_merged['date'] = pd.to_datetime([datetime(2012, 6, 1, 00, 00, 00) +
timedelta(hours=h) for h in efdc6_merged['ST_hr']])
efdc6_merged.set_index('date', inplace=True)
efdc6_merged.index = efdc6_merged.index.tz_localize(
pytz.timezone('EST')).tz_convert(pytz.utc) #Convert to UTC
del efdc6_df
efdc7_merged = pd.merge(efdc7_df, efdc_grid_df, how='left', left_on=['I_MOD','J_MOD'],
right_on = ['I','J']).drop(columns=[
' DUMPID', 'END_hr', 'I_MOD', 'J_MOD', 'I_MOD', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
efdc7_merged['date'] = pd.to_datetime([datetime(2012, 7, 1, 00, 00, 00) +
timedelta(hours=h) for h in efdc7_merged['ST_hr']])
efdc7_merged.set_index('date', inplace=True)
efdc7_merged.index = efdc7_merged.index.tz_localize(
pytz.timezone('EST')).tz_convert(pytz.utc) #Convert to UTC
del efdc7_df
efdc8_merged = pd.merge(efdc8_df, efdc_grid_df, how='left', left_on=['I_MOD','J_MOD'],
right_on = ['I','J']).drop(columns=[
' DUMPID', 'END_hr', 'I_MOD', 'J_MOD', 'I_MOD', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
efdc8_merged['date'] = pd.to_datetime([datetime(2012, 8, 1, 00, 00, 00) +
timedelta(hours=h) for h in efdc8_merged['ST_hr']])
efdc8_merged.set_index('date', inplace=True)
efdc8_merged.index = efdc8_merged.index.tz_localize(
pytz.timezone('EST')).tz_convert(pytz.utc) #Convert to UTC
del efdc8_df
efdc9_merged = pd.merge(efdc9_df, efdc_grid_df, how='left', left_on=['I_MOD','J_MOD'],
right_on = ['I','J']).drop(columns=[
' DUMPID', 'END_hr', 'I_MOD', 'J_MOD', 'I_MOD', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
efdc9_merged['date'] = pd.to_datetime([datetime(2012, 9, 1, 00, 00, 00) +
timedelta(hours=h) for h in efdc9_merged['ST_hr']])
efdc9_merged.set_index('date', inplace=True)
efdc9_merged.index = efdc9_merged.index.tz_localize(
pytz.timezone('EST')).tz_convert(pytz.utc) #Convert to UTC
del efdc9_df
#%%
efdc12_merged = pd.merge(efdc12_df, efdc_grid_df, how='left', left_on=['I_MOD','J_MOD'],
right_on = ['I','J']).drop(columns=[
' DUMPID', 'END_hr', 'I_MOD', 'J_MOD', 'I_MOD', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
efdc12_merged['date'] = pd.to_datetime([datetime(2012, 12, 1, 00, 00, 00) +
timedelta(hours=h) for h in efdc12_merged['ST_hr']])
efdc12_merged.set_index('date', inplace=True)
efdc12_merged.index = efdc12_merged.index.tz_localize(
pytz.timezone('EST')).tz_convert(pytz.utc) #Convert to UTC
del efdc12_df
#%% Merge EFDC dataframes and convert to geodataframe
# Summer variant: concatenate the four monthly runs (Jun-Sep) into one frame.
efdc_merged = pd.concat([efdc6_merged, efdc7_merged, efdc8_merged, efdc9_merged])
# Free the large per-month frames once concatenated.
del efdc6_merged
del efdc7_merged
del efdc8_merged
del efdc9_merged
efdc_merged_gdf = gp.GeoDataFrame(
    efdc_merged, geometry=efdc_merged['geometry'], crs="EPSG:32118")
del efdc_merged
#%%
# December variant: run THIS cell instead of the one above. Both produce
# `efdc_merged_gdf`, so whichever cell runs last wins.
efdc_merged = efdc12_merged
efdc_merged_gdf = gp.GeoDataFrame(
    efdc_merged, geometry=efdc_merged['geometry'], crs="EPSG:32118")
del efdc_merged
#%% Find nearest grid point to station FFG
# Extract the EFDC time series at the grid cell closest to the Field Facility Gauge.
nearest_geoms = nearest_points(Point(gdf_wl_FFG.iloc[0,:].geometry), MultiPoint(efdc_grid_gdf.geometry))
station_gdf = efdc_merged_gdf.loc[efdc_merged_gdf['geometry']==nearest_geoms[1]]
#%% Find nearest grid point to station NGG
# Same extraction at the National Grid Gauge.
nearest_geoms = nearest_points(Point(gdf_wl_NGG1.iloc[0,:].geometry), MultiPoint(efdc_grid_gdf.geometry))
stationNGG_gdf = efdc_merged_gdf.loc[efdc_merged_gdf['geometry']==nearest_geoms[1]]
# efdc_grid_gdf.loc[efdc_grid_gdf['geometry']== nearest_geoms[1]].value
#%% Save EFDC as Parquet for faster importing
efdc_merged_gdf.to_parquet('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/efdc_merged_Dec.parquet',
    index=True, compression='gzip')
station_gdf.to_parquet('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/station_gdf_Dec.parquet',
    index=True, compression='gzip')
stationNGG_gdf.to_parquet('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/stationNGG_gdf_Dec.parquet',
    index=True, compression='gzip')
#%% Read in EFDC as Parquet
# NOTE(review): this cell reloads the SUMMER parquet files (no _Dec suffix),
# replacing the December frames created above — presumably cells are run
# selectively per season; confirm before running top-to-bottom.
stationNGG_gdf = gp.read_parquet('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/stationNGG_gdf.parquet')
station_gdf = gp.read_parquet('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/station_gdf.parquet')
efdc_merged_gdf = gp.read_parquet('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/efdc_merged.parquet')
#%% Read Model Log
# Run log spreadsheet maps a run number to its output folder ('Run Location').
runLog = pd.read_excel('C:/Users/arey/files\Projects/Newtown/Model Log NTC.xlsx', 'ModelLog')
dataPath = "FlowFM_map.nc"
#%% Import Delft3D results
# Run numbers (rows of runLog) to load; lists are sized to the largest index.
modelPlot = [31]
ugrid_all = [None] * (max(modelPlot)+1)
data_frommap_wl = [None] * (max(modelPlot)+1)
facex = [None] * (max(modelPlot)+1)
facey = [None] * (max(modelPlot)+1)
nearest_IDX = list(range(100))
nearest_IDX_NGG = list(range(100))
dsloc = list(range(100))
dsloc_NGG = list(range(100))
# Find nearest point
for i in modelPlot:
    dataPath = pl.Path(runLog['Run Location'][i],
                       'FlowFM', 'dflowfm', 'output', 'FlowFM_map.nc')
    file_nc_map = dataPath.as_posix()
    tSteps = get_timesfromnc(file_nc=file_nc_map, varname='time')
    tStep = len(tSteps)-1  # last timestep in the map file
    ugrid_all[i] = get_netdata(file_nc=file_nc_map)#,multipart=False)
    data_frommap_wl[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_s1', timestep=tStep)
    facex[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_face_x')
    facey[i] = get_ncmodeldata(file_nc=file_nc_map, varname='mesh2d_face_y')
    # Find matching Delft3D index for each gauge
    s = gp.GeoSeries(map(Point, zip(facex[i], facey[i])))
    nearest_geoms = nearest_points(Point(gdf_wl_FFG.iloc[0,:].geometry), MultiPoint(s))
    nearest_IDX[i] = np.where(s==nearest_geoms[1])
    nearest_geoms = nearest_points(Point(gdf_wl_NGG1.iloc[0,:].geometry), MultiPoint(s))
    # BUG FIX: this assignment previously clobbered nearest_IDX[i] (the FFG
    # index) instead of filling nearest_IDX_NGG[i] — matching the corrected
    # pattern used in the xarray-based cell below.
    nearest_IDX_NGG[i] = np.where(s==nearest_geoms[1])
#%% Read in time series
# Re-initialize the run containers (this cell is a standalone alternative to
# the dfm_tools-based cell above).
modelPlot = [31]
ugrid_all = [None] * (max(modelPlot)+1)
data_frommap_wl = [None] * (max(modelPlot)+1)
facex = [None] * (max(modelPlot)+1)
facey = [None] * (max(modelPlot)+1)
nearest_IDX = list(range(100))
nearest_IDX_NGG = list(range(100))
dsloc = list(range(100))
dsloc_NGG = list(range(100))
# Note, this uses the standard netcdf lib + xarray
for i in modelPlot:
    dataPath = pl.Path(runLog['Run Location'][i],
                       'FlowFM', 'dflowfm', 'output', 'FlowFM_map.nc')
    file_nc_map = dataPath.as_posix()
    ds = xr.open_dataset(file_nc_map)
    # Find nearest point: build a GeoSeries of the mesh face centres
    ds_x = ds['mesh2d_face_x']
    ds_y = ds['mesh2d_face_y']
    s = gp.GeoSeries(map(Point, zip(ds_x, ds_y)))
    # Face index nearest the Field Facility gauge
    nearest_geoms = nearest_points(Point(gdf_wl_FFG.iloc[0,:].geometry), MultiPoint(s))
    nearest_IDX[i] = np.where(s==nearest_geoms[1])
    # Face index nearest the National Grid gauge
    nearest_geoms_NGG = nearest_points(Point(gdf_wl_NGG1.iloc[0,:].geometry), MultiPoint(s))
    nearest_IDX_NGG[i] = np.where(s==nearest_geoms_NGG[1])
    # Water-level (mesh2d_s1) time series at each nearest face
    dsloc[i] = ds['mesh2d_s1'].sel(mesh2d_nFaces=nearest_IDX[i][0])
    dsloc_NGG[i] = ds['mesh2d_s1'].sel(mesh2d_nFaces=nearest_IDX_NGG[i][0])
#%% Convert D3D Timezone
modelPlot = [31]
df_wl_D3D = list(range(100))
df_wl_D3D_NGG = list(range(100))
# Per-run model time zones (fixed-offset EST for the runs used here)
d3dTzones = list(range(100))
d3dTzones[25] = 'EST'
d3dTzones[26] = 'EST'
d3dTzones[31] = 'EST'
for i in modelPlot:
    # Flatten to DataFrame, drop the face-index level, and convert model
    # time from its local zone to UTC.
    df_wl_D3D[i] = dsloc[i].to_dataframe()
    df_wl_D3D[i].index = df_wl_D3D[i].index.droplevel(-1)
    df_wl_D3D[i].index = df_wl_D3D[i].index.tz_localize(
        pytz.timezone(d3dTzones[i])).tz_convert(pytz.utc) #Convert to UTC
    df_wl_D3D_NGG[i] = dsloc_NGG[i].to_dataframe()
    df_wl_D3D_NGG[i].index = df_wl_D3D_NGG[i].index.droplevel(-1)
    df_wl_D3D_NGG[i].index = df_wl_D3D_NGG[i].index.tz_localize(
        pytz.timezone(d3dTzones[i])).tz_convert(pytz.utc) #Convert to UTC
#%% Plot water levels at nearest point
# Two stacked weekly panels comparing EFDC, Delft3D and the observed gauge.
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(9, 5))
fig.patch.set_facecolor('white')
for week in range(0, 2):
    weekOffset = 0  # vestigial offset; panel index is just `week`
    axes[week+weekOffset].plot(station_gdf.index, station_gdf['WSE_m'], label='EFDC Model')
    # Observed gauge is in feet NAVD88; 0.3048 converts to metres
    axes[week+weekOffset].plot(gdf_wl_FFG.index+timedelta(hours=0),
                               gdf_wl_FFG['Water Surface Elevation (ft)']*0.3048,
                               label='Field Facility Gauge', color='k')
    for i in modelPlot:
        axes[week+weekOffset].plot(df_wl_D3D[i].index, df_wl_D3D[i].mesh2d_s1, label=runLog['Run'][i])
    # One 7-day window per panel, starting 2012-12-15
    axes[week+weekOffset].set_xlim(datetime(2012, 12, 15, 0, 0, 0) + timedelta(days=week*7),
                                   datetime(2012, 12, 15+7, 0, 0, 0) + timedelta(days=week*7))
    axes[week+weekOffset].set_ylim(-1, 2.5)
    axes[week+weekOffset].set_ylabel('Water Surface Elevation [m]')
axes[0].legend(loc='upper left')
axes[0].title.set_text('Field Facility Gauge')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/Figures/DecTS_FFG.png',
            bbox_inches='tight')
#%% Plot water levels at National Grid
# Same two-panel comparison at the National Grid gauge.
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(9, 5))
fig.patch.set_facecolor('white')
for week in range(0, 2):
    weekOffset = 0
    axes[week+weekOffset].plot(stationNGG_gdf.index, stationNGG_gdf['WSE_m'], label='EFDC Model')
    axes[week+weekOffset].plot(gdf_wl_NGG1.index+timedelta(hours=0),
                               gdf_wl_NGG1['Water Surface Elevation (ft)']*0.3048,
                               label='National Grid Gauge', color='k')
    for i in modelPlot:
        axes[week+weekOffset].plot(df_wl_D3D_NGG[i].index, df_wl_D3D_NGG[i].mesh2d_s1,
                                   label=runLog['Run'][i])
    axes[week+weekOffset].set_xlim(datetime(2012, 12, 15, 0, 0, 0) + timedelta(days=week*7),
                                   datetime(2012, 12, 15+7, 0, 0, 0) + timedelta(days=week*7))
    axes[week+weekOffset].set_ylim(-1, 2.5)
    axes[week+weekOffset].set_ylabel('Water Surface Elevation [m]')
axes[0].legend(loc='upper left')
axes[0].title.set_text('National Grid Gauge')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/Figures/DecTS_NGG.png',
            bbox_inches='tight')
#%% Plot scatters
# Modelled vs observed WSE at the Field Facility Gauge on shared hourly times.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(10, 10))
fig.patch.set_facecolor('white')
# The first (full-season) range is immediately overridden by the shorter July
# window — kept for quick switching between comparison periods.
interpTimes = pd.date_range("2012-06-02T00:00:00", "2012-10-01T00:00:00", freq="60min")
interpTimes = pd.date_range("2012-07-07T00:00:00", "2012-07-27T00:00:00", freq="60min")
for i in modelPlot:
    # NOTE(review): np.interp on datetime indexes relies on implicit numeric
    # conversion of the time axes — confirm both axes convert consistently.
    axes.scatter(np.interp(interpTimes, gdf_wl_FFG.index,
                           gdf_wl_FFG['Water Surface Elevation (ft)']*0.3048),
                 np.interp(interpTimes, df_wl_D3D[i].index,
                           df_wl_D3D[i].mesh2d_s1),
                 label=runLog['Run'][i])
# EFDC result at the same gauge
axes.scatter(np.interp(interpTimes, gdf_wl_FFG.index,
                       gdf_wl_FFG['Water Surface Elevation (ft)']*0.3048),
             np.interp(interpTimes, station_gdf.index,
                       station_gdf['WSE_m']),
             label='EFDC')
# 1:1 reference line
linepts = np.array([-1.0, 1.5])
plt.plot(linepts, linepts, color='k')
axes.set_xlim(-1.0, 1.5)
axes.set_ylim(-1.0, 1.5)
axes.set_aspect('equal')
axes.set_title('Field Facility Gauge')
axes.legend(loc='upper left')
axes.set_ylabel('Model Water Surface Elevation [m]')
axes.set_xlabel('Observed Water Surface Elevation [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/Figures/FFG_ScatterUpdated.pdf',
            bbox_inches='tight')
#%% Plot scatters NGG
# Same comparison at the National Grid Gauge.
fig, axes = plt.subplots(nrows=1, ncols=1, figsize=(10, 10))
fig.patch.set_facecolor('white')
interpTimes = pd.date_range("2012-06-02T00:00:00", "2012-10-01T00:00:00", freq="60min")
interpTimes = pd.date_range("2012-07-07T00:00:00", "2012-07-27T00:00:00", freq="60min")
for i in modelPlot:
    axes.scatter(np.interp(interpTimes, gdf_wl_NGG1.index,
                           gdf_wl_NGG1['Water Surface Elevation (ft)']*0.3048),
                 np.interp(interpTimes, df_wl_D3D_NGG[i].index,
                           df_wl_D3D_NGG[i].mesh2d_s1),
                 label=runLog['Run'][i])
axes.scatter(np.interp(interpTimes, gdf_wl_NGG1.index,
                       gdf_wl_NGG1['Water Surface Elevation (ft)']*0.3048),
             np.interp(interpTimes, stationNGG_gdf.index,
                       stationNGG_gdf['WSE_m']),
             label='EFDC')
linepts = np.array([-1.0, 1.5])
plt.plot(linepts, linepts, color='k')
axes.set_xlim(-1.0, 1.5)
axes.set_ylim(-1.0, 1.5)
axes.set_aspect('equal')
axes.set_title('National Grid Gauge')
axes.legend(loc='upper left')
axes.set_ylabel('Model Water Surface Elevation [m]')
axes.set_xlabel('Observed Water Surface Elevation [m]')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Newtown/RevisedEFDC/Figures/NGG_ScatterUpdated.pdf',
            bbox_inches='tight')

8
rasterProcess/.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

View File

@ -0,0 +1,13 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="bool.*" />
<option value="geopandas.io.file" />
</list>
</option>
</inspection_tool>
</profile>
</component>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (rasterProcess)" project-jdk-type="Python SDK" />
</project>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/rasterProcess.iml" filepath="$PROJECT_DIR$/.idea/rasterProcess.iml" />
</modules>
</component>
</project>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

8
rasterProcess2/.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

View File

@ -0,0 +1,13 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="PyUnresolvedReferencesInspection" enabled="true" level="WARNING" enabled_by_default="true">
<option name="ignoredIdentifiers">
<list>
<option value="bool.*" />
<option value="geopandas.io.file" />
</list>
</option>
</inspection_tool>
</profile>
</component>

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.8 (rasterProcess2)" project-jdk-type="Python SDK" />
</project>

View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/rasterProcess2.iml" filepath="$PROJECT_DIR$/.idea/rasterProcess2.iml" />
</modules>
</component>
</project>

View File

@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PySciProjectComponent">
<option name="PY_SCI_VIEW" value="true" />
<option name="PY_SCI_VIEW_SUGGESTED" value="true" />
</component>
</project>

View File

@ -0,0 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
<component name="PyDocumentationSettings">
<option name="renderExternalDocumentation" value="true" />
</component>
</module>

View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$/.." vcs="Git" />
</component>
</project>

View File

@ -0,0 +1,167 @@
#%% Read and Process NWS Alerts
# Created on: 2023-01-17
# Import Modules
import os
import geopandas as gp
import pandas as pd
import numpy as np
import xarray as xr
import nwswx
import requests
from shapely.geometry import Polygon
import shapely.wkt
#%% Setup NWS ID
# The NWS API requests a contact address as the application identifier.
nws = nwswx.WxAPI('api@alexanderrey.ca')
#%% Read NWS Alerts
# All alerts
# alertsIN = nws.alerts(return_format=nwswx.formats.JSONLD)
# Active alerts
alertsIN = nws.active_alerts(return_format=nwswx.formats.JSONLD)
nws_alerts = alertsIN['@graph']
# If more than one page of alerts, loop through and append
while 'pagination' in alertsIN:
    # Get the next page of alerts
    result = requests.get(alertsIN['pagination']['next'])
    # Convert to JSON
    alertsIN = result.json()
    # NOTE(review): the first page is JSON-LD ('@graph'), but this bare
    # requests.get follow-up is read as GeoJSON ('features'), so appended
    # records have a different shape — confirm downstream handling.
    nws_alerts.extend(alertsIN['features'])
# BUG FIX: message said 'AWS Alerts' — these are NWS alerts.
print('NWS Alerts: ', len(nws_alerts))
#%% Create NWS Alerts DataFrame
# One row per alert record returned by the API
nws_alert_df = pd.DataFrame.from_records(nws_alerts)
#%% Read in NWS Polygon shapefiles as GeoDataFrame
# NWS public zone boundaries (matched to 'Z'-suffix UGC codes below)
z_shp = gp.read_file("C:/Users/arey/Downloads/z_13se22/z_13se22.shp")
# Fire weather zones (matched to 'C'-suffix UGC codes below — see NOTE there)
fz_shp = gp.read_file("C:/Users/arey/Downloads/fz13se22/fz13se22.shp")
# Offshore marine zones
oz_shp = gp.read_file("C:/Users/arey/Downloads/oz22mr22/oz22mr22.shp")
# Coastal marine zones
mz_shp = gp.read_file("C:/Users/arey/Downloads/mz13se22/mz13se22.shp")
# Presumably forecast-office/warning boundaries — loaded but unused below
w_shp = gp.read_file("C:/Users/arey/Downloads/w_22mr22/w_22mr22.shp")
# ba_shp = gp.read_file("C:/Users/arey/Downloads/ba12my15/ba12my15.shp")
# hs_shp = gp.read_file("C:/Users/arey/Downloads/hs08mr23/hs08mr23.shp")
# s_shp = gp.read_file("C:/Users/arey/Downloads/s_22mr22/s_22mr22.shp")
#%% Match NWS Alerts to NWS Polygons
# Create empty lists: resolved polygon, its alert record, and the source row
# index (for debugging which alerts matched).
nws_alert_polygons = []
nws_alert_data = []
nws_alert_debug = []
# Loop through each NWS Alert and match to NWS Polygon using UGC
for i in range(len(nws_alert_df)):
    # If alert contains geometry as a string (WKT), parse it directly
    if type(nws_alert_df['geometry'][i]) == str:
        nws_alert_polygons.append(shapely.wkt.loads(nws_alert_df['geometry'][i]))
        nws_alert_data.append(nws_alert_df.iloc[i, :])
        nws_alert_debug.append(i)
    # Geometry Array: take its first geometry
    elif type(nws_alert_df['geometry'][i]) == gp.array.GeometryArray:
        nws_alert_polygons.append(nws_alert_df['geometry'][i][0])
        nws_alert_data.append(nws_alert_df.iloc[i, :])
        nws_alert_debug.append(i)
    else:
        # No geometry on the alert: resolve its UGC zone codes to polygons.
        # UGC format is SSZNNN / SSCNNN: state (0:2), type letter (2), zone (3:6).
        UGCs = nws_alert_df['geocode'][i]['UGC']
        # Loop through each UGC code and find matching polygon
        for ugc in UGCs:
            # Zone based warning ('&' on two Python bools acts like 'and' here)
            if (ugc[2] == 'Z') & (ugc[0:2] in z_shp['STATE'].unique()):
                alert_poly = z_shp[(z_shp['STATE'] == ugc[0:2]) & (z_shp['ZONE'] == ugc[3:6])].geometry
            # Fire Weather Zone based warning
            # NOTE(review): this branch tests membership against z_shp['STATE']
            # but queries fz_shp; also a 'C' suffix conventionally denotes a
            # COUNTY code, not a fire weather zone — confirm intent.
            elif (ugc[2] == 'C') & (ugc[0:2] in z_shp['STATE'].unique()):
                alert_poly = fz_shp[(fz_shp['STATE'] == ugc[0:2]) & (fz_shp['ZONE'] == ugc[3:6])].geometry
            # Marine Zone based warning
            elif ugc in mz_shp['ID'].unique():
                alert_poly = mz_shp[mz_shp['ID'] == ugc].geometry
            # Offshore Zone based warning
            elif ugc in oz_shp['ID'].unique():
                alert_poly = oz_shp[oz_shp['ID'] == ugc].geometry
            # elif ugc[0] == 'W':
            #     nws_alert_df['geometry'][i] = w_shp[w_shp['UGC'] == ugc].geometry.values
            else:
                # Unrecognized UGC: report and skip this code
                print('No polygon found for UGC: ', ugc)
                print('Alert: ', nws_alert_df['headline'][i])
                continue
            # If no matches found in the selected shapefile, skip this code
            if len(alert_poly) == 0:
                print('No polygon found for UGC: ', ugc)
                print('Alert: ', nws_alert_df['headline'][i])
                continue
            # Append polygon to list (one entry per matched UGC, so an alert
            # spanning several zones appears once per zone)
            nws_alert_polygons.append(alert_poly.values[0])
            nws_alert_data.append(nws_alert_df.iloc[i, :])
            nws_alert_debug.append(i)
#%% Create GeoDataFrame of alert polygons
# Pair each matched alert record with its resolved polygon geometry.
nws_alert_gdf = gp.GeoDataFrame(nws_alert_data, geometry=nws_alert_polygons)
nws_alert_gdf.reset_index(inplace=True)
#%% Create grid of points
# Regular ~0.075-degree lon/lat lattice spanning the CONUS bounding box.
xs = np.arange(-127, -65, 0.075)
ys = np.arange(24, 50, 0.075)
lons, lats = np.meshgrid(xs, ys)
# Create GeoSeries of Points from the flattened coordinate grids
flat_lons = lons.flatten()
flat_lats = lats.flatten()
gridPointsSeries = gp.GeoSeries.from_xy(flat_lons, flat_lats)
#%% Loop through each NWS Alert and find points within polygon
# One accumulator string per grid point; each covering alert appends one
# '[...]'-bracketed summary to the point's string.
alertData_1D = ['' for i in range(len(gridPointsSeries))]
# Loop through NWS Alerts
for alertIDX in nws_alert_gdf.index:
    # Hoist the row lookup — it is invariant for the whole alert.
    alert = nws_alert_gdf.loc[alertIDX]
    # The bracketed summary is identical for every point covered by this
    # alert, so build it once per alert instead of once per point (the
    # original rebuilt it inside the point loop).
    # NOTE(review): fields are concatenated with no separators and assumed to
    # be strings — a None onset/expires would raise TypeError, as before.
    alertSuffix = ('[' + alert['headline'] +
                   alert['description'] +
                   alert['areaDesc'] +
                   alert['onset'] +
                   alert['expires'] +
                   alert['severity'] +
                   alert['@id'] + ']')
    # Identify which grid points fall within this alert's polygon
    alertPoints = np.where(alert['geometry'].contains(gridPointsSeries))[0]
    # Append this alert's summary to every covered point
    for alertPoint in alertPoints:
        alertData_1D[alertPoint] = alertData_1D[alertPoint] + alertSuffix
    print(alertIDX)
#%% Create xarray of alert data
# Convert list to numpy
list_alerts_1d_np = np.array(alertData_1D)
# Reshape back to 2d array (same shape as the meshgrid the points came from)
list_alerts_2d_np = list_alerts_1d_np.reshape(lons.shape)
# Convert to xarray, carrying the 2-D lon/lat grids along as coordinates
grid_alerts_xr = xr.DataArray(data=list_alerts_2d_np,
                              dims=["x", "y"],
                              coords={"lon": (("x", "y"), lons),
                                      "lat": (("x", "y"), lats)},
                              )
# Save as netcdf in the working directory
grid_alerts_xr.to_netcdf('grid_alerts_contains3.nc')