New NTC scripts

This commit is contained in:
Alexander Rey 2023-02-03 11:44:57 -05:00
parent 91ce9fac30
commit 15c7820c1c
5 changed files with 466 additions and 20 deletions

66
MEDS/MEDS.py Normal file
View File

@ -0,0 +1,66 @@
#!/usr/bin/env python3
# coding: utf8
"""Download CHS IWLS water-level data for one station in 7-day chunks.

Looks up the station's internal API id from its 5-digit station code,
then pages through the requested date range (the IWLS API limits the
span of a single /data request) accumulating timestamps and values.
"""
import argparse
import time
import json
from datetime import datetime, timedelta
from urllib.request import urlopen
from urllib.parse import urlencode
import math

# Station code to query (zero-padded to 5 digits for the API lookup)
stationCode = 1700

# Get all stations
with urlopen("https://api-iwls.dfo-mpo.gc.ca/api/v1/stations") as url:
    stationlist = json.load(url)

# Find the station's internal id (the /data endpoint keys on id, not code)
stationsFiltered = list(filter(lambda station: station['code'] == str(stationCode).zfill(5), stationlist))
if not stationsFiltered:
    # Fail with a clear message instead of a bare IndexError
    raise ValueError('Station code {} not found in IWLS station list'.format(stationCode))
stationID = stationsFiltered[0]['id']
stationName = stationsFiltered[0]['officialName']

# Need UTC time for start and end of the date range
start_dt = datetime(2019, 7, 1)
end_dt = datetime(2021, 12, 31)

# Select data type
# wlo - Observed water level
# wlf or wlf-spine - predicted water levels (at operational stations only)
# wlp - Predicted water levels
# wlp-hilo - High and low sea predictions (Tide tables)
dataType = 'wlo'

dt_merge = []
wl_merge = []
# Loop through the request period in 7-day segments
# (.days is already an int, so no floor() is needed)
for startDayCount in range(0, (end_dt - start_dt).days, 7):
    # Convert the times to ISO 8601 strings as needed for the HTTP request.
    # Clamp the segment end so the final request never extends past end_dt.
    sdt = (start_dt + timedelta(days=startDayCount)).strftime("%Y-%m-%dT%H:%M:00Z")
    edt = min(start_dt + timedelta(days=startDayCount + 7),
              end_dt).strftime("%Y-%m-%dT%H:%M:00Z")
    resturl = 'https://api-iwls.dfo-mpo.gc.ca/api/v1/stations/{}/data?time-series-code={}&{}&{}'.format(
        stationID,
        dataType,
        urlencode({'from': sdt}),
        urlencode({'to': edt}))
    # Throttle requests so we do not hammer the API
    time.sleep(0.5)
    # Obtain JSON data from CHS
    data = {}
    with urlopen(resturl) as hf:
        data = json.loads(hf.read().decode('utf-8'))
    # Parse seconds with %S (the original literal ":00Z" pattern raises
    # ValueError on any eventDate whose seconds are not exactly 00)
    dt_merge.extend([datetime.strptime(x['eventDate'], "%Y-%m-%dT%H:%M:%SZ") for x in data])
    wl_merge.extend([float(x['value']) for x in data])
    print(startDayCount)

print(len(wl_merge))

View File

@ -147,12 +147,18 @@ df_moored_data.columns = colOUT
### Table 3-1, PDF page 65 ### Table 3-1, PDF page 65
### Locations change between deployment 1-9 and 10/11 ### Locations change between deployment 1-9 and 10/11
### YSI from 2021 FRMR report Pg 385
df_moored = pd.DataFrame( df_moored = pd.DataFrame(
{'depOrig': ['NC083CM', 'NC081CM', 'NC082CM', 'NC086CM', 'EK023CM'], {'depOrig': ['NC083CM', 'NC081CM', 'NC082CM', 'NC086CM', 'EK023CM',
'depCurr': ['NC086CM', 'NC087CM', 'NC088CM', 'NC089CM', 'EK023CM'], 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
'Northing': [208519.95, 206083.24, 203835.10, 201381.55, 200827.33], 'depCurr': ['NC086CM', 'NC087CM', 'NC088CM', 'NC089CM', 'EK023CM',
'Easting': [996198.97, 1000959.45, 1004387.42, 1005283.07, 1004644.90], 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
'bedElev': [-16, -17, -20, -18, -20]}) 'Northing': [208519.95, 206083.24, 203835.10, 201381.55, 200827.33,
208809.66, 205547.85, 203870.2, 201684.6, 200860.91, 200336.12],
'Easting': [996198.97, 1000959.45, 1004387.42, 1005283.07, 1004644.90,
996586.22, 1001028.85, 1004501.4, 1005027.4, 1004516.53, 1005535.44],
'bedElev': [-16, -17, -20, -18, -20, 0, 0, 0, 0, 0, 0]})
gdf_moored_loc = gp.GeoDataFrame( gdf_moored_loc = gp.GeoDataFrame(
df_moored, geometry=gp.points_from_xy(df_moored.Easting, df_moored.Northing), crs="EPSG:2263") df_moored, geometry=gp.points_from_xy(df_moored.Easting, df_moored.Northing), crs="EPSG:2263")
gdf_moored_loc.geometry = gdf_moored_loc.geometry.to_crs("EPSG:32118") gdf_moored_loc.geometry = gdf_moored_loc.geometry.to_crs("EPSG:32118")

View File

@ -212,8 +212,9 @@ nearest_IDX_NGG = list(range(100))
dsloc = list(range(100)) dsloc = list(range(100))
dsloc_NGG = list(range(100)) dsloc_NGG = list(range(100))
# Find nearest point # Find the nearest point
for i in modelPlot: for i in modelPlot:
dataPath = pl.Path(runLog['Run Location'][i], dataPath = pl.Path(runLog['Run Location'][i],
'FlowFM', 'dflowfm', 'output', 'FlowFM_map.nc') 'FlowFM', 'dflowfm', 'output', 'FlowFM_map.nc')

View File

@ -0,0 +1,334 @@
"""
@author: AJMR
Plotting and animation script for NTC
Febuary 01, 2023
"""
import os
import pandas as pd
import numpy as np
import geopandas as gp
import pytz
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import dfm_tools as dfmt
from dfm_tools.get_nc import get_netdata, get_ncmodeldata, plot_netmapdata
from dfm_tools.get_nc_helpers import get_ncvarproperties, get_stationid_fromstationlist, get_varnamefromattrs
import cartopy.crs as ccrs
import contextily as ctx
from shapely.geometry import Point, MultiPoint
from shapely.ops import nearest_points
import pathlib as pl
import xarray as xr
from datetime import timedelta
import datetime as datetime
#%% Read Model Log
runLog = pd.read_excel('C:/Users/arey/files/Projects/Newtown/Model Log NTC.xlsx', 'ModelLog')
dataPath = "FlowFM_map.nc"
histPath = "FlowFM_his.nc"
#%% Load in Water Level Data
# Field Gauge-EAST
FFG_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/FieldFacility.csv")
FFG_pd['DateTime'] = pd.to_datetime(FFG_pd.Date_time)
FFG_pd.set_index(FFG_pd['DateTime'], inplace=True)
# Put in UTC from Eastern Time (with DST!)
# ambiguous='infer' resolves the repeated fall-back hour from timestamp order
FFG_pd = FFG_pd.tz_localize(
    pytz.timezone('US/Eastern'), ambiguous='infer').tz_convert(pytz.utc)
# National Grid Gauge-WEST
NGG1_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid1.csv")
NGG1_pd['DateTime'] = pd.to_datetime(NGG1_pd.Date_time)
NGG1_pd.set_index(NGG1_pd['DateTime'], inplace=True)
# Put in UTC from Eastern Time (with DST!)
# ambiguous='NaT' marks unresolvable DST-transition timestamps as NaT...
NGG1_pd = NGG1_pd.tz_localize(
    pytz.timezone('US/Eastern'), ambiguous='NaT').tz_convert(pytz.utc)
# ...which are then dropped here
NGG1_pd = NGG1_pd.dropna()
# Second National Grid gauge file; NOTE(review) its time column is named
# 'datetime' (lowercase), unlike the other gauge files' 'Date_time'
NGG2_pd = pd.read_csv("//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/00_ProcessingCode/NetCDF/Gauge/NationalGrid2.csv")
NGG2_pd['DateTime'] = pd.to_datetime(NGG2_pd.datetime)
NGG2_pd.set_index(NGG2_pd['DateTime'], inplace=True)
# Put in UTC from Eastern Time (with DST!)
NGG2_pd = NGG2_pd.tz_localize(
    pytz.timezone('US/Eastern'), ambiguous='NaT').tz_convert(pytz.utc)
# Drop ambiguous times
NGG2_pd = NGG2_pd.dropna()
#%% Read in time series
# Run-log row indices of the Delft3D runs to load/plot
modelPlot = [102, 104]
# Sparse list indexed by run number; unused slots stay None
Delft_WL = [None] * (max(modelPlot)+1)
# Note, this uses the standard netcdf lib + xarray
for i in modelPlot:
    file_nc_hist = pl.Path(runLog['Run Location'][i],
                           'FlowFM', 'dflowfm', 'output', 'FlowFM_his.nc')
    # Hist file path
    file_nc_hist = file_nc_hist.as_posix()
    # Open hist file dataset
    data_xr = xr.open_mfdataset(file_nc_hist, preprocess=dfmt.preprocess_hisnc)
    # Get Variables
    vars_pd = get_ncvarproperties(data_xr)
    # Get stations
    stations_pd = data_xr['stations'].to_dataframe()
    # Convert the two observation-station water-level series to Pandas
    Delft_WL[i] = data_xr.waterlevel.sel(stations=['West_WL', 'East_WL']).to_pandas()
    # Put in UTC if needed: run 104 was output in EST (no DST), others in UTC
    if i == 104:
        Delft_WL[i] = Delft_WL[i].tz_localize(
            pytz.timezone('EST')).tz_convert(pytz.utc)
    else:
        Delft_WL[i] = Delft_WL[i].tz_localize(
            pytz.timezone('UTC'))
#%% Read in EFDC Data to dataframe
efdc_df = pd.read_csv(
    '//ott-athena.baird.com/D/11934.201_Newtown_Creek_TPP/EFDC_MNR/2015_8/READ_FROM_EFDC_GRAPHICS_OUT_BIN.CSV')
#%% Read in EFDC grid
# Whitespace-delimited grid file: cell (I, J) indices, coordinates, and
# cell-orientation cosines/sines (CUE/CVE/CUN/CVN)
efdc_grid_df = pd.read_csv('//ott-athena.baird.com/D/11934.201_Newtown_Creek_TPP/EFDC_MNR/GRID_FILES/NC_ER_lxly_20140129.inp',
                           delim_whitespace=True, skiprows=4,
                           names=['I', 'J', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
efdc_grid_gdf = gp.GeoDataFrame(
    efdc_grid_df, geometry=gp.points_from_xy(efdc_grid_df.X, efdc_grid_df.Y), crs="EPSG:32118")
#%% Merge EFDC outputs with grid locations and add datetimes
# NOTE(review): 'I_MOD' appears twice in this drop list — presumably a
# harmless duplicate; confirm and remove one occurrence
efdc_merged = pd.merge(efdc_df, efdc_grid_df, how='left', left_on=['I_MOD','J_MOD'],
                       right_on = ['I','J']).drop(columns=[
    ' DUMPID', 'END_hr', 'I_MOD', 'J_MOD', 'I_MOD', 'X', 'Y', 'CUE', 'CVE', 'CUN', 'CVN'])
# Build absolute timestamps from simulation hours relative to 2015-08-01
efdc_merged['date'] = pd.to_datetime([datetime.datetime(2015, 8, 1, 0, 0, 0) +
                                      timedelta(hours=h) for h in efdc_merged['ST_hr']])
efdc_merged.set_index('date', inplace=True)
# Convert to UTC (EFDC output is in EST, no DST)
efdc_merged.index = efdc_merged.index.tz_localize(
    pytz.timezone('EST')).tz_convert(pytz.utc)
efdc_merged_gdf = gp.GeoDataFrame(
    efdc_merged, geometry=efdc_merged['geometry'], crs="EPSG:32118")
# Free the plain DataFrame; only the GeoDataFrame is used from here on
del efdc_merged
#%% Find nearest grid point to station FFG
nearest_geoms = nearest_points(Point(
    data_xr.waterlevel.sel(stations=['East_WL']).station_x_coordinate.values[0],
    data_xr.waterlevel.sel(stations=['East_WL']).station_y_coordinate.values[0]),
    MultiPoint(efdc_grid_gdf.geometry))
# Keep only the EFDC rows at the grid point nearest the East gauge
stationFFG_gdf = efdc_merged_gdf.loc[efdc_merged_gdf['geometry'] == nearest_geoms[1]]
#%% Find nearest grid point to station NGG
nearest_geoms = nearest_points(Point(
    data_xr.waterlevel.sel(stations=['West_WL']).station_x_coordinate.values[0],
    data_xr.waterlevel.sel(stations=['West_WL']).station_y_coordinate.values[0]),
    MultiPoint(efdc_grid_gdf.geometry))
# Keep only the EFDC rows at the grid point nearest the West gauge
stationNGG_gdf = efdc_merged_gdf.loc[efdc_merged_gdf['geometry'] == nearest_geoms[1]]
#%% Plot Time Series
modelPlot = [102, 104]
# Two stacked panels: East gauge (top), West gauge (bottom), shared x axis
fig, axes = plt.subplots(nrows=2, ncols=1, figsize=(9, 8),
                         sharex=True)
# Plot Data (ft -> m via 0.3048)
# NOTE(review): shift(0, timedelta(days=1)) shifts by ZERO periods — a
# no-op; confirm whether a 1-day lag was intended here
FFG_pd['Water Surface Elevation (ft)'].shift(
    0, timedelta(days=1)).multiply(0.3048).plot(ax=axes[0],
    color='k', label='Field Facility Gauge')
# Plot EFDC
stationFFG_gdf['WSE_m'].plot(ax=axes[0], label='EFDC 2019')
# Plot Delft3D
for i in modelPlot:
    Delft_WL[i]['East_WL'].plot(ax=axes[0],
        label='Delft3D: ' + runLog['Plotting Legend Entry'][i])
# Set axis to the span of the first plotted model run
axes[0].set_xlim(Delft_WL[modelPlot[0]].index[0],
                 Delft_WL[modelPlot[0]].index[-1])
axes[0].set_ylim(-1.5, 2.5)
axes[0].legend(loc='upper right')
# Plot Data (ft -> m); same zero-period shift note as above
NGG2_pd['water_surface_elevation'].shift(
    0, timedelta(hours=1)).multiply(0.3048).plot(ax=axes[1],
    color='k', label='National Grid Gauge')
# Plot EFDC
stationNGG_gdf['WSE_m'].plot(ax=axes[1], label='EFDC')
# Plot Delft
for i in modelPlot:
    Delft_WL[i]['West_WL'].plot(ax=axes[1],
        label='Delft3D: ' + runLog['Plotting Legend Entry'][i])
# Set axis
axes[1].set_xlim(Delft_WL[modelPlot[0]].index[0],
                 Delft_WL[modelPlot[0]].index[-1])
axes[1].set_ylim(-1.5, 2.5)
axes[1].legend(loc='upper right')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Newtown/Figures2023/TS_2023.png',
            bbox_inches='tight', dpi=400)
#%% Plot scatters
# Model-vs-data scatter panels: East gauge (left), West gauge (right)
fig, axes = plt.subplots(nrows=1, ncols=2, figsize=(10, 6))
fig.patch.set_facecolor('white')
# Common 20-minute time base; every series is interpolated onto it so
# model and data samples line up for the scatter/statistics
interpTimes = pd.date_range(Delft_WL[modelPlot[0]].index[0],
                            Delft_WL[modelPlot[0]].index[-1],
                            freq="20min")
# Compare EFDC and Data (x = gauge in m, y = model in m)
axes[0].scatter(np.interp(interpTimes, FFG_pd.index,
                          FFG_pd['Water Surface Elevation (ft)']*0.3048),
                np.interp(interpTimes, stationFFG_gdf.index,
                          stationFFG_gdf['WSE_m']), s=1,
                label='EFDC 2019')
for i in modelPlot:
    # Compare Delft3D and Data
    axes[0].scatter(np.interp(interpTimes, FFG_pd.index,
                              FFG_pd['Water Surface Elevation (ft)']*0.3048),
                    np.interp(interpTimes, Delft_WL[i].index,
                              Delft_WL[i]['East_WL']), s=1,
                    label='Delft3D: ' + runLog['Plotting Legend Entry'][i])
# 1:1 reference line
linepts = np.array([-1.5, 1.5])
axes[0].plot(linepts, linepts, color='k')
axes[0].legend(loc='lower right')
axes[0].set_xlim(-1.5, 1.5)
axes[0].set_ylim(-1.5, 1.5)
axes[0].set_aspect('equal', 'box')
# Compare EFDC and Data
# NOTE(review): index.shift(0, timedelta(hours=1)) shifts by ZERO
# periods — a no-op; confirm whether a 1-hour lag was intended
modelInterp = np.interp(interpTimes, stationNGG_gdf.index.shift(
    0, timedelta(hours=1)),
    stationNGG_gdf['WSE_m'])
dataInterp = np.interp(interpTimes, NGG2_pd.index,
                       NGG2_pd['water_surface_elevation']*0.3048)
axes[1].scatter(dataInterp, modelInterp, s=1,
                label='EFDC 2019')
# Print RMSE and R^2 for the EFDC comparison
print(np.sqrt(np.mean((modelInterp-dataInterp)**2)))
corr_matrix = np.corrcoef(dataInterp, modelInterp)
print(corr_matrix[0, 1]**2)
for i in modelPlot:
    # Compare Delft3D and Data
    modelInterp = np.interp(interpTimes, Delft_WL[i].index.shift(
        0, timedelta(hours=1)),
        Delft_WL[i]['West_WL'])
    dataInterp = np.interp(interpTimes, NGG2_pd.index,
                           NGG2_pd['water_surface_elevation']*0.3048)
    axes[1].scatter(dataInterp, modelInterp, s=1,
                    label='Delft3D: ' + runLog['Plotting Legend Entry'][i])
    # Print RMSE and R^2 for this run
    print(np.sqrt(np.mean((modelInterp-dataInterp)**2)))
    corr_matrix = np.corrcoef(dataInterp, modelInterp)
    print(corr_matrix[0, 1]**2)
axes[1].plot(linepts, linepts, color='k')
axes[1].legend(loc='lower right')
axes[1].set_aspect('equal', 'box')
plt.show()
fig.savefig('C:/Users/arey/files/Projects/Newtown/Figures2023/FFG_Scatter2023.png',
            bbox_inches='tight', dpi=400)
#%% Read in YSI ADCP
# %% Load in moored data
df_moored_data = pd.read_excel('//srv-ott3/Projects/11934.201 Newtown Creek TPP Privileged and Confidential/03_Data/02_Physical/05 Currents/NC_CurrentMeter_All_Phase1_all_data_2013_07_16/NC_CurrentMeter_All_Phase1_all_moored_2013_05_20.xlsx',
                               sheet_name='mag_all_moored')
# Shift col names to put 'second' back in (the source sheet is missing it)
colIN = list(df_moored_data.columns)
colOUT = colIN[0:6] + ['second'] + colIN[6:-1]
df_moored_data.columns = colOUT
### Moored current meter locations from here:
### file://srv-ott3/Projects/11934.201%20Newtown%20Creek%20TPP%20%E2%80%93%20Privileged%20and%20Confidential/03_Data/01_BkgrdReports/NC_DRAFT_DSR_Submittal_No_3_2013-07-03.pdf
### Table 3-1, PDF page 65
### Locations change between deployment 1-9 and 10/11
### YSI from 2021 FRMR report Pg 385
# Station location table: original vs. current deployment IDs, NY Long
# Island State Plane (ft, EPSG:2263) coordinates, and bed elevation
# (0 = unknown/not yet filled in for the newer stations)
df_moored = pd.DataFrame(
    {'depOrig': ['NC083CM', 'NC081CM', 'NC082CM', 'NC086CM', 'EK023CM',
                 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
     'depCurr': ['NC086CM', 'NC087CM', 'NC088CM', 'NC089CM', 'EK023CM',
                 'NC310', 'NC313', 'NC316', 'NC318', 'EK108', 'EB043'],
     'Northing': [208519.95, 206083.24, 203835.10, 201381.55, 200827.33,
                  208809.66, 205547.85, 203870.2, 201684.6, 200860.91, 200336.12],
     'Easting': [996198.97, 1000959.45, 1004387.42, 1005283.07, 1004644.90,
                 996586.22, 1001028.85, 1004501.4, 1005027.4, 1004516.53, 1005535.44],
     'bedElev': [-16, -17, -20, -18, -20, 0, 0, 0, 0, 0, 0]})
gdf_moored_loc = gp.GeoDataFrame(
    df_moored, geometry=gp.points_from_xy(df_moored.Easting, df_moored.Northing), crs="EPSG:2263")
gdf_moored_loc.geometry = gdf_moored_loc.geometry.to_crs("EPSG:32118")
# Loop through Station IDs for deployments 1-9 and assign locations
# for d in range(1,10):
#     depMask = df_moored_data['deployment'] == ('dep' + str(d))
#     for stationIDX, station in enumerate(df_moored['depOrig']):
#         stationMask = (df_moored_data['station'] == station) & depMask
#
#         # Assign geography to station plus deployment
#         df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
#         df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
# Loop through Station IDs and assign locations using the current
# ('depCurr') names; NOTE(review): this covers deployments 1-11, not
# just 10-11 — the deployment 1-9 loop above is commented out
for d in range(1,12):
    depMask = df_moored_data['deployment'] == 'dep' + str(d)
    for stationIDX, station in enumerate(df_moored['depCurr']):
        stationMask = (df_moored_data['station'] == station) & depMask
        # Assign geography to station plus deployment
        df_moored_data.loc[stationMask, 'Northing'] = df_moored.loc[stationIDX, 'Northing']
        df_moored_data.loc[stationMask, 'Easting'] = df_moored.loc[stationIDX, 'Easting']
# Create geodataframe and convert to USSP
gdf_moored = gp.GeoDataFrame(
    df_moored_data, geometry=gp.points_from_xy(df_moored_data.Easting, df_moored_data.Northing), crs="EPSG:2263")
# Convert data to CRS of D3D
gdf_moored.geometry = gdf_moored.geometry.to_crs("EPSG:32118")
# Rebuild timestamps from the split year/month/day/hour/minute columns
gdf_moored['date'] = pd.to_datetime(gdf_moored['year'].astype(str) + '-' +
                                    gdf_moored['month'].astype(str).str.zfill(2) + '-' +
                                    gdf_moored['day'].astype(str).str.zfill(2) + ' ' +
                                    gdf_moored['hour'].astype(str).str.zfill(2) + ':' +
                                    gdf_moored['minute'].astype(str).str.zfill(2))
#%% Velocity validation

View File

@ -1,18 +1,21 @@
#%% Read and Process NWS Alerts #%% Read and Process NWS Alerts
# Created on: 2023-01-17 # Created on: 2023-01-17
import datetime
# Import Modules # Import Modules
import os
import geopandas as gp import geopandas as gp
import pandas as pd import pandas as pd
import numpy as np import numpy as np
import xarray as xr import xarray as xr
from netCDF4 import Dataset
import nwswx import nwswx
import requests import requests
from shapely.geometry import Polygon
import shapely.wkt import shapely.wkt
import re
#%% Setup NWS ID #%% Setup NWS ID
nws = nwswx.WxAPI('api@alexanderrey.ca') nws = nwswx.WxAPI('api@alexanderrey.ca')
@ -123,7 +126,7 @@ lons, lats = np.meshgrid(xs, ys)
gridPointsSeries = gp.GeoSeries.from_xy(lons.flatten(), lats.flatten()) gridPointsSeries = gp.GeoSeries.from_xy(lons.flatten(), lats.flatten())
#%% Loop through each NWS Alert and find points within polygon #%% Loop through each NWS Alert and find points within polygon
alertData_1D = ['' for i in range(len(gridPointsSeries))] alertData_1D = [None for i in range(len(gridPointsSeries))]
# Loop through NWS Alerts # Loop through NWS Alerts
for alertIDX in nws_alert_gdf.index: for alertIDX in nws_alert_gdf.index:
@ -138,13 +141,26 @@ for alertIDX in nws_alert_gdf.index:
# Get existing alert data and add new alert # Get existing alert data and add new alert
alertString = alertData_1D[alertPoint] alertString = alertData_1D[alertPoint]
alertString = alertString + '[' + nws_alert_gdf.loc[alertIDX, 'headline'] + \ if alertString == None:
nws_alert_gdf.loc[alertIDX, 'description'] + \ alertString = ''
nws_alert_gdf.loc[alertIDX, 'areaDesc'] + \
nws_alert_gdf.loc[alertIDX, 'onset'] + \ alertDescription = nws_alert_gdf.loc[alertIDX, 'description']
nws_alert_gdf.loc[alertIDX, 'expires'] + \
nws_alert_gdf.loc[alertIDX, 'severity'] + \ if alertDescription == None:
nws_alert_gdf.loc[alertIDX, '@id'] + ']' alertDescription = nws_alert_gdf.loc[alertIDX, 'headline']
if nws_alert_gdf.loc[alertIDX, 'ends'] == None:
alertEndTime = nws_alert_gdf.loc[alertIDX, 'expires']
else:
alertEndTime = nws_alert_gdf.loc[alertIDX, 'ends']
alertString = alertString + '[' + nws_alert_gdf.loc[alertIDX, 'headline'] + '}' \
'{' + alertDescription + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, 'areaDesc'] + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, 'onset'] + '}' + \
'{' + alertEndTime + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, 'severity'] + '}' + \
'{' + nws_alert_gdf.loc[alertIDX, '@id'] + ']'
alertData_1D[alertPoint] = alertString alertData_1D[alertPoint] = alertString
@ -160,8 +176,31 @@ list_alerts_2d_np = list_alerts_1d_np.reshape(lons.shape)
grid_alerts_xr = xr.DataArray(data=list_alerts_2d_np, grid_alerts_xr = xr.DataArray(data=list_alerts_2d_np,
dims=["x", "y"], dims=["x", "y"],
coords={"lon": (("x", "y"), lons), coords={"lon": (("x", "y"), lons),
"lat": (("x", "y"), lats)}, "lat": (("x", "y"), lats)}
) )
# Save as netcdf # Save as netcdf
grid_alerts_xr.to_netcdf('grid_alerts_contains3.nc') grid_alerts_xr.to_netcdf('grid_alerts_contains8.nc', engine="h5netcdf",
encoding={"__xarray_dataarray_variable__": {'gzip': True, "compression_opts": 9}})
#%% Read in netcdf
grid_alerts_nc = Dataset('grid_alerts_contains5.nc')
grid_alert_pt = grid_alerts_nc['__xarray_dataarray_variable__'][176, 300]
print(grid_alerts_nc['lon'][176, 300])
print(grid_alerts_nc['lat'][176, 300])
#%% Process alert data string
alertPattern = '\[([^]]+)\]'
alertList = re.findall(alertPattern, grid_alert_pt)
# Loop through each alert
for alert in alertList:
# Extract alert details
alertDetails = alert.split('}{')
print(alertDetails)
alertTime = datetime.datetime.strptime(alertDetails[3], '%Y-%m-%dT%H:%M:%S%z')