# AJMR-Python-Baird/Mustique/GFS_Import.py
# (108 lines, 3.3 KiB, Python)
#%% Script to read in GFS data at a specific point
# Author: Alexander Rey
# November 18, 2021
#%% Project Setup
import os
import sys
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from xarray.backends import NetCDF4DataStore
import xarray as xr
from datetime import datetime, timedelta
from netCDF4 import num2date
from metpy.units import units
from siphon.catalog import TDSCatalog
#%% Helper function for finding proper time variable
def find_time_var(var, time_basename='time'):
    """Return the first coordinate of *var* whose name starts with *time_basename*.

    Raises ValueError when the variable carries no matching time coordinate.
    """
    match = next((c for c in var.coords if c.startswith(time_basename)), None)
    if match is None:
        raise ValueError('No time variable found for ' + var.name)
    return var.coords[match]
#%% GFS Import
# Forecast-cycle hours available each day
hours = [0, 6, 12, 18]
# Instantaneous variables to request from the analysis (F000) file
var = [
    'Temperature_surface',
    'Wind_speed_gust_surface',
    'u-component_of_wind_height_above_ground',
    'v-component_of_wind_height_above_ground',
]
# Accumulated precipitation comes from the 6-hour forecast (F006) file
var_precp = ['Total_precipitation_surface_6_Hour_Accumulation']
# Accumulators for the point time series
temp_1d, gust_1d, wndu_1d, wndv_1d, prep_1d, time_1d = [], [], [], [], [], []
# 6-hourly analysis times to download
startdate = datetime(2021, 10, 1)
enddate = datetime(2021, 10, 7)
date_list = pd.date_range(startdate, enddate, freq='6H')
# Loop over analysis times and sample each GFS file at the point of interest
for date in date_list:
    # THREDDS catalog listing this day's 0.5-degree GFS analysis files
    day_catalog = TDSCatalog(
        'https://www.ncei.noaa.gov/thredds/catalog/model-gfs-g4-anl-files/'
        + date.strftime('%Y%m') + '/' + date.strftime('%Y%m%d') + '/' + 'catalog.xml')
    # Dataset names for the analysis (F000) and 6-hour forecast (F006) grib files
    stamp = date.strftime('%Y%m%d_%H%M')
    analysis_ds = day_catalog.datasets['gfs_4_' + stamp + '_000.grb2']
    precip_ds = day_catalog.datasets['gfs_4_' + stamp + '_006.grb2']
    # NCSS point query for the instantaneous fields at the analysis time
    ncss_inst = analysis_ds.subset()
    q_inst = ncss_inst.query()
    q_inst.lonlat_point(-75.6972, 45.4215).time(date)
    q_inst.accept('netcdf')
    q_inst.variables(*var)
    q_inst.vertical_level(0)  # 10 m
    inst_data = xr.open_dataset(NetCDF4DataStore(ncss_inst.get_data(q_inst)),
                                drop_variables='height_above_ground4')
    # NCSS point query for the 6-hour precipitation accumulation valid 6 h later
    ncss_acc = precip_ds.subset()
    q_acc = ncss_acc.query()
    q_acc.lonlat_point(-75.6972, 45.4215).time(date + timedelta(hours=6))
    q_acc.accept('netcdf')
    q_acc.variables(*var_precp)
    q_acc.vertical_level(0)  # 10 m
    acc_data = xr.open_dataset(NetCDF4DataStore(ncss_acc.get_data(q_acc)))
    # Pull out the requested fields
    temp_da = inst_data[var[0]]
    # Append the squeezed point values (with units) for this time step
    temp_1d.append(temp_da.metpy.unit_array.squeeze())
    gust_1d.append(inst_data[var[1]].metpy.unit_array.squeeze())
    wndu_1d.append(inst_data[var[2]].metpy.unit_array.squeeze())
    wndv_1d.append(inst_data[var[3]].metpy.unit_array.squeeze())
    prep_1d.append(acc_data[var_precp[0]].metpy.unit_array.squeeze())
    time_1d.append(find_time_var(temp_da))
#%% Create a new figure
fig = plt.figure(figsize=(15, 12))
fig.patch.set_facecolor('white')
# Surface temperature time series at the sampled point
plot_times = [t.values[0] for t in time_1d]
plot_temps = [t.m.item(0) for t in temp_1d]
plt.plot(plot_times, plot_temps)
plt.show()