# AJMR-Python-Baird/MEDS/MEDS.py
#!/usr/bin/env python3
# coding: utf8
import argparse
import time
import json
from datetime import datetime, timedelta
from urllib.request import urlopen
from urllib.parse import urlencode
import math
import time
# --- Station lookup --------------------------------------------------------
# CHS/IWLS station codes are 5-digit zero-padded strings in the API.
stationCode = 1700

# Download the full station list once and locate our station's internal id.
with urlopen("https://api-iwls.dfo-mpo.gc.ca/api/v1/stations") as url:
    stationlist = json.load(url)

stationsFiltered = [s for s in stationlist
                    if s['code'] == str(stationCode).zfill(5)]
if not stationsFiltered:
    # Fail loudly instead of an opaque IndexError on the next line.
    raise SystemExit('No station found with code {}'.format(stationCode))
stationID = stationsFiltered[0]['id']
stationName = stationsFiltered[0]['officialName']

# Request window; the API interprets these timestamps as UTC.
start_dt = datetime(2019, 7, 1)
end_dt = datetime(2021, 12, 31)

# Select data type
# wlo - Observed water level
# wlf or wlf-spine - predicted water levels (at operational stations only)
# wlp - Predicted water levels
# wlp-hilo - High and low sea predictions (Tide tables)
dataType = 'wlo'

dt_merge = []  # merged event timestamps (naive datetime, UTC)
wl_merge = []  # merged water-level values (float)

# The API limits the span of a single request, so loop through the
# request period in 7-day segments.  .days is already an int, so no
# flooring is needed.
for startDayCount in range(0, (end_dt - start_dt).days, 7):
    # Convert the times to ISO 8601 strings as needed for the HTTP request.
    sdt = (start_dt + timedelta(days=startDayCount)).strftime("%Y-%m-%dT%H:%M:00Z")
    # Clamp the segment end so the last request never extends past end_dt.
    seg_end = min(start_dt + timedelta(days=startDayCount + 7), end_dt)
    edt = seg_end.strftime("%Y-%m-%dT%H:%M:00Z")
    resturl = 'https://api-iwls.dfo-mpo.gc.ca/api/v1/stations/{}/data?time-series-code={}&{}&{}'.format(
        stationID,
        dataType,
        urlencode({'from': sdt}),
        urlencode({'to': edt}))
    time.sleep(0.5)  # throttle: be polite to the API between requests
    # Obtain JSON data from CHS: a list of {"eventDate": ..., "value": ...}
    # records (presumably — verified only for the two keys used below).
    with urlopen(resturl) as hf:
        data = json.loads(hf.read().decode('utf-8'))
    # BUGFIX: parse seconds with %S instead of hard-coding ":00" -- an
    # observation timestamp with non-zero seconds would raise ValueError
    # under the original format string.
    dt_merge.extend(datetime.strptime(x['eventDate'], "%Y-%m-%dT%H:%M:%SZ")
                    for x in data)
    wl_merge.extend(float(x['value']) for x in data)
    print(startDayCount)
print(len(wl_merge))