July 19 code update

PowerShell ESTOFS
This commit is contained in:
Coastlines 2023-07-19 13:20:20 -04:00
parent 5550b4ef71
commit dd2e858cf7
14 changed files with 4494 additions and 824 deletions

311
DUNEX_2022_Process.m Normal file
View File

@ -0,0 +1,311 @@
%% Read and process results from long term DUNEX Run
addpath(genpath('matlab/applications/delft3d_matlab'),'-begin')
addpath(genpath('D:/DUNEX_RT/Operation/Hindcast'),'-begin')
%% Model Results 9
%trihPath = 'D:\DUNEX_RT\Operation\Hindcast\Setup\trih-dunx22.dat';
trihPath = 'D:\Alexander\Setup8_NoWaves\trih-dunx22.dat';
% trihPath = 'D:\Alexander\Setup8_NoWind_NoWaves\trih-dunx22.dat';
% trihPath = '\\tsclient\F\DUNEX_RT\trih-dunx22.dat';
runSteps = 1:8785;
% Read in model data. Open the history file once and reuse the handle:
% the original called qpfopen(trihPath) inside every qpread, re-parsing
% the same file eight times. Station indices [98:129] are the DUNEX
% output points; [6 7 8 9] are NOAA tide gauge locations (see the
% stations{} assignments further down).
trihFileB = qpfopen(trihPath);
modelHistNamesB = qpread(trihFileB,1,'water level','stations');
modelHistWLB = qpread(trihFileB,1,'water level','griddata',runSteps,[98:129 6 7 8 9]);
modelHistVelB = qpread(trihFileB,1,'depth averaged velocity','griddata',runSteps,[98:129 6 7 8 9]);
modelHistWaveHeightB = qpread(trihFileB,1,'significant wave height','griddata',runSteps,[98:129 6 7 8 9]);
modelHistWavePerB = qpread(trihFileB,1,'peak wave period','griddata',runSteps,[98:129 6 7 8 9]);
modelHistWaveDirB = qpread(trihFileB,1,'wave direction','griddata',runSteps,[98:129 6 7 8 9]);
modelHistWindSpeedB = qpread(trihFileB,1,'wind speed','griddata',runSteps,[98:129 6 7 8 9]);
modelHistWindDirB = qpread(trihFileB,1,'wind direction','griddata',runSteps,[98:129 6 7 8 9]);
%% Model Results
% NOTE(review): this path ends in .def while every other trih path in the
% script (including the commented alternatives below) uses the .dat file of
% the trih-<runid>.dat/.def pair -- confirm qpfopen resolves the pair from
% the .def, or point this at the .dat file.
trihPath = 'D:\Alexander\Setup8\trih-dunx22.def';
% trihPath = 'D:\Alexander\Setup8_NoWind_NoWaves\trih-dunx22.dat';
% trihPath = 'D:\Alexander\Setup8_NoWind\trih-dunx22.dat';
runSteps = 1:8785;
% Read in model data. Open the history file once and reuse the handle
% instead of re-parsing it with qpfopen for every quantity.
trihFile = qpfopen(trihPath);
modelHistNames = qpread(trihFile,1,'water level','stations');
modelHistWL = qpread(trihFile,1,'water level','griddata',runSteps,[98:129 6 7 8 9]);
modelHistVel = qpread(trihFile,1,'depth averaged velocity','griddata',runSteps,[98:129 6 7 8 9]);
modelHistWaveHeight = qpread(trihFile,1,'significant wave height','griddata',runSteps,[98:129 6 7 8 9]);
modelHistWavePer = qpread(trihFile,1,'peak wave period','griddata',runSteps,[98:129 6 7 8 9]);
modelHistWaveDir = qpread(trihFile,1,'wave direction','griddata',runSteps,[98:129 6 7 8 9]);
modelHistWindSpeed = qpread(trihFile,1,'wind speed','griddata',runSteps,[98:129 6 7 8 9]);
modelHistWindDir = qpread(trihFile,1,'wind direction','griddata',runSteps,[98:129 6 7 8 9]);
%% WL QA/AC
% Analysis window for the hindcast comparison (July 20 - Oct 1, 2020),
% plus the matching 10-minute model output time base.
dateStart = datetime(2020,07,20);
dateEnd = datetime(2020,10,01,00,00,00);
axisLimits = [dateStart dateEnd];
delfTime = dateStart:minutes(10):dateEnd;
%% Read in WL
% NOAA CO-OPS tide gauge IDs. Cell indices 6-9 deliberately line up with
% the extra station columns [6 7 8 9] requested from the model history
% file above, so Stat can index both.
stations{6} = 8651370; %Dck
stations{7} = 8652587; %Oregon Inlet
stations{8} = 8654467; %Hatteras
stations{9} = 8656483; %Beau
% Download observed water levels in three ~31-day chunks (presumably to
% stay under the CO-OPS per-request span limit -- TODO confirm), stitch
% the chunks together, then interpolate each gauge record onto the
% 10-minute model time base.
for Stat=[6 7 8 9]
for Month = 1:3
if Month==1
% First chunk starts at the analysis start date.
dateStartM=dateStart +((Month-1).*days(31));
dateEndM=dateStart +((Month).*days(31));
else
% Later chunks start the day after the previous chunk ended, so the
% downloaded windows do not overlap.
dateStartM=dateEndM + days(1);
dateEndM=dateStart +((Month).*days(31));
end
% CO-OPS data-getter request: water level relative to NAVD, GMT, metric,
% CSV response (webread parses it into a table).
url = ['https://tidesandcurrents.noaa.gov/api/datagetter?product=water_level&application=NOS.COOPS.TAC.WL&'...
'begin_date=' datestr(dateStartM,'yyyymmdd') '&end_date=' datestr(dateEndM,'yyyymmdd') '&datum=NAVD' '&station=' num2str(stations{Stat})...
'&time_zone=GMT&units=metric&format=csv'];
wlIN = webread(url);
% Accumulate this station's record across the three monthly chunks.
if Month==1
measuredWL_D = datenum(wlIN.DateTime);
measuredWL_W = wlIN.WaterLevel;
else
measuredWL_D = vertcat(measuredWL_D,datenum(wlIN.DateTime));
measuredWL_W = vertcat(measuredWL_W,wlIN.WaterLevel);
end
% NOTE(review): windIN is never created in this loop (looks copied from a
% wind-download script); clearing a nonexistent variable is harmless.
clear windIN wlIN
end
% Interpolate the assembled record onto the 10-minute comparison time
% base, storing one named series per gauge.
switch Stat
case 6
tideOutN = interp1(measuredWL_D,...
measuredWL_W,...
datenum(axisLimits(1):minutes(10):axisLimits(2)));
case 7
tideOutOI = interp1(measuredWL_D,...
measuredWL_W,...
datenum(axisLimits(1):minutes(10):axisLimits(2)));
case 8
tideOutHat = interp1(measuredWL_D,...
measuredWL_W,...
datenum(axisLimits(1):minutes(10):axisLimits(2)));
case 9
tideOutS = interp1(measuredWL_D,...
measuredWL_W,...
datenum(axisLimits(1):minutes(10):axisLimits(2)));
end
end
%% Read in merged WL
% Load the "best series" tables assembled by Dunex_Archive_Merge.m;
% one table per quantity, one column per DUNEX station.
mergedDir = 'D:\Alexander\MATLAB\DunexBounds\';
MergedWL_Table = readtable([mergedDir 'Dunex_RT_WL.csv']);
MergedHs_Table = readtable([mergedDir 'Dunex_RT_SigWave.csv']);
MergedDir_Table = readtable([mergedDir 'Dunex_RT_Dir.csv']);
MergedPer_Table = readtable([mergedDir 'Dunex_RT_Period.csv']);
MergedVelX_Table = readtable([mergedDir 'Dunex_RT_VelX.csv']);
MergedVelY_Table = readtable([mergedDir 'Dunex_RT_VelY.csv']);
MergedWindD_Table = readtable([mergedDir 'Dunex_RT_WindDir.csv']);
MergedWindS_Table = readtable([mergedDir 'Dunex_RT_WindSpeed.csv']);
%% Hs QA/QC
% Read in NDBC annual standard-meteorological files (<station>h2020.txt).
% The station list is wrapped in a loop so more buoys can be added later;
% with the single entry 44095 only buoyOut{1} is populated.
StatCount=1;
for Station = [44095]
buoyIn{StatCount} = readtable(...
['D:\DUNEX_RT\Operation\Hindcast\Buoy\'...
num2str(Station) 'h2020.txt']);
buoyOut{StatCount} = struct;
% Keep only significant wave height plus a proper datetime built from the
% file's year/month/day/hour/minute columns.
buoyOut{StatCount}.WVHT = buoyIn{StatCount}.WVHT;
buoyOut{StatCount}.DateTime = datetime(buoyIn{StatCount}.x_YY,...
buoyIn{StatCount}.MM,buoyIn{StatCount}.DD,buoyIn{StatCount}.hh,...
buoyIn{StatCount}.mm,0);
% WVHT == 99 is treated as the missing-data sentinel; drop those samples
% from both series (order matters: DateTime first, while WVHT still holds
% the sentinel values).
buoyOut{StatCount}.DateTime(buoyOut{StatCount}.WVHT==99) = [];
buoyOut{StatCount}.WVHT(buoyOut{StatCount}.WVHT==99) = [];
StatCount=StatCount+1;
end
%% Create FrankenTimeline
% Build gap-free "Franken" time series: start from the merged real-time
% archive tables and, column by column (32 DUNEX stations), fill any NaN
% gaps with values interpolated from the long hindcast run.
Dunex_RT_WL = MergedWL_Table;
Dunex_RT_SigWave = MergedHs_Table;
Dunex_RT_Dir = MergedDir_Table;
Dunex_RT_Period = MergedPer_Table;
Dunex_RT_VelX = MergedVelX_Table;
Dunex_RT_VelY = MergedVelY_Table;
Dunex_RT_WindDir = MergedWindD_Table;
Dunex_RT_WindSpeed = MergedWindS_Table;
for i = 1:32
% Interpolate each hindcast series onto the corresponding merged table's
% time stamps so model values can be substituted sample-for-sample.
modelWL_Inter(:, i) = interp1(modelHistWL.Time,...
modelHistWL.Val(:,i), datenum(MergedWL_Table.DateTime));
modelHs_Inter(:, i) = interp1(modelHistWaveHeight.Time,...
modelHistWaveHeight.Val(:,i), datenum(MergedHs_Table.DateTime));
% NOTE(review): wave direction is interpolated linearly, which is wrong
% across the 0/360 wrap -- confirm directions near north are acceptable.
modelDir_Inter(:, i) = interp1(modelHistWaveDir.Time,...
modelHistWaveDir.Val(:,i), datenum(MergedDir_Table.DateTime));
modelPer_Inter(:, i) = interp1(modelHistWavePer.Time,...
modelHistWavePer.Val(:,i), datenum(MergedPer_Table.DateTime));
modelVelX_Inter(:, i) = interp1(modelHistVel.Time,...
modelHistVel.XComp(:,i), datenum(MergedVelX_Table.DateTime));
modelVelY_Inter(:, i) = interp1(modelHistVel.Time,...
modelHistVel.YComp(:,i), datenum(MergedVelY_Table.DateTime));
modelWindD_Inter(:, i) = interp1(modelHistWindDir.Time,...
modelHistWindDir.Val(:,i), datenum(MergedWindD_Table.DateTime));
modelWindS_Inter(:, i) = interp1(modelHistWindSpeed.Time,...
modelHistWindSpeed.Val(:,i), datenum(MergedWindS_Table.DateTime));
% Replace NaN entries of column i with the interpolated hindcast values;
% brace indexing writes into the table's underlying data.
Dunex_RT_WL{isnan(Dunex_RT_WL{:,i}), i} =...
modelWL_Inter(isnan(Dunex_RT_WL{:,i}), i);
Dunex_RT_SigWave{isnan(Dunex_RT_SigWave{:,i}), i} =...
modelHs_Inter(isnan(Dunex_RT_SigWave{:,i}), i);
Dunex_RT_Dir{isnan(Dunex_RT_Dir{:,i}), i} =...
modelDir_Inter(isnan(Dunex_RT_Dir{:,i}), i);
Dunex_RT_Period{isnan(Dunex_RT_Period{:,i}), i} =...
modelPer_Inter(isnan(Dunex_RT_Period{:,i}), i);
Dunex_RT_VelX{isnan(Dunex_RT_VelX{:,i}), i} =...
modelVelX_Inter(isnan(Dunex_RT_VelX{:,i}), i);
Dunex_RT_VelY{isnan(Dunex_RT_VelY{:,i}), i} =...
modelVelY_Inter(isnan(Dunex_RT_VelY{:,i}), i);
Dunex_RT_WindDir{isnan(Dunex_RT_WindDir{:,i}), i} =...
modelWindD_Inter(isnan(Dunex_RT_WindDir{:,i}), i);
Dunex_RT_WindSpeed{isnan(Dunex_RT_WindSpeed{:,i}), i} =...
modelWindS_Inter(isnan(Dunex_RT_WindSpeed{:,i}), i);
end
%% Plot WL Hat
% Two-panel water-level validation figure. History columns: 33-36 are the
% appended tide-gauge stations [6 7 8 9], so column 35 = Hatteras (gauge
% 8654467) and column 34 = Oregon Inlet (gauge 8652587).
wlValF = figure
subplot(2,1,1)
title('Hatteras Coast Guard')
hold on
plot(modelHistWL.Time, modelHistWL.Val(:,35),'-r')
% plot(modelHistWLB.Time, modelHistWLB.Val(:,35),'-b')
plot(datenum(axisLimits(1):minutes(10):axisLimits(2)), tideOutHat, '-k')
% plot(datenum(MergedWL_Table.DateTime),MergedWL_Table.x106_DUNEX_9,'-b')
% plot(datenum(Dunex_RT_WL.DateTime),Dunex_RT_WL.x106_DUNEX_9,'-m')
% Legend order follows plot order: model (red) then observations (black).
legend('Large-scale Model','Observations','Orientation','horizontal','Location','south')
ylabel('Water Level [m]')
ax=gca;
ax.XLim = datenum([datetime(2020,08,01,00,00,00) datetime(2020,10,01,00,00,00)]);
ax.YLim = [-0.5 0.75];
datetick('x','keeplimits')
% figure
subplot(2,1,2)
title('Oregon Inlet Marina')
hold on
plot(modelHistWL.Time, modelHistWL.Val(:,34),'-r')
% plot(modelHistWLB.Time, modelHistWLB.Val(:,34),'-b')
% plot(datenum(axisLimits(1):minutes(10):axisLimits(2)), tideOutS, '-k')
plot(datenum(axisLimits(1):minutes(10):axisLimits(2)), tideOutOI, '-k')
% plot(datenum(MergedWL_Table.DateTime),MergedWL_Table.x106_DUNEX_9,'-b')
% plot(datenum(Dunex_RT_WL.DateTime),Dunex_RT_WL.x106_DUNEX_9,'-m')
legend('Large-scale Model','Observations','Orientation','horizontal','Location','south')
ylabel('Water Level [m]')
ax=gca;
ax.XLim = datenum([datetime(2020,08,01,00,00,00) datetime(2020,10,01,00,00,00)]);
ax.YLim = [-0.5 1];
datetick('x','keeplimits')
% Export at a fixed 8x4 inch size for the report figure.
wlValF.Units='inches';
wlValF.PaperOrientation='portrait';
wlValF.Position=[0 0 8 4];
exportgraphics(wlValF, 'D:\Alexander\LargeScaleWL_Val.png', 'Resolution', 300)
%% Plot Hs
% Compare modeled significant wave height against the NDBC buoy record
% and the merged / gap-filled series at the same station column.
% FIX: only one buoy (44095) is read above, stored in buoyOut{1}; the
% original indexed buoyOut{3}, which does not exist and errors unless a
% stale cell array is left in the workspace.
figure
hold on
plot(modelHistWaveHeight.Time, modelHistWaveHeight.Val(:,34),'-r')
% plot(datenum(axisLimits(1):minutes(10):axisLimits(2)), tideOutS, '-k')
plot(datenum(buoyOut{1}.DateTime), buoyOut{1}.WVHT, '-k')
% plot(datenum(MergedHs_Table.DateTime),MergedHs_Table.x129_DUNEX_32,'-b')
plot(datenum(MergedHs_Table.DateTime),MergedHs_Table.x106_DUNEX_3,'-b')
plot(datenum(Dunex_RT_SigWave.DateTime),Dunex_RT_SigWave.x106_DUNEX_3,'-m')
datetick('x','keeplimits')
%% Save
% Write the gap-filled "RevB" boundary tables for the nested model runs.
revBDir = 'D:\Alexander\MATLAB\DunexBoundsB\';
writetable(Dunex_RT_WL, [revBDir 'Dunex_RT_WL_RevB.csv'])
writetable(Dunex_RT_SigWave, [revBDir 'Dunex_RT_SigWave_RevB.csv'])
writetable(Dunex_RT_Dir, [revBDir 'Dunex_RT_Dir_RevB.csv'])
writetable(Dunex_RT_Period, [revBDir 'Dunex_RT_Period_RevB.csv'])
writetable(Dunex_RT_VelX, [revBDir 'Dunex_RT_VelX_RevB.csv'])
writetable(Dunex_RT_VelY, [revBDir 'Dunex_RT_VelY_RevB.csv'])
writetable(Dunex_RT_WindDir, [revBDir 'Dunex_RT_WindDir_RevB.csv'])
writetable(Dunex_RT_WindSpeed, [revBDir 'Dunex_RT_WindSpeed_RevB.csv'])
%% Save Raw Model
% Export each raw hindcast quantity as a CSV with one column per station.
% FIX: the original varNames listed {'WindDir','WindSpeed'} in positions
% 7 and 8, but case 7 exports wind *speed* and case 8 wind *direction*,
% so the two files were written with swapped names. Order corrected here.
varNames = {'WL', 'SigWave', 'Dir', 'Period', 'VelX', 'VelY', 'WindSpeed', 'WindDir'};
for v = 1:8%[1 5 6 7 8]
clear table_Raw
% Pick the source array for this quantity.
switch v
case 1
table_Raw = array2table(modelHistWL.Val);
case 2
table_Raw = array2table(modelHistWaveHeight.Val);
case 3
table_Raw = array2table(modelHistWaveDir.Val);
case 4
table_Raw = array2table(modelHistWavePer.Val);
case 5
table_Raw = array2table(modelHistVel.XComp);
case 6
table_Raw = array2table(modelHistVel.YComp);
case 7
table_Raw = array2table(modelHistWindSpeed.Val);
case 8
table_Raw = array2table(modelHistWindDir.Val);
end
% Column names follow the same station selection used when reading.
table_Raw.Properties.VariableNames = modelHistNames([98:129 6 7 8 9]);
% NOTE(review): all quantities reuse modelHistWL.Time as the time base --
% assumed identical across quantities from the same trih file; confirm.
table_Raw.DateTime = datetime(datevec(modelHistWL.Time));
writetable(table_Raw, ['D:\Alexander\MATLAB\DUNEX_NoWind\Dunex_RT_NoWind'...
varNames{v} '.csv'])
end

190
Dunex_Archive_Merge.m Normal file
View File

@ -0,0 +1,190 @@
%% Script to merge DUNEX-RT Archive to a "best series" dataset
% Alexander Rey, July 15, 2022
addpath(genpath('matlab/applications/delft3d_matlab'),'-begin')
addpath(genpath('D:/DUNEX_RT/Operation/Hindcast'),'-begin')
%% Get descriptive information of the hist file
% Use the first archived run to discover the available quantities and the
% station name list (indices 98:129 are the DUNEX output points).
runDate = datetime(2020, 07, 20);
histVars = qpread(qpfopen(['D:\DUNEX_RT\Archive\' datestr(runDate,'yyyy-mm-dd_HH')...
    '_HistOut7.dat']),1);
histStations = qpread(qpfopen(['D:\DUNEX_RT\Archive\' datestr(runDate,'yyyy-mm-dd_HH')...
    '_HistOut7.dat']),1,'water level','stations');
%% Extract data from July 20, 2020 to October 2, 2020
% Extract only the first 6 hours (36 time steps)
% Extract only stations for DUNEX (98-129)
% Accumulators, one 32-station x 36-step block appended per 6-hour window.
% NOTE(review): naming is inconsistent ("merged*" vs "merge*"); kept as-is
% because the merge loop and export section reference both spellings.
mergedHistWL = [];
mergedHistVelX = [];
mergedHistVelY = [];
mergeHistWaveHeight = [];
mergeHistWavePer = [];
mergeHistWaveDir = [];
mergedHistTime = [];
mergeHistWindSpeed = [];
mergeHistWindDir = [];
% Loop through times: one archived 6-hourly forecast per iteration.
for runDate = datetime(2020, 07, 20):hours(6):datetime(2020, 10, 02)
% for runDate = datetime(2020, 09, 16):hours(6):datetime(2020, 09, 26)
% for runDate = datetime(2020, 08, 03):hours(6):datetime(2020, 08, 06)
    % If file is missing go back and use correct time steps from later in
    % the run: every 6 hours back shifts the extraction window by 36
    % ten-minute steps deeper into the older forecast.
    if exist(['D:\DUNEX_RT\Archive\' datestr(runDate,'yyyy-mm-dd_HH')...
            '_HistOut7.dat'],'file') == 2
        runSteps = 2:37;
        hoursBack = 0;
    elseif exist(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(6), 'yyyy-mm-dd_HH')...
            '_HistOut7.dat'],'file') == 2
        runSteps = 38:73;
        hoursBack = 6;   % semicolon added: assignment no longer echoes to console
    elseif exist(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(12),'yyyy-mm-dd_HH')...
            '_HistOut7.dat'],'file') == 2
        runSteps = 74:109;
        hoursBack = 12;  % semicolon added
    elseif exist(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(18),'yyyy-mm-dd_HH')...
            '_HistOut7.dat'],'file') == 2
        runSteps = 110:145;
        hoursBack = 18;  % semicolon added
    elseif exist(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(24),'yyyy-mm-dd_HH')...
            '_HistOut7.dat'],'file') == 2
        runSteps = 146:181;
        hoursBack = 24;  % semicolon added
    elseif exist(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(30),'yyyy-mm-dd_HH')...
            '_HistOut7.dat'],'file') == 2
        runSteps = 182:217;
        hoursBack = 30;  % semicolon added
    else
        % If missing within the last 30 hours: pad every series with NaNs
        % (32 stations x 36 steps) so the merged time base stays continuous.
        mergedHistWL = [mergedHistWL nan([32,36])];
        mergedHistVelX = [mergedHistVelX nan([32,36])];
        mergedHistVelY = [mergedHistVelY nan([32,36])];
        mergeHistWaveHeight = [mergeHistWaveHeight nan([32,36])];
        mergeHistWavePer = [mergeHistWavePer nan([32,36])];
        mergeHistWaveDir = [mergeHistWaveDir nan([32,36])];
        mergeHistWindSpeed = [mergeHistWindSpeed nan([32,36])];
        mergeHistWindDir = [mergeHistWindDir nan([32,36])];
        mergedHistTime = [mergedHistTime datenum(runDate+minutes(10):minutes(10):runDate+hours(6))];
        continue
    end
    % Read in model data at the 32 DUNEX stations (indices 98:129).
    modelHistWL = qpread(qpfopen(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(hoursBack),'yyyy-mm-dd_HH')...
        '_HistOut7.dat']),1,'water level','griddata',runSteps,98:129);
    modelHistVel = qpread(qpfopen(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(hoursBack),'yyyy-mm-dd_HH')...
        '_HistOut7.dat']),1,'depth averaged velocity','griddata',runSteps,98:129);
    modelHistWaveHeight = qpread(qpfopen(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(hoursBack),'yyyy-mm-dd_HH')...
        '_HistOut7.dat']),1,'significant wave height','griddata',runSteps,98:129);
    modelHistWavePer = qpread(qpfopen(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(hoursBack),'yyyy-mm-dd_HH')...
        '_HistOut7.dat']),1,'peak wave period','griddata',runSteps,98:129);
    modelHistWaveDir = qpread(qpfopen(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(hoursBack),'yyyy-mm-dd_HH')...
        '_HistOut7.dat']),1,'wave direction','griddata',runSteps,98:129);
    modelHistWindSpeed = qpread(qpfopen(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(hoursBack),'yyyy-mm-dd_HH')...
        '_HistOut7.dat']),1,'wind speed','griddata',runSteps,98:129);
    modelHistWindDir = qpread(qpfopen(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(hoursBack),'yyyy-mm-dd_HH')...
        '_HistOut7.dat']),1,'wind direction','griddata',runSteps,98:129);
    % Merge (arrays grow inside the loop; acceptable for ~300 iterations).
    mergedHistWL = [mergedHistWL modelHistWL.Val'];
    mergedHistVelX = [mergedHistVelX modelHistVel.XComp'];
    mergedHistVelY = [mergedHistVelY modelHistVel.YComp'];
    mergeHistWaveHeight = [mergeHistWaveHeight modelHistWaveHeight.Val'];
    mergeHistWavePer = [mergeHistWavePer modelHistWavePer.Val'];
    mergeHistWaveDir = [mergeHistWaveDir modelHistWaveDir.Val'];
    mergeHistWindSpeed = [mergeHistWindSpeed modelHistWindSpeed.Val'];
    mergeHistWindDir = [mergeHistWindDir modelHistWindDir.Val'];
    mergedHistTime = [mergedHistTime modelHistWL.Time'];
    disp(runDate)   % progress indicator
end
% Sample bed level once (time step 1) to recover station coordinates.
% NOTE(review): runDate/hoursBack here are leftovers from the final loop
% iteration; if that iteration hit the missing-file branch, hoursBack is
% stale and this may open the wrong (or a nonexistent) file -- confirm.
extractBed = qpread(qpfopen(['D:\DUNEX_RT\Archive\' datestr(runDate-hours(hoursBack),'yyyy-mm-dd_HH')...
'_HistOut7.dat']),1,'bed level','griddata',1,98:129);
extractPtX = extractBed.X(1,:);
extractPtY = extractBed.Y(1,:);
%% Plot
% Quick visual QA of the merged series at station column 10.
figure
plot(mergedHistTime, mergedHistWL(10, :))
datetick('x')
ylabel('DUNEX-RT Water Level (mNAVD88)')
figure
plot(mergedHistTime, mergeHistWaveHeight(10, :))
datetick('x')
ylabel('DUNEX-RT Significant Wave Height (m)') % fixed unit-label typo: was "(m8)"
%% Export data
% Write one CSV per quantity: rows = time (transposed from the
% station-by-time accumulators), columns = DUNEX station names, plus a
% DateTime column rebuilt from the merged datenum vector. The named
% Merged*_Table variables are kept in the workspace for interactive use.
MergedWL_Table = array2table(mergedHistWL');
MergedWL_Table.Properties.VariableNames = histStations(98:129);
MergedWL_Table.DateTime = datetime(datevec(mergedHistTime)) ;
writetable(MergedWL_Table, 'D:\Alexander\MATLAB\DunexBounds\Dunex_RT_WL.csv')
MergedVelX_Table = array2table(mergedHistVelX');
MergedVelX_Table.Properties.VariableNames = histStations(98:129);
MergedVelX_Table.DateTime = datetime(datevec(mergedHistTime)) ;
writetable(MergedVelX_Table, 'D:\Alexander\MATLAB\DunexBounds\Dunex_RT_VelX.csv')
MergedVelY_Table = array2table(mergedHistVelY');
MergedVelY_Table.Properties.VariableNames = histStations(98:129);
MergedVelY_Table.DateTime = datetime(datevec(mergedHistTime)) ;
writetable(MergedVelY_Table, 'D:\Alexander\MATLAB\DunexBounds\Dunex_RT_VelY.csv')
MergedWaveHeight_Table = array2table(mergeHistWaveHeight');
MergedWaveHeight_Table.Properties.VariableNames = histStations(98:129);
MergedWaveHeight_Table.DateTime = datetime(datevec(mergedHistTime)) ;
writetable(MergedWaveHeight_Table, 'D:\Alexander\MATLAB\DunexBounds\Dunex_RT_SigWave.csv')
MergedWavePeriod_Table = array2table(mergeHistWavePer');
MergedWavePeriod_Table.Properties.VariableNames = histStations(98:129);
MergedWavePeriod_Table.DateTime = datetime(datevec(mergedHistTime)) ;
writetable(MergedWavePeriod_Table, 'D:\Alexander\MATLAB\DunexBounds\Dunex_RT_Period.csv')
MergedWaveDir_Table = array2table(mergeHistWaveDir');
MergedWaveDir_Table.Properties.VariableNames = histStations(98:129);
MergedWaveDir_Table.DateTime = datetime(datevec(mergedHistTime)) ;
writetable(MergedWaveDir_Table, 'D:\Alexander\MATLAB\DunexBounds\Dunex_RT_Dir.csv')
MergedWindSpeed_Table = array2table(mergeHistWindSpeed');
MergedWindSpeed_Table.Properties.VariableNames = histStations(98:129);
MergedWindSpeed_Table.DateTime = datetime(datevec(mergedHistTime)) ;
writetable(MergedWindSpeed_Table, 'D:\Alexander\MATLAB\DunexBounds\Dunex_RT_WindSpeed.csv')
MergedWindDir_Table = array2table(mergeHistWindDir');
MergedWindDir_Table.Properties.VariableNames = histStations(98:129);
MergedWindDir_Table.DateTime = datetime(datevec(mergedHistTime)) ;
writetable(MergedWindDir_Table, 'D:\Alexander\MATLAB\DunexBounds\Dunex_RT_WindDir.csv')
% Station metadata: name, UTM coordinates, and bed level at each point
% (bed sampled from extractBed computed above).
stationDetails = cell2table([histStations(98:129) num2cell(extractPtX') num2cell(extractPtY') num2cell(squeeze(extractBed.Val(1,1,:)))]);
stationDetails.Properties.VariableNames = {'StationName','UTMx','UTMy', 'BedLevel'};
writetable(stationDetails, 'D:\Alexander\MATLAB\DunexBounds\stationDetails.csv')

View File

@ -5,9 +5,9 @@
clear all clear all
cd D:\DUNEX_RT\Operation cd D:\DUNEX_RT\Operation
echo off; echo off;
addpath('NCtoolbox') % addpath('NCtoolbox')
addpath('m_map') addpath('m_map')
setup_nctoolbox % setup_nctoolbox
flowGrid = load('Cone7W_100.mat'); flowGrid = load('Cone7W_100.mat');
waveGrid = load('Cone7W_250.mat'); waveGrid = load('Cone7W_250.mat');
load('LMSL_RT.mat') load('LMSL_RT.mat')
@ -30,7 +30,7 @@ props.setProperty('mail.smtp.starttls.enable','true');
% Every 6 hours % Every 6 hours
nistTime = datetime(datevec(now())) - tzoffset(datetime(datevec(now()),'TimeZone','local')); nistTime = datetime(datevec(now())) - tzoffset(datetime(datevec(now()),'TimeZone','local'));
masterTime = dateshift(nistTime,'start','hour')-hours(2) % Assume script will run at 5 and a half hours after model start masterTime = dateshift(nistTime,'start','hour')-hours(2) % Assume script will run 2 hours after model start
clear nistTime clear nistTime
masterDate = datetime(year(masterTime),month(masterTime),day(masterTime)); masterDate = datetime(year(masterTime),month(masterTime),day(masterTime));
@ -40,14 +40,17 @@ else
FIRST_RUN = 1; FIRST_RUN = 1;
end end
% setup_nctoolbox
% pause(1)
%% Import %% Import
try try
Import=0; Import=0;
%% Clean files from previous run %% Clean files from previous run
system('taskkill /IM "d_hydro.exe" /F') % system('taskkill /IM "d_hydro.exe" /F')
system('taskkill /IM "wave.exe" /F') % system('taskkill /IM "wave.exe" /F')
delete com-* delete com-*
delete hot* delete hot*
@ -81,17 +84,26 @@ try
% datestr(masterTime-hours(6),'yyyymmdd')... % datestr(masterTime-hours(6),'yyyymmdd')...
% '/estofs.t' datestr(masterTime-hours(6),'hh') 'z.conus.east.f000.grib2']; % '/estofs.t' datestr(masterTime-hours(6),'hh') 'z.conus.east.f000.grib2'];
url = ['https://noaa-gestofs-pds.s3.amazonaws.com/estofs.'... % url = ['https://noaa-gestofs-pds.s3.amazonaws.com/estofs.'...
% datestr(masterTime-hours(6),'yyyymmdd')...
% '/estofs.t' datestr(masterTime-hours(6),'hh') 'z.conus.east.f000.grib2'];
% url = ['https://noaa-gestofs-pds.s3.amazonaws.com/_stofs-3D-Atlantic/stofs_3d_atl.'...
% datestr(masterTime-hours(6),'yyyymmdd')...
% '/stofs_3d_atl.t' datestr(masterTime-hours(6),'hh') 'z.conus.east.f000.grib2'];
url = ['https://nomads.ncep.noaa.gov/pub/data/nccf/com/stofs/prod/stofs_2d_glo.'...
datestr(masterTime-hours(6),'yyyymmdd')... datestr(masterTime-hours(6),'yyyymmdd')...
'/estofs.t' datestr(masterTime-hours(6),'hh') 'z.conus.east.f000.grib2']; '/stofs_2d_glo.t' datestr(masterTime-hours(6),'hh') 'z.conus.east.f000.grib2'];
websave('ESTOFS.grib',url,options) websave('ESTOFS.grib',url,options)
ESTOFS = ncgeodataset('ESTOFS.grib'); % ESTOFS = ncgeodataset('ESTOFS.grib');
% Convert grib file to NetCDF
system(['D:\DUNEX_RT\Windows_64\wgrib2.exe D:\DUNEX_RT\Operation\ESTOFS.grib '...
'-netcdf D:\DUNEX_RT\Operation\ESTOFS.nc']);
latESTOFS = ncread('D:\DUNEX_RT\Operation\ESTOFS.nc','latitude');
lonESTOFS = wrapTo180(ncread('D:\DUNEX_RT\Operation\ESTOFS.nc','longitude'));
m_proj('lambert conformal conic','ori',[-95 25.0],'clo',-95,'par',[25 25],'ell','sphere');
[meshX,meshY] = meshgrid(ESTOFS{'x'}(:),ESTOFS{'y'}(:));
[lonESTOFS,latESTOFS] = m_xy2ll(meshX.*1000,meshY.*1000);
clear meshX meshY
ESTstart = 6; %7 ESTstart = 6; %7
ESTrun = 6; ESTrun = 6;
catch catch
@ -105,17 +117,26 @@ try
% url = ['https://nomads.ncep.noaa.gov/pub/data/nccf/com/estofs/prod/estofs.'... % url = ['https://nomads.ncep.noaa.gov/pub/data/nccf/com/estofs/prod/estofs.'...
% datestr(masterTime-days(1),'yyyymmdd')... % datestr(masterTime-days(1),'yyyymmdd')...
% '/estofs.t' datestr(masterTime-days(1),'hh') 'z.conus.east.f000.grib2']; % '/estofs.t' datestr(masterTime-days(1),'hh') 'z.conus.east.f000.grib2'];
url = ['https://noaa-gestofs-pds.s3.amazonaws.com/estofs.'... % url = ['https://noaa-gestofs-pds.s3.amazonaws.com/estofs.'...
% datestr(masterTime-days(1),'yyyymmdd')...
% '/estofs.t' datestr(masterTime-days(1),'hh') 'z.conus.east.f000.grib2'];
url = ['https://nomads.ncep.noaa.gov/pub/data/nccf/com/stofs/prod/stofs_2d_glo.'...
datestr(masterTime-days(1),'yyyymmdd')... datestr(masterTime-days(1),'yyyymmdd')...
'/estofs.t' datestr(masterTime-days(1),'hh') 'z.conus.east.f000.grib2']; '/stofs_2d_glo.t' datestr(masterTime-days(1),'hh') 'z.conus.east.f000.grib2'];
websave('ESTOFS.grib',url,options) websave('ESTOFS.grib',url,options)
ESTOFS = ncgeodataset('ESTOFS.grib'); % ESTOFS = ncgeodataset('ESTOFS.grib');
% Convert grib file to NetCDF
system(['D:\DUNEX_RT\Windows_64\wgrib2.exe D:\DUNEX_RT\Operation\ESTOFS.grib '...
'-netcdf D:\DUNEX_RT\Operation\ESTOFS.nc']);
% m_proj('lambert conformal conic','ori',[-95 25.0],'clo',-95,'par',[25 25],'ell','sphere');
latESTOFS = ncread('D:\DUNEX_RT\Operation\ESTOFS.nc','latitude');
lonESTOFS = wrapTo180(ncread('D:\DUNEX_RT\Operation\ESTOFS.nc','longitude'));
m_proj('lambert conformal conic','ori',[-95 25.0],'clo',-95,'par',[25 25],'ell','sphere');
[meshX,meshY] = meshgrid(ESTOFS{'x'}(:),ESTOFS{'y'}(:));
[lonESTOFS,latESTOFS] = m_xy2ll(meshX.*1000,meshY.*1000);
clear meshX meshY
ESTstart = 24; %25 ESTstart = 24; %25
ESTrun = 24; ESTrun = 24;
end end
@ -170,7 +191,8 @@ try
% Find ESTOFS values from small grid % Find ESTOFS values from small grid
idxCount=1; idxCount=1;
errorET = 0; errorET = 0;
for i = ESTstart:ESTstart+96 i = ESTstart;
while i <=ESTstart+96
if errorET<10 if errorET<10
try try
% url = ['https://nomads.ncep.noaa.gov/pub/data/nccf/com/estofs/prod/estofs.'... % url = ['https://nomads.ncep.noaa.gov/pub/data/nccf/com/estofs/prod/estofs.'...
@ -178,23 +200,57 @@ try
% '/estofs.t' datestr(masterTime-hours(ESTrun),'hh') 'z.conus.east.f'... % '/estofs.t' datestr(masterTime-hours(ESTrun),'hh') 'z.conus.east.f'...
% num2str(i,'%03d') '.grib2']; % num2str(i,'%03d') '.grib2'];
url = ['https://noaa-gestofs-pds.s3.amazonaws.com/estofs.'... % url = ['https://noaa-gestofs-pds.s3.amazonaws.com/estofs.'...
% datestr(masterTime-hours(ESTrun),'yyyymmdd')...
% '/estofs.t' datestr(masterTime-hours(ESTrun),'hh') 'z.conus.east.f'...
% num2str(i,'%03d') '.grib2'];
url = ['https://nomads.ncep.noaa.gov/pub/data/nccf/com/stofs/prod/stofs_2d_glo.'...
datestr(masterTime-hours(ESTrun),'yyyymmdd')... datestr(masterTime-hours(ESTrun),'yyyymmdd')...
'/estofs.t' datestr(masterTime-hours(ESTrun),'hh') 'z.conus.east.f'... '/stofs_2d_glo.t' datestr(masterTime-hours(ESTrun),'hh') 'z.conus.east.f'...
num2str(i,'%03d') '.grib2']; num2str(i,'%03d') '.grib2'];
websave(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ...
datestr(masterTime,'YYYY-mm-DD_HH') '_f' num2str(idxCount-1,'%03d') '.grib'],url,options) ESTOFSstring = ['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ...
datestr(masterTime,'YYYY-mm-DD_HH') '_f' num2str(idxCount-1,'%03d')];
%websave([ESTOFSstring '.grib'],url,options)
% Use Powershell since sometimes websave doesn't
% work??? But Powershell seems to? Something about
% content encoding? Also, the first part of the
% string is required due to some https thing with
% powershell
system(['powershell -command "[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12; Invoke-WebRequest -Uri ' ...
url ' -OutFile ' ESTOFSstring '.grib"']);
% Convert grib file to NetCDF using wgrib2
system(['D:\DUNEX_RT\Windows_64\wgrib2.exe '...
ESTOFSstring '.grib '...
'-netcdf ' ...
ESTOFSstring '.nc']);
% Remove grib file
eval(['delete ' ESTOFSstring '.grib'])
if i<=ESTstart+48 if i<=ESTstart+48
ESTOFS = ncgeodataset(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ... % ESTOFS = ncgeodataset(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ...
datestr(masterTime,'YYYY-mm-DD_HH') '_f' num2str(idxCount-1,'%03d') '.grib']); % datestr(masterTime,'YYYY-mm-DD_HH') '_f' num2str(idxCount-1,'%03d') '.grib']);
waterLevelsGRID(idxCount,:,:) = ESTOFS{'Ocean_Surface_Elevation_Relative_to_Geoid_surface'}(1,min(idxESTOFS(:,1)):max(idxESTOFS(:,1)),... waterLevelsGRID(idxCount,:,:) = ncread([ESTOFSstring '.nc'],...
min(idxESTOFS(:,2)):max(idxESTOFS(:,2))); % Extract small grid 'ELEV_surface',[min(idxESTOFS(:,1)),min(idxESTOFS(:,2)),1],...
[max(idxESTOFS(:,1))-min(idxESTOFS(:,1))+1,max(idxESTOFS(:,2))-min(idxESTOFS(:,2))+1,1]);
% waterLevelsGRID(idxCount,:,:) = ESTOFS{'Ocean_Surface_Elevation_Relative_to_Geoid_surface'}(1,min(idxESTOFS(:,1)):max(idxESTOFS(:,1)),...
% min(idxESTOFS(:,2)):max(idxESTOFS(:,2))); % Extract small grid
disp(i)
disp(idxCount)
end end
i = i + 1;
idxCount = idxCount + 1; idxCount = idxCount + 1;
errorET = 0;
catch catch
errorET = errorET+1; disp('ERROR ET')
errorET = errorET+1
end end
else else
error('ESTOFS ERROR') error('ESTOFS ERROR')
@ -204,8 +260,8 @@ try
idxCount=1; idxCount=1;
for i = 1:length(idxESTOFS) for i = 1:length(idxESTOFS)
waterLevels(:,idxCount) = waterLevelsGRID(:,idxESTOFS(idxCount,1)-min(idxESTOFS(:,1))+1,... waterLevels(:,idxCount) = squeeze(waterLevelsGRID(:,idxESTOFS(idxCount,1)-min(idxESTOFS(:,1))+1,...
idxESTOFS(idxCount,2)-min(idxESTOFS(:,2))+1); idxESTOFS(idxCount,2)-min(idxESTOFS(:,2))+1))';
idxCount = idxCount + 1; idxCount = idxCount + 1;
end end
@ -215,12 +271,12 @@ try
% url = ['https://nomads.ncep.noaa.gov/pub/data/nccf/com/estofs/prod/estofs.'... % url = ['https://nomads.ncep.noaa.gov/pub/data/nccf/com/estofs/prod/estofs.'...
% datestr(masterTime-hours(ESTrun),'yyyymmdd')... % datestr(masterTime-hours(ESTrun),'yyyymmdd')...
% '/estofs.t' datestr(masterTime-hours(ESTrun),'hh') 'z.points.cwl.nc']; % '/estofs.t' datestr(masterTime-hours(ESTrun),'hh') 'z.points.cwl.nc'];
url = ['https://noaa-gestofs-pds.s3.amazonaws.com/estofs.'... % url = ['https://noaa-gestofs-pds.s3.amazonaws.com/estofs.'...
datestr(masterTime-hours(ESTrun),'yyyymmdd')... % datestr(masterTime-hours(ESTrun),'yyyymmdd')...
'/estofs.t' datestr(masterTime-hours(ESTrun),'hh') 'z.points.cwl.nc']; % '/estofs.t' datestr(masterTime-hours(ESTrun),'hh') 'z.points.cwl.nc'];
%
websave(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ... % websave(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ...
datestr(masterTime,'YYYY-mm-DD_HH') '_points.nc'],url,options) % datestr(masterTime,'YYYY-mm-DD_HH') '_points.nc'],url,options)
% fprintf(fileID,[boundryNames{Boundry} ' N T 750 1322 1081 1322 0.0000000e+000\n']); % fprintf(fileID,[boundryNames{Boundry} ' N T 750 1322 1081 1322 0.0000000e+000\n']);
% fprintf(fileID,[boundryNames{Boundry} ' N T 475 1 1081 1 0.0000000e+000\n']); % fprintf(fileID,[boundryNames{Boundry} ' N T 475 1 1081 1 0.0000000e+000\n']);
@ -234,11 +290,15 @@ try
datestr(masterTime-hours(6),'yyyymmdd')... datestr(masterTime-hours(6),'yyyymmdd')...
'/gfswave.atlocn.0p16_' datestr(masterTime-hours(6),'hh') 'z']; '/gfswave.atlocn.0p16_' datestr(masterTime-hours(6),'hh') 'z'];
MWW3 = ncgeodataset(url); % MWW3 = ncgeodataset(url);
clear url % clear url
%
% lonMWW3=wrapTo180(MWW3{'lon'}(:));
% latMWW3=MWW3{'lat'}(:);
lonMWW3=wrapTo180(ncread(url,'lon'));
latMWW3=wrapTo180(ncread(url,'lat'));
lonMWW3=wrapTo180(MWW3{'lon'}(:));
latMWW3=MWW3{'lat'}(:);
% Find MWW3 index for grid points along boundry % Find MWW3 index for grid points along boundry
idxCount=1; idxCount=1;
@ -293,29 +353,45 @@ try
clear idxCount clear idxCount
% % Extract WW3 Hs for 36 hours at indexes
% wavesGRID_Hs = MWW3{'htsgwsfc'}(7:23,min(idxMWW3(:,1)):max(idxMWW3(:,1)),...
% min(idxMWW3(:,2)):max(idxMWW3(:,2))); % Extract small grid htsgwsfc
% %swell_1
% % Period
% wavesGRID_P = MWW3{'perpwsfc'}(7:23,min(idxMWW3(:,1)):max(idxMWW3(:,1)),... %
% min(idxMWW3(:,2)):max(idxMWW3(:,2))); % Extract small grid perpwsfc
% %swper_1
% % Direction
% wavesGRID_Dir = MWW3{'dirpwsfc'}(7:23,min(idxMWW3(:,1)):max(idxMWW3(:,1)),...
% min(idxMWW3(:,2)):max(idxMWW3(:,2))); % Extract small grid dirpwsfc
% %swdir_1
% Extract WW3 Hs for 36 hours at indexes % Extract WW3 Hs for 36 hours at indexes
wavesGRID_Hs = MWW3{'htsgwsfc'}(7:23,min(idxMWW3(:,1)):max(idxMWW3(:,1)),... wavesGRID_Hs = ncread(url,'htsgwsfc',[min(idxMWW3(:,2)),min(idxMWW3(:,1)),7],...
min(idxMWW3(:,2)):max(idxMWW3(:,2))); % Extract small grid htsgwsfc [max(idxMWW3(:,2))-min(idxMWW3(:,2))+1, max(idxMWW3(:,1))-min(idxMWW3(:,1))+1,23-7+1]); % Extract small grid htsgwsfc
%swell_1 %swell_1
% Period % Period
wavesGRID_P = MWW3{'perpwsfc'}(7:23,min(idxMWW3(:,1)):max(idxMWW3(:,1)),... % wavesGRID_P = ncread(url,'perpwsfc',[min(idxMWW3(:,2)),min(idxMWW3(:,1)),7],...
min(idxMWW3(:,2)):max(idxMWW3(:,2))); % Extract small grid perpwsfc [max(idxMWW3(:,2))-min(idxMWW3(:,2))+1, max(idxMWW3(:,1))-min(idxMWW3(:,1))+1,23-7+1]); % Extract small grid htsgwsfc
%swper_1 %swper_1
% Direction % Direction
wavesGRID_Dir = MWW3{'dirpwsfc'}(7:23,min(idxMWW3(:,1)):max(idxMWW3(:,1)),... wavesGRID_Dir = ncread(url,'dirpwsfc',[min(idxMWW3(:,2)),min(idxMWW3(:,1)),7],...
min(idxMWW3(:,2)):max(idxMWW3(:,2))); % Extract small grid dirpwsfc [max(idxMWW3(:,2))-min(idxMWW3(:,2))+1, max(idxMWW3(:,1))-min(idxMWW3(:,1))+1,23-7+1]);% Extract small grid htsgwsfc
%swdir_1
% Find WW3 values from small grid % Find WW3 values from small grid
idxCount=1; idxCount=1;
for i = 1:length(idxMWW3) for i = 1:length(idxMWW3)
waves(:,idxCount,1) = wavesGRID_Hs(:,idxMWW3(idxCount,1)-min(idxMWW3(:,1))+1,... waves(:,idxCount,1) = wavesGRID_Hs(idxMWW3(idxCount,2)-min(idxMWW3(:,2))+1,...
idxMWW3(idxCount,2)-min(idxMWW3(:,2))+1); %Hs idxMWW3(idxCount,1)-min(idxMWW3(:,1))+1,:); %Hs
waves(:,idxCount,2) = wavesGRID_P(:,idxMWW3(idxCount,1)-min(idxMWW3(:,1))+1,... waves(:,idxCount,2) = wavesGRID_P(idxMWW3(idxCount,2)-min(idxMWW3(:,2))+1,...
idxMWW3(idxCount,2)-min(idxMWW3(:,2))+1); %Tp idxMWW3(idxCount,1)-min(idxMWW3(:,1))+1,:); %Tp
waves(:,idxCount,3) = wavesGRID_Dir(:,idxMWW3(idxCount,1)-min(idxMWW3(:,1))+1,... waves(:,idxCount,3) = wavesGRID_Dir(idxMWW3(idxCount,2)-min(idxMWW3(:,2))+1,...
idxMWW3(idxCount,2)-min(idxMWW3(:,2))+1); %Dir idxMWW3(idxCount,1)-min(idxMWW3(:,1))+1,:); %Dir
idxCount = idxCount + 1; idxCount = idxCount + 1;
end end
@ -347,40 +423,56 @@ try
[pCheck,gridParams] = system(['perl "D:\DUNEX_RT\Windows_64\grid_defn.pl" '... [pCheck,gridParams] = system(['perl "D:\DUNEX_RT\Windows_64\grid_defn.pl" '...
'D:\DUNEX_RT\Operation\HRRR\hrrrT' num2str(ForcastHour,'%02d') '.grib2']); 'D:\DUNEX_RT\Operation\HRRR\hrrrT' num2str(ForcastHour,'%02d') '.grib2']);
[wCheck,gridParams] = system(['wgrib2 D:\DUNEX_RT\Operation\HRRR\hrrrT' ... % [wCheck,gridParams] = system(['wgrib2 D:\DUNEX_RT\Operation\HRRR\hrrrT' ...
% num2str(ForcastHour,'%02d') '.grib2 -new_grid_winds earth -new_grid ' ...
% gridParams ' D:\DUNEX_RT\Operation\HRRR\hrrrT'...
% num2str(ForcastHour,'%02d') '.grib2.regrid']);
% Convert Winds
[wCheck,~] = system(['wgrib2 D:\DUNEX_RT\Operation\HRRR\hrrrT' ...
num2str(ForcastHour,'%02d') '.grib2 -new_grid_winds earth -new_grid ' ... num2str(ForcastHour,'%02d') '.grib2 -new_grid_winds earth -new_grid ' ...
gridParams ' D:\DUNEX_RT\Operation\HRRR\hrrrT'... gridParams ' D:\DUNEX_RT\Operation\HRRR\hrrrT'...
num2str(ForcastHour,'%02d') '.grib2.regrid']); num2str(ForcastHour,'%02d') '.grib2.regrid']);
% Convert grib file to NetCDF
[nCheck,~] = system(['D:\DUNEX_RT\Windows_64\wgrib2.exe D:\DUNEX_RT\Operation\HRRR\hrrrT'...
num2str(ForcastHour,'%02d') '.grib2.regrid' ...
' -netcdf D:\DUNEX_RT\Operation\HRRR\hrrrT'...
num2str(ForcastHour,'%02d') '.regrid.nc']);
end end
% delete(gcp('nocreate')) % delete(gcp('nocreate'))
HRRR = ncgeodataset(['D:\DUNEX_RT\Operation\HRRR\hrrrT' num2str(0,'%02d') '.grib2.regrid']); % HRRR = ncgeodataset(['D:\DUNEX_RT\Operation\HRRR\hrrrT' num2str(0,'%02d') '.grib2.regrid']);
clear url clear url
m_proj('lambert conformal conic','ori',[262.5 38.5],'clo',262.5,'par',[38.5 38.5],'ell','sphere');%,'lat',[20 55],'lon',[-60 -130] % m_proj('lambert conformal conic','ori',[262.5 38.5],'clo',262.5,'par',[38.5 38.5],'ell','sphere');%,'lat',[20 55],'lon',[-60 -130]
[meshX,meshY] = meshgrid(HRRR{'x'}(:),HRRR{'y'}(:)); % [meshX,meshY] = meshgrid(HRRR{'x'}(:),HRRR{'y'}(:));
[windGridLON,windGridLAT] = m_xy2ll(meshX.*1000,meshY.*1000); % [windGridLON,windGridLAT] = m_xy2ll(meshX.*1000,meshY.*1000);
windGridLON = ncread(['D:\DUNEX_RT\Operation\HRRR\hrrrT'...
num2str(ForcastHour,'%02d') '.regrid.nc'],'longitude');
windGridLAT = ncread(['D:\DUNEX_RT\Operation\HRRR\hrrrT'...
num2str(ForcastHour,'%02d') '.regrid.nc'],'latitude');
windGridLON=wrapTo180(windGridLON); windGridLON=wrapTo180(windGridLON);
clear meshX meshY
% Extract MWW3 Hs for 48 hours at indexes % Extract MWW3 Hs for 48 hours at indexes
for ForcastHour = 0:48 for ForcastHour = 0:48
HRRR = ncgeodataset(['D:\DUNEX_RT\Operation\HRRR\hrrrT' num2str(ForcastHour,'%02d') '.grib2.regrid']); % HRRR = ncgeodataset(['D:\DUNEX_RT\Operation\HRRR\hrrrT' num2str(ForcastHour,'%02d') '.grib2.regrid']);
windU(ForcastHour+1,:,:) = HRRR{'u-component_of_wind_height_above_ground'}(:,:,:); %U Winds @ 10m [m/s] windU(ForcastHour+1,:,:) = ncread(['D:\DUNEX_RT\Operation\HRRR\hrrrT'...
windV(ForcastHour+1,:,:) = HRRR{'v-component_of_wind_height_above_ground'}(:,:,:); %V Winds @ 10m [m/s] num2str(ForcastHour,'%02d') '.regrid.nc'],'UGRD_10maboveground');
windV(ForcastHour+1,:,:) = ncread(['D:\DUNEX_RT\Operation\HRRR\hrrrT'...
num2str(ForcastHour,'%02d') '.regrid.nc'],'VGRD_10maboveground'); %V Winds @ 10m [m/s]
HRRR = ncgeodataset(['D:\DUNEX_RT\Operation\HRRR\hrrrT' num2str(ForcastHour,'%02d') '.grib2']); pressure(ForcastHour+1,:,:) = ncread(['D:\DUNEX_RT\Operation\HRRR\hrrrT'...
pressure(ForcastHour+1,:,:) = HRRR{'Pressure_surface'}(:,:,:); %Surface Pressure [pa] num2str(ForcastHour,'%02d') '.regrid.nc'],'PRES_surface'); %Surface Pressure [pa]
if ForcastHour==0 rain(ForcastHour+1,:,:) =ncread(['D:\DUNEX_RT\Operation\HRRR\hrrrT'...
rain(ForcastHour+1,:,:) = HRRR{'Total_precipitation_surface_0_Hour_Accumulation'}(1,:,:); %Surface precipitation [kg/m^2] num2str(ForcastHour,'%02d') '.regrid.nc'],'APCP_surface');%Surface precipitation [kg/m^2]
elseif ForcastHour==1 temperature(ForcastHour+1) = squeeze(nanmean(nanmean(ncread(['D:\DUNEX_RT\Operation\HRRR\hrrrT'...
rain(ForcastHour+1,:,:) = HRRR{'Total_precipitation_surface_1_Hour_Accumulation'}(1,:,:); %Surface precipitation [kg/m^2] num2str(ForcastHour,'%02d') '.regrid.nc'],'TMP_surface')- 273.15, 1), 2)); %Surface Temperature [C]
else
rain(ForcastHour+1,:,:) = HRRR{'Total_precipitation_surface_Mixed_intervals_Accumulation'}(2,:,:); %Surface precipitation [kg/m^2]
end
temperature(ForcastHour+1) = squeeze(nanmean(nanmean(HRRR{'Temperature_surface'}(:,:,:)- 273.15,2),3)); %Surface Temperature [C]
end end
% Extract Temp % Extract Temp
@ -698,6 +790,8 @@ try
vDatNavDInter(vDatOrder) = naninterp(interp1(... vDatNavDInter(vDatOrder) = naninterp(interp1(...
linspace(1,length(vDatOrder),length(vDatOrderOld)),... linspace(1,length(vDatOrder),length(vDatOrderOld)),...
vDatNavD(vDatOrderOld),1:length(vDatOrder))); vDatNavD(vDatOrderOld),1:length(vDatOrder)));
disp(size(waterLevels))
disp(size(masterTime:hours(1):masterTime+hours(48)))
for i = 1:length(idxNCOM) for i = 1:length(idxNCOM)
wlO(:,i) = interp1(masterTime:hours(1):masterTime+hours(48),waterLevels(:,i)+0,masterTime:minutes(10):masterTime+hours(48),'pchip'); wlO(:,i) = interp1(masterTime:hours(1):masterTime+hours(48),waterLevels(:,i)+0,masterTime:minutes(10):masterTime+hours(48),'pchip');
end end
@ -1740,14 +1834,21 @@ try
for e = [1 4] for e = [1 4]
try try
clear meshX meshY % clear meshX meshY
m_proj('lambert conformal conic','ori',[-95 25.0],'clo',-95,'par',[25 25],'ell','sphere'); % m_proj('lambert conformal conic','ori',[-95 25.0],'clo',-95,'par',[25 25],'ell','sphere');
ESTOFS = ncgeodataset(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ... % ESTOFS = ncgeodataset(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ...
datestr(masterTime-hours(ESTOFStimeOffset(e)),'YYYY-mm-DD_HH') '_f' num2str(0,'%03d') '.grib']); % datestr(masterTime-hours(ESTOFStimeOffset(e)),'YYYY-mm-DD_HH') '_f' num2str(0,'%03d') '.grib']);
[meshX,meshY] = meshgrid(ESTOFS{'x'}(:),ESTOFS{'y'}(:)); % [meshX,meshY] = meshgrid(ESTOFS{'x'}(:),ESTOFS{'y'}(:));
[lonESTOFS,latESTOFS] = m_xy2ll(meshX.*1000,meshY.*1000); % [lonESTOFS,latESTOFS] = m_xy2ll(meshX.*1000,meshY.*1000);
clear meshX meshY
latESTOFS = ncread(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ...
datestr(masterTime-hours(ESTOFStimeOffset(e)),'YYYY-mm-DD_HH') '_f' num2str(0,'%03d') '.nc'],'latitude');
lonESTOFS = wrapTo180(ncread(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ...
datestr(masterTime-hours(ESTOFStimeOffset(e)),'YYYY-mm-DD_HH') '_f' num2str(0,'%03d') '.nc'],'longitude'));
% clear meshX meshY
ESTOFS_Val{e} = 1; ESTOFS_Val{e} = 1;
catch catch
ESTOFS_Val{e} = 0; ESTOFS_Val{e} = 0;
@ -1764,11 +1865,17 @@ try
[estofIDX] = NearestValue(wlDeg,lonESTOFS,latESTOFS); %Lat [estofIDX] = NearestValue(wlDeg,lonESTOFS,latESTOFS); %Lat
for i = 1:ESTOFSrunLength(e)+1 for i = 1:ESTOFSrunLength(e)+1
ESTOFS = ncgeodataset(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ... % ESTOFS = ncgeodataset(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ...
datestr(masterTime-hours(ESTOFStimeOffset(e)),'YYYY-mm-DD_HH') '_f' num2str(i-1,'%03d') '.grib']); % datestr(masterTime-hours(ESTOFStimeOffset(e)),'YYYY-mm-DD_HH') '_f' num2str(i-1,'%03d') '.grib']);
% waterLevelsESTOFS{e}(i,Stat) = squeeze(ESTOFS{'Ocean_Surface_Elevation_Relative_to_Geoid_surface'}(1,estofIDX(1),estofIDX(2)));
%
waterLevelsESTOFS{e}(i,Stat) = squeeze(ncread(['D:\DUNEX_RT\Archive\ESTOFS\ESTOFS_' ...
datestr(masterTime-hours(ESTOFStimeOffset(e)),'YYYY-mm-DD_HH') '_f' num2str(i-1,'%03d') '.nc'],...
'ELEV_surface',[estofIDX(1),estofIDX(2),1], [1, 1, 1]));
waterLevelsESTOFS{e}(i,Stat) = squeeze(ESTOFS{'Ocean_Surface_Elevation_Relative_to_Geoid_surface'}(1,estofIDX(1),estofIDX(2)));
end end
waterLevelsESTOFSINTER{e}(:,Stat) = interp1(masterTime-hours(ESTOFStimeOffset(e)):minutes(60):masterTime+hours(48),... waterLevelsESTOFSINTER{e}(:,Stat) = interp1(masterTime-hours(ESTOFStimeOffset(e)):minutes(60):masterTime+hours(48),...
waterLevelsESTOFS{e}(:,Stat),masterTime-hours(ESTOFStimeOffset(e)):minutes(15):masterTime+hours(48),'spline'); waterLevelsESTOFS{e}(:,Stat),masterTime-hours(ESTOFStimeOffset(e)):minutes(15):masterTime+hours(48),'spline');
end end
@ -1778,6 +1885,10 @@ try
end end
end end
% Remove ESTOFS Netcdf files older than a week
system('ForFiles /p "D:\DUNEX_RT\Archive\ESTOFS" /s /d -7 /c "cmd /c del /q @file"')
%% Validation import WL %% Validation import WL
% Duck % Duck
@ -1849,18 +1960,35 @@ try
% [hohonuBase num2str(hohonuStation(Stat)) '/statistic/?cleaned=false&format=json&from='... % [hohonuBase num2str(hohonuStation(Stat)) '/statistic/?cleaned=false&format=json&from='...
% datestr(dateStartM,'yyyy-mm-dd') '+00%3A00&to=' datestr(dateEndM,'yyyy-mm-dd')... % datestr(dateStartM,'yyyy-mm-dd') '+00%3A00&to=' datestr(dateEndM,'yyyy-mm-dd')...
% '+23%3A59']); % '+23%3A59']);
websave(fname,... % Try catch for 429 errpr
[hohonuBase num2str(hohonuStation(Stat)) '/statistic/?datum=NAVD&from='... try
datestr(dateStartM,'yyyy-mm-dd') '%' datestr(dateStartM,'HHMM:SS') '&to='... websave(fname,...
datestr(dateEndM,'yyyy-mm-dd') '%' datestr(dateEndM,'HHMM:SS')... [hohonuBase num2str(hohonuStation(Stat)) '/statistic/?datum=NAVD&from='...
'&cleaned=false&format=json&station_type=Hohonu&tz=000000'], options); datestr(dateStartM,'yyyy-mm-dd') '%20' datestr(dateStartM,'HH:MM') '&to='...
datestr(dateEndM,'yyyy-mm-dd') '%20' datestr(dateEndM,'HH:MM')...
'&cleaned=False&format=json&station_type=Hohonu&tz=000000'], options);
catch
statWL(:,Stat) = 0;
fid = fopen('errorFile.log','a+');
% write the error to file
% first line: message
fprintf(fid,'Hohonu Error\n');
% close file
fclose(fid)
continue
end
fid = fopen(fname); fid = fopen(fname);
raw = fread(fid,inf); raw = fread(fid,inf);
str = char(raw'); str = char(raw');
fclose(fid); fclose(fid);
hohonuIN = jsondecode(str); hohonuLOAD = jsondecode(str);
% save data where not nan
hohonuIN.data{1} = hohonuLOAD.data{1}(~isnan(hohonuLOAD.data{2}));
hohonuIN.data{2} = hohonuLOAD.data{2}(~isnan(hohonuLOAD.data{2}));
% Filter out data more than 1 standard deviation % Filter out data more than 1 standard deviation
% hmean = mean((hohonuDatums(Stat) - hohonuIN.data{2}) * 0.001); % hmean = mean((hohonuDatums(Stat) - hohonuIN.data{2}) * 0.001);
@ -1874,9 +2002,12 @@ try
hohonuFilt1 = abs((hohonuIN.data{2} * 0.3048))>(abs(hmean)+hstd*1.5); hohonuFilt1 = abs((hohonuIN.data{2} * 0.3048))>(abs(hmean)+hstd*1.5);
% Sharp jump filter- if a jump is greater than 10x the average % Sharp jump filter- if a jump is greater than 10x the average
% jump % jump or greater than 0.1 m Plus before and after points
hohonuFilt2 = [0;abs(diff(hohonuIN.data{2} * 0.3048))>... hohonuFilt2A = [0;abs(diff(hohonuIN.data{2} * 0.3048))>...
(mean(diff(hohonuIN.data{2} * 0.3048),'omitnan')*10)]; min(abs(mean(diff(hohonuIN.data{2} * 0.3048),'omitnan')*10),0.1)];
hohonuFilt2B = [0; hohonuFilt2A(1:end-1)];
hohonuFilt2C = [hohonuFilt2A(2:end); 0];
hohonuFilt2 = hohonuFilt2A | hohonuFilt2B | hohonuFilt2C;
dateStartMtz = datetime(dateStartM,'TimeZone','UTC'); dateStartMtz = datetime(dateStartM,'TimeZone','UTC');
dateEndMtz = datetime(dateEndM,'TimeZone','UTC'); dateEndMtz = datetime(dateEndM,'TimeZone','UTC');
@ -1884,10 +2015,18 @@ try
% statWL(:,Stat) = interp1(... % statWL(:,Stat) = interp1(...
% datetime(hohonuIN.data{1}(~hohonuFilt),'InputFormat','yyyy-MM-dd''T''HH:mm:SSZ','TimeZone','UTC'),... % datetime(hohonuIN.data{1}(~hohonuFilt),'InputFormat','yyyy-MM-dd''T''HH:mm:SSZ','TimeZone','UTC'),...
% (hohonuDatums(Stat) - hohonuIN.data{2}(~hohonuFilt)) * 0.001,dateStartMtz:minutes(15):dateEndMtz); % (hohonuDatums(Stat) - hohonuIN.data{2}(~hohonuFilt)) * 0.001,dateStartMtz:minutes(15):dateEndMtz);
statWL(:,Stat) = interp1(...
datetime(hohonuIN.data{1}(~hohonuFilt1 & hohonuFilt2),'InputFormat','yyyy-MM-dd''T''HH:mm:SSZ','TimeZone','UTC'),...
hohonuIN.data{2}(~hohonuFilt1 & hohonuFilt2) * 0.3048,dateStartMtz:minutes(15):dateEndMtz);
% Skip if no data or if more than 33% of the points are
% filtered
if isempty(hohonuIN.data{2}) ||...
isempty(hohonuIN.data{2}(~hohonuFilt1 & ~hohonuFilt2) * 0.3048) ||...
sum(~hohonuFilt1 & ~hohonuFilt2) < length(hohonuFilt2)/3
statWL(:,Stat) = 0;
else
statWL(:,Stat) = interp1(...
datetime(hohonuIN.data{1}(~hohonuFilt1 & ~hohonuFilt2),'InputFormat','yyyy-MM-dd''T''HH:mm:SSZ','TimeZone','UTC'),...
hohonuIN.data{2}(~hohonuFilt1 & ~hohonuFilt2) * 0.3048,dateStartMtz:minutes(15):dateEndMtz);
end
catch err catch err
%open file %open file
fid = fopen('errorFile.log','a+'); fid = fopen('errorFile.log','a+');
@ -2260,7 +2399,7 @@ try
modelwlPlot(1:213,locLine(Loc)+4) = num2cell(statWL(:,Loc)); modelwlPlot(1:213,locLine(Loc)+4) = num2cell(statWL(:,Loc));
end end
elseif Loc>=17 % Hohonu starts at 17 elseif Loc>=17 % Hohonu starts at 17
if isnan(statWL(1,Loc-12)) %-12 to get to Hohonu IDX if all(isnan(statWL(:,Loc-12))) %-12 to get to Hohonu IDX
modelwlPlot(1:385,locLine(Loc)+4) = num2cell(zeros(385,1)); modelwlPlot(1:385,locLine(Loc)+4) = num2cell(zeros(385,1));
else else
modelwlPlot(1:213,locLine(Loc)+4) = num2cell(statWL(:,Loc-12)); modelwlPlot(1:213,locLine(Loc)+4) = num2cell(statWL(:,Loc-12));
@ -2424,7 +2563,7 @@ catch err
end end
%% %%
quit % quit

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

208
Rudy2014_Compare.m Normal file
View File

@ -0,0 +1,208 @@
%% Compare 2014 Delft3D run with Rudy data
% AJMR, July 20, 2022
%
% Workflow: read observation-point locations from KML files, load the
% per-point observation spreadsheets, find the nearest Delft3D grid cell
% across three nested grids, extract modelled temperature at those cells,
% then plot and compute fit statistics against the Rudy and WaterTrax
% observations.
%
% NOTE(review): relies on network shares (\\COASTLINES..., \\batchelor...)
% and project helpers (read_kml, wgs2utm, wlgrid, qpread, NearestValue,
% rsquare) being on the path.
addpath(genpath('matlab/applications/delft3d_matlab'),'-begin')
%% Read in observation points
clear obsPtsLon obsPtsLat
obsFiles = dir("\\COASTLINES.appsci.queensu.ca\Rudy2014\Google earth objects");
obsCount = 1;
% Read in in order 1-13, L2, PE to match spreadsheets
% NOTE(review): the hard-coded index list below depends on the ordering of
% dir() output -- re-check it if files are added or renamed in the share.
for i = [7 13:20 8:10 12 6 4]
[obsPtsLon(obsCount), obsPtsLat(obsCount)] = read_kml(['\\COASTLINES.appsci.queensu.ca\Rudy2014\Google earth objects\' obsFiles(i).name]);
% Convert WGS84 lon/lat to UTM zone 18N so distances match the model grids
[obsPtsUTMx(obsCount), obsPtsUTMy(obsCount)] = wgs2utm(obsPtsLat(obsCount),obsPtsLon(obsCount),18,'N');
obsCount = obsCount + 1;
end
%% Read in data at observation points
clear obsDat
% NOTE(review): the file list is taken from the local D:\ copy but the data
% are read from the \\COASTLINES share below -- assumes both folders hold
% identical file names; confirm.
datFiles = dir ("D:\Alexander\Rudy2014\Point Sorted Data");
% Read in in order 1-13, L2, PE to match spreadsheets
obsCount = 1;
% Skip the first three dir() entries ('.', '..' and, presumably, one
% non-data file -- TODO confirm)
for i = 4:length(datFiles)
% Read data as table
% obsDat row 2 holds the measurement table for each point/date
obsDat{2, obsCount} = readtable(['\\COASTLINES.appsci.queensu.ca\Rudy2014\Point Sorted Data\' ...
datFiles(i).name],'DataRange','X3:AQ17','VariableNamesRange',...
'X1:AQ1', 'VariableUnitsRange', 'X2:AQ2');
% Drop empty columns
obsDat{2, obsCount} = removevars(obsDat{2, obsCount},{ ...
'Var3', 'Var7', 'Var9', 'Var11', 'Var13', 'Var15', 'Var17', 'Var19'});
% Add Date
% Cell C3 holds the date string, D3 the time-of-day
dateIN = readcell(['D:\Alexander\Rudy2014\Point Sorted Data\' ...
datFiles(i).name], 'Range','C3:D3');
try
% Time read as a numeric day fraction
obsDat{1, obsCount} = datetime(dateIN{1},'InputFormat', 'MM/dd/uuuu') + ...
days(dateIN{2});
catch
% Time read as a datetime/duration instead -- take hours and minutes
obsDat{1, obsCount} = datetime(dateIN{1},'InputFormat', 'MM/dd/uuuu') + ...
hours(hour(dateIN{2})) + minutes(minute(dateIN{2}));
end
obsCount = obsCount + 1;
end
%% Read in Model results at observation points
%% Read in grids
% Three nested Delft3D grids from the Oct 7 Rudy run
Grid{1} = wlgrid('read','\\batchelor.appsci.queensu.ca\G\Fateme\Oct_7_RudyRun\G9_1.grd');
Grid{2} = wlgrid('read','\\batchelor.appsci.queensu.ca\G\Fateme\Oct_7_RudyRun\G9_2.grd');
Grid{3} = wlgrid('read','\\batchelor.appsci.queensu.ca\G\Fateme\Oct_7_RudyRun\G9_3.grd');
% Model output timestamps (assumed identical across the three grids --
% only grid 1's history is queried; TODO confirm)
modelTimes = datetime(datevec(qpread(qpfopen('\\batchelor.appsci.queensu.ca\G\Fateme\Oct_7_RudyRun\trim-G9_1.dat'),...
1,'water level','times')));
%% Match grid points
% For each observation point, pick whichever grid's nearest cell is closest
% in Euclidean distance; gridMatch holds the (m,n) index, gridMatchID the grid
for i = 1:length(obsPtsUTMx)
[GridMatch{1},GridDist{1}] = NearestValue([obsPtsUTMx(i), obsPtsUTMy(i)], Grid{1}.X, Grid{1}.Y);
[GridMatch{2},GridDist{2}] = NearestValue([obsPtsUTMx(i), obsPtsUTMy(i)], Grid{2}.X, Grid{2}.Y);
[GridMatch{3},GridDist{3}] = NearestValue([obsPtsUTMx(i), obsPtsUTMy(i)], Grid{3}.X, Grid{3}.Y);
if sqrt(GridDist{1}(1)^2 + GridDist{1}(2)^2) < sqrt(GridDist{2}(1)^2 + GridDist{2}(2)^2) && ...
sqrt(GridDist{1}(1)^2 + GridDist{1}(2)^2) < sqrt(GridDist{3}(1)^2 + GridDist{3}(2)^2)
gridMatch(:, i) = GridMatch{1};
gridMatchID(i) = 1;
elseif sqrt(GridDist{2}(1)^2 + GridDist{2}(2)^2) <= sqrt(GridDist{1}(1)^2 + GridDist{1}(2)^2) && ...
sqrt(GridDist{2}(1)^2 + GridDist{2}(2)^2) <= sqrt(GridDist{3}(1)^2 + GridDist{3}(2)^2)
gridMatch(:, i) = GridMatch{2};
gridMatchID(i) = 2;
elseif sqrt(GridDist{3}(1)^2 + GridDist{3}(2)^2) <= sqrt(GridDist{1}(1)^2 + GridDist{1}(2)^2) && ...
sqrt(GridDist{3}(1)^2 + GridDist{3}(2)^2) <= sqrt(GridDist{2}(1)^2 + GridDist{2}(2)^2)
gridMatch(:, i) = GridMatch{3};
gridMatchID(i) = 3;
end
end
%% Extract time series surface temperatures for each point
clear modelTemp
% NOTE(review): modelTStep is computed here but never used below -- confirm
% whether it is still needed.
for d = 1:size(obsDat,2)
modelTStep(d) = NearestValue(obsDat{1, d}, modelTimes);
end
% Pull the full temperature time series at each matched cell from the
% appropriate grid's trim file
for i = 1:length(gridMatch)
modelIN = qpread(qpfopen(['\\batchelor.appsci.queensu.ca\G\Fateme\Oct_7_RudyRun\trim-G9_' ...
num2str(gridMatchID(i)) '.dat']),1,'temperature','griddata', ...
0,gridMatch(1, i),gridMatch(2, i),0);
modelTemp(:,:,i) = modelIN.Val;
disp(i)
end
%% Read in other spreadsheet
boundDat = readtable("\\COASTLINES.appsci.queensu.ca\Rudy2014\Boundary condition data.xlsx", 'Sheet', ...
'Boundary conditions');
%% Read in WaterTrax
traxDat = readtable("\\COASTLINES.appsci.queensu.ca\Rudy2014\Copy of Alexander SheetModelling.xlsx", 'Sheet', ...
'2014_Rudy');
%% Plotting
% Compare modelled temperature at point 13 (depth-mean, surface and bottom
% layers -- presumably; verify layer ordering) against observations
figure
hold on
plot(modelTimes,mean(modelTemp(:,:,13),2),'k')
plot(modelTimes,modelTemp(:,1,13),'m')
plot(modelTimes,modelTemp(:,8,13),'b')
% plot(boundDat.Date,boundDat.Temp_1)
% scatter(traxDat.Date+hours(14),traxDat.WaterTEffluent__C_,'r')
% WaterTrax samples assumed taken at 14:00; last 7 rows excluded
scatter(traxDat.Date(1:end-7)+hours(14),traxDat.WaterTEffluent__C_(1:end-7),'r')
% Rudy observations: table row 13 is the Temp record for this point;
% index 16 is skipped -- TODO confirm why
for d = [1:15 17:size(obsDat,2)]
scatter(obsDat{1, d}, obsDat{2, d}{13,"Temp"},'b')
end
legend('Model', 'Observations', 'Rudy Observations')
ylabel('Water Temperature °C')
%% Save Model Data
writematrix(squeeze(mean(modelTemp(:,:,1:13),2)),...
'D:\Alexander\Rudy2014\modelData.xlsx')
writematrix(modelTimes,...
'D:\Alexander\Rudy2014\modelTimes.xlsx')
%% Stats
% Match each WaterTrax sample (at +14 h) to the nearest model timestep
statCount = 1;
for d = 1:length(traxDat.Date)
traxIDXs(statCount) = NearestValue(traxDat.Date(d)+hours(14),modelTimes);
statCount = statCount + 1;
end
% [traxR2, traxRMSE] = rsquare(traxDat.WaterTEffluent__C_, mean(modelTemp(traxIDXs,:,13),2), false)
% [traxR2, traxRMSE] = rsquare(traxDat.WaterTEffluent__C_(1:end-7), modelTemp(traxIDXs(1:end-7),8,13), false)
[traxR2, traxRMSE] = rsquare(mean(modelTemp(traxIDXs(1:end-7),:,13),2),...
traxDat.WaterTEffluent__C_(1:end-7))
writematrix([mean(modelTemp(traxIDXs(1:end),:,13),2) traxDat.WaterTEffluent__C_(1:end)],...
'D:\Alexander\Rudy2014\matchedWaterTrax.xlsx')
% Match the Rudy observations (again skipping index 16) to model timesteps
statCount = 1;
for d = [1:15 17:size(obsDat,2)]
rudIDXs(statCount) = NearestValue(obsDat{1, d},modelTimes);
rudDat(statCount) = obsDat{2, d}{13,"Temp"};
statCount = statCount + 1;
end
% [traxR2, traxRMSE] = rsquare(mean(modelTemp(traxIDXs,:,13),2), traxDat.WaterTEffluent__C_)
[rudR2, rudRMSE] = rsquare(rudDat', squeeze(modelTemp(rudIDXs,4,13)))
% [traxR2, traxRMSE] = rsquare(mean(modelTemp(traxIDXs(1:end-7),:,13),2),...
% traxDat.WaterTEffluent__C_(1:end-7))

49
dio-wave-errors.txt Normal file
View File

@ -0,0 +1,49 @@
DioError 902: Could not open FLOW2WAVE_DATA.txt_.stream for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_.stream
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 902: Could not open FLOW2WAVE_DATA.txt_.stream for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_.stream
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 203: Could not open FLOW2WAVE_DATA for reading
DioError 902: Could not open FLOW2WAVE_DATA.txt_.stream for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_.stream
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 902: Could not open FLOW2WAVE_DATA.txt_.stream for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_.stream
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 203: Could not open FLOW2WAVE_DATA for reading
DioError 203: Could not open FLOW2WAVE_DATA for reading
DioError 203: Could not open FLOW2WAVE_DATA for reading
DioError 203: Could not open FLOW2WAVE_DATA for reading
DioError 203: Could not open FLOW2WAVE_DATA for reading
DioError 203: Could not open FLOW2WAVE_DATA for reading
DioError 203: Could not open FLOW2WAVE_DATA for reading
DioError 902: Could not open FLOW2WAVE_DATA.txt_.stream for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_.stream
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 902: Could not open FLOW2WAVE_DATA.txt_.stream for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 902: Could not open FLOW2WAVE_DATA.txt_FLOW2WAVE_DATA.data for deletion
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_.stream
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 901: Could not set available: WAVE2FLOW_DATA.txt_WAVE2FLOW_DATA.data
DioError 203: Could not open FLOW2WAVE_DATA for reading

Binary file not shown.

63
py_addpath.m Normal file
View File

@ -0,0 +1,63 @@
%Add directory to import search path for the instance of
%the Python interpreter currently controlled by MATLAB
%
%EXAMPLE USAGE
% >> py_addpath('C:\Documents\ERPResults')
%
%REQUIRED INPUTS
% directory - Directory to add the Python import search path
% MATLAB_too - If true (or 1), directory will also be added to the
% MATLAB path. {default: false}
%
%OPTIONAL OUTPUT
% new_py_path - a cell array of the directories on the updated
% Python path; to get this output without updating the
% Python path, use an empty string as the input:
% py_path = py_addpath('')
%
%VERSION DATE: 3 November 2017
%AUTHOR: Eric Fields
%
%NOTE: This function is provided "as is" and any express or implied warranties
%are disclaimed.
%Copyright (c) 2017, Eric Fields
%All rights reserved.
%This code is free and open source software made available under the 3-clause BSD license.
function new_py_path = py_addpath(directory, MATLAB_too)
%Prepend DIRECTORY onto the import search path of the Python interpreter
%controlled by MATLAB (py.sys.path). When MATLAB_too is true, the
%directory is also added to the MATLAB path. Calling with an empty
%string leaves the Python path untouched, so py_addpath('') acts as a
%pure query when the output argument is requested.
%validate the input
if ~ischar(directory)
    error('Input must be a string')
elseif ~exist(directory, 'dir') && ~isempty(directory)
    error('%s is not a valid directory', directory)
end
%resolve a relative path to an absolute one via Python's own os.path
if ~isempty(directory)
    directory = char(py.os.path.abspath(directory));
end
%insert into the Python path only when it is not already there
alreadyPresent = any(strcmp(get_py_path(), directory));
if ~alreadyPresent
    interpPath = py.sys.path;
    interpPath.insert(int64(1), directory);
end
%mirror onto the MATLAB path when requested
if nargin > 1 && MATLAB_too
    addpath(directory);
end
%optionally return the amended sys.path as a cell array
if nargout
    new_py_path = get_py_path();
end
end
function current_py_path = get_py_path()
%Return the interpreter's current sys.path as a column cell array of char vectors
rawEntries = cell(py.sys.path);
current_py_path = cell(size(rawEntries));
for k = 1:numel(rawEntries)
    current_py_path{k} = char(rawEntries{k});
end
current_py_path = current_py_path';
end

186
read_kml.m Normal file
View File

@ -0,0 +1,186 @@
function [x,y,z] = read_kml(fileName)
% READ_KML Reads in (x,y,z) from a GoogleEarth kml file.
%
% I have tried to make this code as robust as possible, but it may crash
% or give unexpected results if the file is not formatted exactly as
% expected.
%
% Example:
% [x,y,z] = read_kml('test.kml');
%
% where test.kml looks like:
% <?xml version="1.0" encoding="UTF-8"?>
% <kml xmlns="http://earth.google.com/kml/2.1">
% <Placemark>
% <name>test_length</name>
% <description>junk</description>
% <LineString>
% <tessellate>1</tessellate>
% <coordinates>
% -73.65138440596144,40.45517368645169,0 -73.39056199144957,40.52146569128411,0 -73.05890757388369,40.59561213913959,0 -72.80519929505505,40.66961872411046,0 -72.61180114704385,40.72997510603909,0 -72.43718187249095,40.77509309196679,0 </coordinates>
% </LineString>
% </Placemark>
% </kml>
%
% afarris@usgs.gov 2016March09, now can read multiple sets of coordinates
% afarris@usgs.gov 2006November
%% open the data file and find the beginning of the data
fid=fopen(fileName);
if fid < 0
error('could not find file')
end
% This loop reads the data file one line at a time. If it finds the word
% <coordinates>, it knows there is data until it reads the word
% </coordinates>. After loading this data, it keeps reading the file,
% looking for another instance of <coordinates> until it finds the word
% </kml> which signals that the end of the file has been reached.
% Some files have all the data on one line, others have newline characters
% in various points in the file. I hope this code works in all cases.
done=0;
endoffile = 0;
ar = 1;
while endoffile == 0
while done == 0
junk = fgetl(fid);
f = strfind(junk,'<coordinates>');
ff = strfind(junk,'</kml>');
if ~isempty(f)
done = 1;
elseif ~isempty(ff)
endoffile = 1;
done = 1;
end
end
if endoffile
break
end
% 'junk' either ends with the word '<coordinates>' OR
% some data follows the word '<coordinates>'
if (f + 13) >= length(junk)
% no data on this line
% done2 is set to zero so the next loop will read the data
done2 = 0;
else
% there is some data in this line following '<coordinates>'
clear f2
f2 = strfind(junk,'</coordinates>');
if ~isempty(f2)
%all data is on this line
% there may be multiple sets of data on this one line
% I read them all
for i = 1 : size(f2,2)
alldata{ar} = junk(f(i)+13:f2(i)-1);
% I add in whitespace b/c sometimes it is missing
alldata{ar+1} = ' ';
ar = ar+2;
end
% done2 is set to one because the next loop does not need to run
done2 = 1;
else
% only some data is on this line
alldata{ar} = junk(f+13:end);
% I add in whitespace b/c sometimes it is missing
alldata{ar+1} = ' ';
ar = ar+2;
% done2 is set to zero so the next loop will read the rest of the data
done2 = 0;
end
% check to see if at end of the file
ff = strfind(junk,'</kml>');
if ~isempty(ff)
% no more data
endoffile = 1;
break
else
% need to keep looking for more data
done = 0;
end
end
% If not all the data was on the line with the word <coordinate>,
% read in the data
while done2 == 0
% read in line from data file
junk = fgetl(fid);
f = strfind(junk,'</coordinates>');
if isempty(f) == 1
% no ending signal, just add this data to the rest
alldata{ar} = junk;
ar = ar + 1;
else
% ending signal is present
done = 0;
if f < 20
% </coordinates> is in the beginning of the line, ergo no data
% on this line; just end the loop
done2 = 1;
else
% the ending signal (</coordinates>) is present: remove it,
% add data to the rest and signal the end of the loop
f2 = strfind(junk,'</coordinates>');
alldata{ar} = junk(1:f2-1);
ar = ar + 1;
done2 = 1;
disp('done with line')
end
end
% check to see if at end of the file
ff = strfind(junk,'</kml>');
if ~isempty(ff)
% no more data
endoffile = 1;
break
else
% need to keep looking for more data
done = 0;
end
end
end
fclose(fid);
% Fail with a clear message (instead of an obscure undefined-variable
% error below) when the file contained no <coordinates> block at all.
if ~exist('alldata','var')
error('read_kml:noData','no <coordinates> data found in %s',fileName)
end
%% get the data into neat vectors
% I have to divide the string into X, Y and Z values.
%
% This is hard b/c there is no comma between points
% (just commas between x and y, and between
% y and z) ie; -70.0000,42.0000,0 -70.1000,40.10000,0 -70.2,....
%
% I used to do this by finding commas and spaces, now I use
% 'strsplit'! Thank you Matlab!
% 'alldata' is one huge cell
% turn alldata into regular vector so it is easier to work with
data = cell2mat(alldata);
% data is one huge string, split it so there is separate element for each number
C = strsplit(data,{',',' '});
% sometimes first and/or last element in C is empty, this causes problems
len = size(C,2);
if isempty(C{1}) && isempty(C{end})
D = C(2:len-1);
elseif isempty(C{1}) && ~isempty(C{end})
D = C(2:end);
elseif isempty(C{end}) && ~isempty(C{1})
D = C(1:len-1);
else
% BUG FIX: when neither end element is empty (e.g. a multi-line file
% whose data line ends flush with a number), keep everything.
% Previously D was left undefined here, crashing below.
D = C;
end
% There has GOT to be a better way to split C into 3 variables!
a = 1;
for i = 1 : 3: length(D)-2
x(a,1) = str2double(D{i});
a=a+1;
end
a=1;
for i = 2 : 3: length(D)-1
y(a,1) = str2double(D{i});
a=a+1;
end
a=1;
for i = 3 : 3: length(D)
z(a,1) = str2double(D{i});
a=a+1;
end

74
rsquare.m Normal file
View File

@ -0,0 +1,74 @@
function [r2 rmse] = rsquare(y,f,varargin)
% Coefficient of determination (R-square) and RMSE of a model fit.
%
% [r2 rmse] = rsquare(y,f) compares actual data Y against model data F
% using the traditional R-square (fraction of variance about the mean
% explained by the model), clipped so it never goes below zero.
%
% [r2 rmse] = rsquare(y,f,c) with logical C == false uses the alternate
% form for models without a constant term:
% R2 = 1 - SUM((Y-F).^2)/SUM(Y.^2)
% which warns and clips to zero when it comes out negative.
%
% Pairs where either Y or F is NaN are dropped before either statistic
% is computed. Y and F must have identical sizes.
%
% Original author: Jered R Wells, 11/17/11 (v1.2, 02/14/2012), with
% thanks to John D'Errico (REF. File ID: #34765).

% Resolve the optional constant-term flag (defaults to the traditional form)
switch numel(varargin)
    case 0
        useConst = true;
    case 1
        if ~islogical(varargin{1})
            error 'C must be logical (TRUE||FALSE)'
        end
        useConst = varargin{1};
    otherwise
        error 'Too many input arguments';
end
% The two inputs must match element-for-element
if ~all(size(y)==size(f)); error 'Y and F must be the same size'; end
% Drop any pair containing a NaN on either side
keep = ~(isnan(y) | isnan(f));
y = y(keep);
f = f(keep);
residSS = sum((y(:)-f(:)).^2);
if useConst
    % Traditional R-square, clipped at zero
    r2 = max(0, 1 - residSS/sum((y(:)-mean(y(:))).^2));
else
    % No-constant variant; a negative value flags a questionable model form
    r2 = 1 - residSS/sum((y(:)).^2);
    if r2 < 0
        % http://web.maths.unsw.edu.au/~adelle/Garvan/Assays/GoodnessOfFit.html
        warning('Consider adding a constant term to your model') %#ok<WNTAG>
        r2 = 0;
    end
end
rmse = sqrt(mean((y(:) - f(:)).^2));

View File

@ -106,4 +106,5 @@ Fwndgpr = #hrrr.ampr#
AirOut = #Y# AirOut = #Y#
Fileva = #TemperatureRT.eva# Fileva = #TemperatureRT.eva#
Maseva = #Y# Maseva = #Y#
PavBnd = 101300.0
Commnt = Commnt =

View File

@ -13,10 +13,10 @@ Commnt = no. dry points: 1068
Fildry = #NCShoreALLSquare2_100.dry# Fildry = #NCShoreALLSquare2_100.dry#
Commnt = no. thin dams: 0 Commnt = no. thin dams: 0
Commnt = Commnt =
Itdate = #2022-05-31# Itdate = #2023-07-19#
Tunit = #M# Tunit = #M#
Tstart = 0.0000000e+00 Tstart = 7.2000000e+02
Tstop = 2.8800000e+03 Tstop = 3.6000000e+03
Dt = 0.25 Dt = 0.25
Tzone = 0 Tzone = 0
Commnt = Commnt =
@ -93,9 +93,9 @@ PHhydr = #YYYYYY#
PHderv = #YYY# PHderv = #YYY#
PHproc = #YYYYYYYYYY# PHproc = #YYYYYYYYYY#
PHflux = #YYYY# PHflux = #YYYY#
Flmap = 0.0000000e+00 60 2.8800000e+03 Flmap = 7.2000000e+02 60 3.6000000e+03
Flhis = 0.0000000e+00 10 2.8800000e+03 Flhis = 7.2000000e+02 10 3.6000000e+03
Flpp = 0.0000000e+00 60 2.8800000e+03 Flpp = 7.2000000e+02 60 3.6000000e+03
Flrst = 60 Flrst = 60
Commnt = Commnt =
Online = #N# Online = #N#
@ -107,4 +107,4 @@ Fwndgpr = #hrrr.ampr#
AirOut = #Y# AirOut = #Y#
Fileva = #TemperatureRT.eva# Fileva = #TemperatureRT.eva#
Maseva = #Y# Maseva = #Y#
Commnt = PavBnd = 101300.0

View File

@ -10,7 +10,7 @@ addpath(genpath(fullfile(home, 'cdm', 'utilities')));
warning off warning off
try try
setup_nctoolbox_java; % setup_nctoolbox_java;
catch me catch me
ex = MException('MBARI:NCTOOLBOX', 'Failed to setup the Java classpath'); ex = MException('MBARI:NCTOOLBOX', 'Failed to setup the Java classpath');
ex.throw ex.throw