Iss65: Tidal Plots for Matlab #100

Open
wants to merge 10 commits into base: develop
62 changes: 35 additions & 27 deletions examples/river_example.html

Large diffs are not rendered by default.

Binary file modified examples/river_example.mlx
Binary file not shown.
49 changes: 27 additions & 22 deletions examples/tidal_example.html

Large diffs are not rendered by default.

Binary file modified examples/tidal_example.mlx
Binary file not shown.
80 changes: 14 additions & 66 deletions mhkit/river/resource/exceedance_probability.m
@@ -5,81 +5,29 @@
%
% Parameters
% ----------
% Q : Discharge data [m3/s]
%
% Pandas dataframe indexed by time [datetime or s]
%
% To make a pandas data frame from user supplied frequency and spectra
% use py.mhkit_python_utils.pandas_dataframe.timeseries_to_pandas(timeseries,time,x)
%
% OR
%
% structure of form:
%
% Q.Discharge
%
% Q.time
% Q : Array
% Discharge data [m3/s]
%
% Returns
% -------
% F : Structure
%
%
% F.F: Exceedance probability [unitless]
%
% F.time: time [epoch time (s)]
%
% F : Array
% Exceedance probability [unitless]
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% First we make a sorted array of discharge values
sort_q = sort(Q,'descend');
% Next we make a ranking array
rank = [1:numel(sort_q)];
% Now each value in Q is assigned a rank, using the same scheme as the
% pandas ranking algorithm with the 'max' option. For example, in the data
% set [1, 2, 2, 3] the rank positions are [1, 2, 3, 4], and the value 2 is
% assigned rank 3 because 3 is the maximum rank held by a 2 in the set.
assigned_rank = arrayfun(@(x) rank(find(sort_q==x,1,'last')), Q);

py.importlib.import_module('mhkit_python_utils');
py.importlib.import_module('mhkit');

if (isa(Q,'py.pandas.core.frame.DataFrame')~=1)
x=size(Q.Discharge);
li=py.list();
if x(2)>1
for i = 1:x(2)
app=py.list(Q.Discharge(:,i));
li=py.mhkit_python_utils.pandas_dataframe.lis(li,app);

end
elseif x(2) ==1
li=Q.Discharge;
end

if any(isdatetime(Q.time(1)))
si=size(Q.time);
for i=1:si(2)
Q.time(i)=posixtime(Q.time(i));
end
end
Q=py.mhkit_python_utils.pandas_dataframe.timeseries_to_pandas(li,Q.time,int32(x(2)));
end

EPpd=py.mhkit.river.resource.exceedance_probability(Q);

xx=cell(EPpd.axes);
v=xx{2};
vv=cell(py.list(py.numpy.nditer(v.values,pyargs("flags",{"refs_ok"}))));

vals=double(py.array.array('d',py.numpy.nditer(EPpd.values)));
sha=cell(EPpd.values.shape);
x=int64(sha{1,1});
y=int64(sha{1,2});
F = 100 * (assigned_rank / (numel(Q)+1));

vals=reshape(vals,[x,y]);

si=size(vals);
for i=1:si(2)
test=string(py.str(vv{i}));
newname=split(test,",");

F.(newname(1))=vals(:,i);

end
F.time=double(py.array.array('d',py.numpy.nditer(EPpd.index)));



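To illustrate the ranking scheme described in the comments above, here is a minimal, hand-worked usage sketch of the rewritten function; the sample vector and expected values are illustrative, worked out from the sort/rank logic in this hunk rather than taken from the repository's tests.

% Tie handling: both occurrences of a repeated value get the same rank.
Q = [1; 2; 2; 3];                % small discharge sample with a tie
F = exceedance_probability(Q);   % descending sort gives assigned ranks [4; 3; 3; 1]
% so F should come out as 100*[4; 3; 3; 1]/(4+1) = [80; 60; 60; 20]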
19 changes: 9 additions & 10 deletions mhkit/tests/River_TestResource.m
@@ -11,13 +11,13 @@ function test_Froude_number(testCase)

function test_exceedance_probability(testCase)
% Create arbitrary discharge between 0 and 8 (N=9)
Q = struct('Discharge',[0;1;2;3;4;5;6;7;8],'time',[0 1 2 3 4 5 6 7 8]);
Q = [0;1;2;3;4;5;6;7;8];
% Rank order for non-repeating elements simply adds 1 to each element
%if N=9, max F = 100((max(Q)+1)/10) = 90%
%if N=9, min F = 100((min(Q)+1)/10) = 10%
f = exceedance_probability(Q);
assertEqual(testCase,min(f.F), 10);
assertEqual(testCase,max(f.F), 90);
assertEqual(testCase,min(f), 10);
assertEqual(testCase,max(f), 90);
end

function test_polynomial_fit(testCase)
@@ -103,9 +103,9 @@ function test_plot_flow_duration_curve(testCase)
end

Q = struct('Discharge',[0;1;2;3;4;5;6;7;8],'time',[0 1 2 3 4 5 6 7 8]);
f = exceedance_probability(Q);
f = exceedance_probability(Q.Discharge);

plot_flow_duration_curve(Q.Discharge, f.F,"savepath",filename);
plot_flow_duration_curve(Q.Discharge, f,"savepath",filename);

assertTrue(testCase,isfile(filename));
delete(filename);
@@ -118,7 +118,7 @@ function test_plot_power_duration_curve(testCase)
end

Q = struct('Discharge',[0;1;2;3;4;5;6;7;8],'time',[0 1 2 3 4 5 6 7 8]);
f = exceedance_probability(Q);
f = exceedance_probability(Q.Discharge);
poly = polynomial_fit(0:1:8, 10*(0:1:8),1);
V = discharge_to_velocity(Q, poly.coef);
VV = V.V;
@@ -130,7 +130,7 @@ function test_plot_power_duration_curve(testCase)
% Power should be 10x greater and exclude the ends of V
P = velocity_to_power(V, poly.coef, cut_in, cut_out);

plot_power_duration_curve(P.P, f.F,"savepath",filename);
plot_power_duration_curve(P.P, f,"savepath",filename);

assertTrue(testCase,isfile(filename));
delete(filename);
@@ -145,9 +145,9 @@ function test_plot_velocity_duration_curve(testCase)
Q = struct('Discharge',[0;1;2;3;4;5;6;7;8],'time',[0 1 2 3 4 5 6 7 8]);
poly = polynomial_fit(0:1:8, 10*(0:1:8),1);
V = discharge_to_velocity(Q, poly.coef);
f = exceedance_probability(Q);
f = exceedance_probability(Q.Discharge);

plot_velocity_duration_curve(V.V, f.F,"savepath",filename);
plot_velocity_duration_curve(V.V, f,"savepath",filename);

assertTrue(testCase,isfile(filename));
delete(filename);
@@ -190,7 +190,6 @@ function test_plot_velocity_vs_power(testCase)
end

Q = struct('Discharge',[0;1;2;3;4;5;6;7;8],'time',[0 1 2 3 4 5 6 7 8]);
f = exceedance_probability(Q);
poly = polynomial_fit(0:1:8, 10*(0:1:8),1);
V = discharge_to_velocity(Q, poly.coef);
VV = V.V;
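As a quick standalone check of the bounds asserted in the tests above (assuming exceedance_probability is on the MATLAB path), a sketch with the same nine-value input:

Q = (0:8).';                      % nine distinct discharge values, N = 9
F = exceedance_probability(Q);    % assigned ranks run 1..9, so F = 100*(1:9)/10
fprintf('min F = %g%%, max F = %g%%\n', min(F), max(F));   % expect 10% and 90%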
7 changes: 3 additions & 4 deletions mhkit/tests/Tidal_TestResource.m
@@ -3,10 +3,10 @@
methods (Test)

function test_exceedance_probability(testCase)
Q = struct('Discharge',[0;1;2;3;4;5;6;7;8],'time',[1 2 3 4 5 6 7 8 9]);
Q = [0;1;2;3;4;5;6;7;8];
f = exceedance_probability(Q);
assertEqual(testCase,min(f.F), 10);
assertEqual(testCase,max(f.F), 90);
assertEqual(testCase,min(f), 10);
assertEqual(testCase,max(f), 90);
end

function test_principal_flow_directions(testCase)
@@ -112,7 +112,6 @@ function test_plot_phase_exceedance(testCase)
file_name = '../../examples/data/tidal/s08010.json';
data = read_noaa_json(file_name);
data.s = data.s/100;
data.Discharge = data.s;
width_direction = 1;
[direction1, direction2] = ...
principal_flow_directions(data.d,width_direction);
29 changes: 10 additions & 19 deletions mhkit/tidal/graphics/plot_tidal_phase_exceedance.m
@@ -80,34 +80,25 @@
isEbb = ~((data.d < upper_split) & (data.d >= lower_split));
end

s_ebb = struct('time', {data.time(isEbb)},...
'Discharge', {data.Discharge(isEbb)});
s_flood = struct('time', {data.time(~isEbb)},...
'Discharge', {data.Discharge(~isEbb)});
s_ebb = data.s(isEbb);
s_flood = data.s(~isEbb);

F = exceedance_probability(data).F;
F_ebb = exceedance_probability(s_ebb).F;
F_flood = exceedance_probability(s_flood).F;
F_ebb = exceedance_probability(s_ebb);
F_flood = exceedance_probability(s_flood);

decimals = round(options.bin_size/0.1);
s_new = [round(min(data.s),decimals):options.bin_size:...
round(max(data.s),decimals)+options.bin_size];
s_new = s_new(1:end-1); % drop the last edge: MATLAB's colon operator is
                        % closed on the right, while numpy.arange uses a
                        % half-open interval

py.importlib.import_module('scipy');
kwa = pyargs('bounds_error',false);
[~,i_ebb] = unique(s_ebb);
[~,i_flood] = unique(s_flood);

f_ebb = py.scipy.interpolate.interp1d(s_ebb.Discharge,...
F_ebb, kwa);
f_flood = py.scipy.interpolate.interp1d(s_flood.Discharge,...
F_flood, kwa);
F_ebb = interp1(s_ebb(i_ebb), F_ebb(i_ebb), s_new);
F_flood = interp1(s_flood(i_flood), F_flood(i_flood), s_new);

F_ebb = double(f_ebb(s_new));
F_flood = double(f_flood(s_new));

py.importlib.import_module('numpy');
F_max_total = py.numpy.nanmax(F_ebb) + py.numpy.nanmax(F_flood);
F_max_total = max(F_ebb(~isnan(F_ebb))) + max(F_flood(~isnan(F_flood)));

s_plot = repmat(s_new,2,1);
figure = area(s_plot.', [F_ebb/F_max_total*100; F_flood/F_max_total*100].');
@@ -125,4 +116,4 @@
exportgraphics(gca, options.savepath);
end

hold off
hold off
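The substitutions above can be checked in isolation. A minimal standalone sketch (the numbers are made up, not taken from the example data set) of why they are expected to preserve behaviour:

% interp1 returns NaN outside the sample range by default, which matches
% scipy.interpolate.interp1d with bounds_error set to false.
s  = [1 2 3];                        % unique sample points (hence the unique() calls)
Fi = [10 50 90];
Fq = interp1(s, Fi, [0.5 1.5 3.5])   % -> [NaN 30 NaN]

% max over the non-NaN elements stands in for numpy.nanmax.
max(Fq(~isnan(Fq)))                  % -> 30

% Bin edges equivalent to numpy.arange(lo, hi + bin, bin): MATLAB's colon
% operator is closed on the right, so the final edge is dropped.
lo = 0; hi = 1; bin = 0.25;
edges = lo:bin:hi + bin;             % [0 0.25 0.5 0.75 1 1.25]
edges = edges(1:end-1)               % [0 0.25 0.5 0.75 1], as arange would give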
62 changes: 33 additions & 29 deletions mhkit/tidal/io/read_noaa_json.m
@@ -19,42 +19,46 @@
%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

py.importlib.import_module('mhkit');
py.importlib.import_module('numpy');
fid = fopen(filename); % Open the file
raw = fread(fid,inf); % Read the contents
str = char(raw'); % Transformation
fclose(fid); % Close the file
init_data = jsondecode(str);

datap = py.mhkit.tidal.io.noaa.read_noaa_json(filename);

datac=cell(datap);
data=struct(datac{2});
data_df=datac{1};

fields=fieldnames(data);
data = struct();

% Move metadata from its own field to the main structure
fields = fieldnames(init_data.metadata);
for idx = 1:length(fields)
data.(fields{idx}) = string(data.(fields{idx}));
data.(fields{idx}) = convertCharsToStrings(...
init_data.metadata.(fields{idx}));
end
init_data = rmfield(init_data,'metadata');

xx=cell(data_df.axes);
v=xx{2};


vv=cell(py.list(py.numpy.nditer(v.values,pyargs("flags",{"refs_ok"}))));
% Move the data fields over and create time
fields = fieldnames(init_data);
shape = size(fieldnames(init_data.(fields{1})));
for idx = 1:length(fields)
data.(fields{idx}) = zeros(shape);
end

vals=double(py.array.array('d',py.numpy.nditer(data_df.values,pyargs("flags",{"refs_ok"}))));
sha=cell(data_df.values.shape);
x=int64(sha{1,1});
y=int64(sha{1,2});
% Loop over the time stamps (which are read in as the field names of each
% data column) and fill in the data

vals=reshape(vals,[x,y]);
si=size(vals);
time = fieldnames(init_data.(fields{1}));
data.time = cellfun(@(x) x(2:end), time, 'UniformOutput', false);
data.time = cellfun(@str2double,data.time);
data.time = transpose(data.time/1000.);
% Time is epoch time since 1970-01-01, given in milliseconds; dividing by
% 1000 converts it to seconds. Convert to a datetime with
% datetime(time,'ConvertFrom','epochtime','Epoch','1970-01-01')

for i=1:si(2)
test=string(py.str(vv{i}));
newname=split(test,",");
data.(newname(1))=vals(:,i);
for idx = 1:length(fields)
test = init_data.(fields{idx}).(time{1});
data.(fields{idx}) = cell2mat(struct2cell(init_data.(fields{idx})));
if ischar(test) || isstring(test)
data.(fields{idx}) = arrayfun(@str2double,data.(fields{idx}));
end
end

times = double( ...
py.mhkit_python_utils.pandas_dataframe.datetime_index_to_ordinal(data_df));

data.time = posixtime(datetime(times, 'ConvertFrom', 'datenum'));
end
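A short sketch of the timestamp handling described in the comments above; the field name below is hypothetical, chosen for illustration rather than taken from s08010.json:

% jsondecode cannot use a digit as the first character of a struct field,
% so the epoch-millisecond keys come back prefixed (e.g. with 'x'); strip
% the first character before converting.
field = 'x1545730000000';             % hypothetical decoded field name
t_ms  = str2double(field(2:end));     % epoch time in milliseconds
t_s   = t_ms/1000;                    % seconds since 1970-01-01
datetime(t_s, 'ConvertFrom', 'epochtime', 'Epoch', '1970-01-01')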