diff --git a/docs/index.rst b/docs/index.rst index 6168308..4b9e351 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -214,8 +214,8 @@ Monthly Non-Exceedance Table tables.table_monthly_non_exceedance( df, - var1='HS', - step_var1=0.5, + var='HS', + step_var=0.5, output_file='Hs_table_monthly_non_exceedance.csv' ) @@ -230,7 +230,7 @@ Monthly Statistics Plot plots.plot_monthly_stats( df, - var1='HS', + var='HS', show=['Maximum','P99','Mean'], title='Hs[m]', output_file='Hs_monthly_stats.png' @@ -246,8 +246,8 @@ Directional Non-Exceedance Table tables.table_directional_non_exceedance( df, - var1='HS', - step_var1=0.5, + var='HS', + step_var=0.5, var_dir='DIRM', output_file='table_directional_non_exceedance.csv' ) @@ -263,8 +263,8 @@ Directional Statistics Plot plots.plot_directional_stats( df, - var1='HS', - step_var1=0.5, + var='HS', + step_var=0.5, var_dir='DIRM', title='$H_s$[m]', output_file='directional_stats.png' @@ -754,7 +754,7 @@ T2m Monthly Statistics Plot plots.plot_monthly_stats( df, - var1='T2m', + var='T2m', show=['Minimum','Mean','Maximum'], title='T2m', output_file='T2m_monthly_stats.png' @@ -770,8 +770,8 @@ T2m Monthly Non-Exceedance Table tables.table_monthly_non_exceedance( df, - var1='T2m', - step_var1=0.5, + var='T2m', + step_var=0.5, output_file='T2m_table_monthly_non_exceedance.csv' ) @@ -840,7 +840,7 @@ Current Speed Monthly Statistics Plot plots.plot_monthly_stats( ds_ocean, - var1='current_speed_0m', + var='current_speed_0m', show=['Mean', 'P99', 'Maximum'], title='Current[m/s], depth:0m', output_file='current_0m_monthly_stats.png' @@ -856,9 +856,9 @@ Current Speed Directional Statistics Plot plots.plot_directional_stats( ds_ocean, - var1='current_speed_0m', + var='current_speed_0m', var_dir='current_direction_0m', - step_var1=0.05, + step_var=0.05, show=['Mean', 'P99', 'Maximum'], title='Current[m/s], depth:0m', output_file='current_0m_dir_stats.png' @@ -1529,13 +1529,13 @@ Map Statistics .. code-block:: python - from metocean_stats.stats.map_funcs import * + from metocean_stats import maps Plot map with points of interest: .. code-block:: python - plot_points_on_map(lon=[3.35,3.10], + maps.plot_points_on_map(lon=[3.35,3.10], lat=[60.40,60.90], label=['NORA3','NORKYST800'], bathymetry='NORA3') @@ -1548,7 +1548,7 @@ Plot extreme signigicant wave height based on NORA3 data: .. code-block:: python - plot_extreme_wave_map(return_level=100, + maps.plot_extreme_wave_map(return_period=100, product='NORA3', title='100-yr return values Hs (NORA3)', set_extent = [0,30,52,73]) @@ -1561,7 +1561,7 @@ Plot extreme wind at 10 m height based on NORA3 data: .. code-block:: python - plot_extreme_wind_map(return_level=100, + maps.plot_extreme_wind_map(return_period=100, product='NORA3', z=10, title='100-yr return values Wind at 10 m (NORA3)', @@ -1575,7 +1575,7 @@ Plot extreme wind at 100 m height based on NORA3 data: .. 
code-block:: python - plot_extreme_wind_map(return_level=100, + plot_extreme_wind_map(return_period=100, product='NORA3', z=100, title='100-yr return values Wind at 100 m (NORA3)', diff --git a/environment.yml b/environment.yml index 66a6054..024c07e 100644 --- a/environment.yml +++ b/environment.yml @@ -19,4 +19,5 @@ dependencies: - keras - tensorflow - cartopy + - python-docx diff --git a/examples/example_ERA5_stats.py b/examples/example_ERA5_stats.py deleted file mode 100644 index 07d61a4..0000000 --- a/examples/example_ERA5_stats.py +++ /dev/null @@ -1,27 +0,0 @@ -from metocean_api import ts -from metocean_stats.stats import general_stats, dir_stats, extreme_stats - -# Define TimeSeries-object -ds = ts.TimeSeries(lon=109.94, lat=15.51,start_time='2000-01-01', end_time='2010-12-31' , product='ERA5') - - -# Import data using cdsapi and save it as csv -#ds.import_data(save_csv=True) -# Load data from local file -ds.load_data('tests/data/'+ds.datafile) - -# Generate Statistics -general_stats.scatter_diagram(data=ds.data, var1='swh', step_var1=1, var2='pp1d', step_var2=1, output_file='scatter_hs_tp_diagram.png') -general_stats.table_var_sorted_by_hs(data=ds.data, var='pp1d',var_hs='swh', output_file='tp_sorted_by_hs.csv') -general_stats.table_monthly_percentile(data=ds.data, var='swh', output_file='hs_monthly_perc.csv') -general_stats.table_monthly_min_mean_max(data=ds.data, var='swh',output_file='hs_montly_min_mean_max.csv')# - -# Directional Statistics -dir_stats.var_rose(ds.data, 'mwd','swh','windrose.png',method='overall') -dir_stats.directional_min_mean_max(ds.data,'mwd','swh','hs_dir_min_mean_max.csv') - -# Extreme Statistics -rl_pot = extreme_stats.return_levels_pot(data=ds.data, var='swh', periods=[20,50,100], output_file='return_levels_POT.png') -rl_am = extreme_stats.return_levels_annual_max(data=ds.data, var='swh', periods=[20,50,100],method='GEV',output_file='return_levels_GEV.png') - - diff --git a/examples/example_NORA.py b/examples/example_NORA.py index ba1d493..f66173d 100644 --- a/examples/example_NORA.py +++ b/examples/example_NORA.py @@ -1,87 +1,67 @@ -#from metocean_api import ts -from metocean_stats import plots, tables, stats +from metocean_stats import plots, tables, stats, maps from metocean_stats.stats.aux_funcs import * -from metocean_stats.stats.map_funcs import * - -# Define TimeSeries-object -#ds = ts.TimeSeries(lon=3.73, lat=64.6,start_time='1990-01-01', end_time='1990-01-31' , product='NORA3_wave_sub') - -# Import data from thredds.met.no and save it as csv -#ds.import_data(save_csv=True) -# Load data from local file -#ds.load_data('/home/konstantinosc/'+ds.datafile) -ds = readNora10File('../tests/data/NORA_test.txt') # for Lun -#ds = readNora10File('NORA10_6036N_0336E.1958-01-01.2022-12-31.txt') # for Lun -#ds = wind_correction_nora10(ds,var='W10') -#ds = readNora10File('NORA10_5766N_0503E.1958-01-01.2022-12-31.txt') # for Hav - - -#plots.scatter_plot(ds,'HS','W10',density=True, regression_line=True, output_file='scatter_plot.png') -#fig = plots.plot_scatter(ds,var1='W10',var2='W100',var1_units='m/s',var2_units='m/s', title='Scatter',regression_line=True,qqplot=False,density=True,output_file='scatter_plot.png') +df = readNora10File('../tests/data/NORA_test.txt') # Map: -#plot_points_on_map( lon=[3.35,3.10], lat=[60.40,60.90],label=['NORA3','NORKYST800'],bathymetry='NORA3') -#plot_extreme_wave_map(return_level=100, product='NORA3', title='100-yr return values Hs (NORA3)', set_extent = [0,30,52,73]) -#plot_extreme_wind_map(return_level=100, 
product='NORA3',z=100, title='100-yr return values Wind at 100 m (NORA3)', set_extent = [0,30,52,73])
+maps.plot_points_on_map(lon=[3.35,3.10], lat=[60.40,60.90],label=['NORA3','NORKYST800'],bathymetry='NORA3',output_file='map.png')
+maps.plot_extreme_wave_map(return_period=100, product='NORA3', title='100-yr return values Hs (NORA3)', set_extent = [0,30,52,73],output_file='wave_100yrs.png')
+maps.plot_extreme_wind_map(return_period=100, product='NORA3',z=10, title='100-yr return values Wind at 10 m (NORA3)', set_extent = [0,30,52,73], output_file='wind_100yrs.png')

# Wind:
-#plots.plot_multi_diagnostic_return_levels(ds, var='HS', periods=[10, 100],threshold=None,output_file='plot_diagnostic_return_levels.png')
-#plots.var_rose(ds,direc='D10',inten='W10',bins_range= np.arange(0, 25, step=5),title='Omni wind rose',output_file='wind_omni.png')
-#general.plot_directional_stats(df,var1='W10',step_var1=0.1,var_dir='D10',title = '', output_file='directional_stats.png')
-#general.table_directional_non_exceedance(df,var1='W10',step_var1=2,var_dir='D10',output_file='table_directional_non_exceedance.csv')
-#general.plot_monthly_stats(df,var1='W10',step_var1=2,title = '', output_file='monthly_stats.png')
-#general.table_monthly_non_exceedance(df,var1='W10',step_var1=2,output_file='table_monthly_non_exceedance.csv')
-#plots.plot_prob_non_exceedance_fitted_3p_weibull(df,var='W10',output_file='prob_non_exceedance_fitted_3p_weibull.png')
-#plots.plot_directional_weibull_return_periods(df,var='W10',var_dir='D10',periods=[1, 10, 100, 1000], units='m/s',output_file='Wind.dir_extremes_weibull.png')
-#tables.table_directional_weibull_return_periods(df,var='W10',periods=[1, 10, 100, 10000], units='m/s',var_dir = 'D10',output_file='directional_extremes_weibull.wind.csv')
-#plots.plot_monthly_weibull_return_periods(df,var='W10',periods=[1, 10, 100, 1000], units='m/s',output_file='monthly_extremes_weibull.wind.png')
-#tables.table_monthly_weibull_return_periods(df,var='W10',periods=[1, 10, 100, 10000], units='m/s',output_file='monthly_extremes_weibull.wind.csv')
-#plots.plot_monthly_weather_window(df,var='W10',threshold=10, window_size=12,output_file= 'NORA10_monthly_weather_window4_12_plot.png')
-
+plots.var_rose(df,var_dir='D10',var='W10',method='overall',max_perc=40,decimal_places=1, units='m/s',output_file='wind_omni.png')
+plots.var_rose(df,var_dir='D10',var='W10',method='monthly',max_perc=40,decimal_places=1, units='m/s',output_file='wind_monthly.png')
+plots.plot_directional_stats(df,var='W10',step_var=0.1,var_dir='D10',title = '', output_file='directional_stats.png')
+tables.table_directional_non_exceedance(df,var='W10',step_var=2,var_dir='D10',output_file='table_directional_non_exceedance.csv')
+plots.plot_monthly_stats(df,var='W10',title = 'Wind Speed at 10 m [m/s]', output_file='monthly_stats.png')
+tables.table_monthly_non_exceedance(df,var='W10',step_var=2,output_file='table_monthly_non_exceedance.csv')
+plots.plot_prob_non_exceedance_fitted_3p_weibull(df,var='W10',output_file='prob_non_exceedance_fitted_3p_weibull.png')
+plots.plot_monthly_return_periods(df,var='W10',periods=[1, 10, 100],distribution='Weibull3P_MOM', units='m/s',output_file='W10_monthly_extremes.png')
+plots.plot_directional_return_periods(df,var='W10',var_dir='D10',periods=[1, 10, 100],distribution='Weibull3P_MOM', units='m/s',adjustment='NORSOK',output_file='W10_dir_extremes_Weibull_norsok.png')
+plots.plot_monthly_weather_window(df,var='W10',threshold=10, window_size=12,output_file= 'NORA10_monthly_weather_window4_12_plot.png')
+plots.plot_scatter(df,var1='W10',var2='W100',var1_units='m/s',var2_units='m/s', title='Scatter',regression_line=True,qqplot=False,density=True,output_file='scatter_plot.png') +plots.plot_multi_diagnostic_return_levels(df, var='HS', periods=[10, 100],threshold=None,output_file='plot_diagnostic_return_levels.png') # Waves: -#plots.plot_prob_non_exceedance_fitted_3p_weibull(ds,var='HS',output_file='prob_non_exceedance_fitted_3p_weibull.png') -# tables.scatter_diagram(ds, var1='HS', step_var1=1, var2='TP', step_var2=1, output_file='Hs_Tp_scatter.csv') -# tables.table_var_sorted_by_hs(ds, var='TP', var_hs='HS', output_file='Tp_sorted_by_Hs.csv') -# tables.table_monthly_non_exceedance(ds,var1='HS',step_var1=0.5,output_file='Hs_table_monthly_non_exceedance.csv') -# plots.plot_monthly_stats(ds,var1='HS',show=['Maximum','P99','Mean'], title = 'Hs[m]', output_file='Hs_monthly_stats.png') -# tables.table_directional_non_exceedance(ds,var1='HS',step_var1=0.5,var_dir='DIRM',output_file='table_directional_non_exceedance.csv') -# plots.plot_directional_stats(ds,var1='HS',step_var1=0.5, var_dir='DIRM', title = '$H_s$[m]', output_file='directional_stats.png') -# plots.plot_joint_distribution_Hs_Tp(ds,var_hs='HS',var_tp='TP',periods=[1,10,100,1000], title='Hs-Tp joint distribution',output_file='Hs.Tp.joint.distribution.png',density_plot=True) -# tables.table_monthly_joint_distribution_Hs_Tp_param(ds,var_hs='HS',var_tp='TP',periods=[1,10,100,10000],output_file='monthly_Hs_Tp_joint_param.csv') -# df = tables.table_directional_joint_distribution_Hs_Tp_param(ds,var_hs='HS',var_tp='TP',var_dir='DIRM',periods=[1,10,100],output_file='dir_Hs_Tp_joint_param.csv') -# plots.plot_monthly_weather_window(ds,var='HS',threshold=4, window_size=12,output_file= 'NORA10_monthly_weather_window4_12_plot.png') -# tables.table_monthly_return_periods(ds,var='HS',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',output_file='HS_monthly_extremes_Weibull.csv') -# tables.table_directional_return_periods(ds,var='HS',periods=[1, 10, 100, 10000], units='m',var_dir = 'DIRM',distribution='Weibull3P_MOM', adjustment='NORSOK' ,output_file='directional_extremes_weibull.csv') -# plots.plot_monthly_return_periods(ds,var='HS',periods=[1, 10, 100],distribution='Weibull3P_MOM', units='m',output_file='HS_monthly_extremes.png') -# plots.plot_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000 ],distribution='GUM', units='m',output_file='dir_extremes_GUM.png') -# plots.plot_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',adjustment='NORSOK',output_file='dir_extremes_Weibull_norsok.png') -# plots.plot_polar_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',adjustment='NORSOK',output_file='dir_extremes_Weibull_polar.png') -# tables.table_monthly_joint_distribution_Hs_Tp_return_values(ds,var_hs='HS',var_tp='TP',periods=[1,10,100,10000],output_file='monthly_Hs_Tp_joint_return_values.csv') -# tables.table_directional_joint_distribution_Hs_Tp_return_values(ds,var_hs='HS',var_tp='TP',var_dir='DIRM',periods=[1,10,100,1000],adjustment='NORSOK',output_file='directional_Hs_Tp_joint_return_values.csv') -# tables.table_Hs_Tpl_Tph_return_values(ds,var_hs='HS',var_tp='TP',periods=[1,10,100,10000],output_file='hs_tpl_tph_return_values.csv') -# plots.plot_tp_for_given_hs(ds, 'HS', 'TP',output_file='tp_for_given_hs.png') -# tables.table_tp_for_given_hs(ds, 'HS', 
'TP',max_hs=20,output_file='tp_for_given_hs.csv') -# tables.table_tp_for_rv_hs(ds, var_hs='HS', var_tp='TP',periods=[1,10,100,10000],output_file='tp_for_rv_hs.csv') -# tables.table_wave_induced_current(ds, var_hs='HS',var_tp='TP',depth=200,ref_depth=200, spectrum = 'JONSWAP',output_file='JONSWAP_wave_induced_current_depth200.csv') -# tables.table_wave_induced_current(ds, var_hs='HS',var_tp='TP',depth=200,ref_depth=200, spectrum = 'TORSEHAUGEN',output_file='TORSEHAUGEN_wave_induced_current_depth200.csv') -# tables.table_hs_for_given_wind(ds, 'HS','W10', bin_width=2, max_wind=42, output_file='table_perc_hs_for_wind.csv') -# plots.plot_hs_for_given_wind(ds, 'HS', 'W10',output_file='hs_for_given_wind.png') -# tables.table_hs_for_rv_wind(ds, var_wind='W10', var_hs='HS',periods=[1,10,100,10000],output_file='hs_for_rv_wind.csv') -# df = tables.table_Hmax_crest_return_periods(ds,var_hs='HS', var_tp='TP', depth=200, periods=[1, 10, 100,10000],sea_state='long-crested') -# tables.table_directional_Hmax_return_periods(ds,var_hs='HS', var_tp = 'TP',var_dir='DIRM', periods=[10, 100,10000],adjustment='NORSOK', output_file='table_dir_Hmax_return_values.csv') +plots.plot_prob_non_exceedance_fitted_3p_weibull(df,var='HS',output_file='prob_non_exceedance_fitted_3p_weibull.png') +tables.scatter_diagram(df, var1='HS', step_var1=1, var2='TP', step_var2=1, output_file='Hs_Tp_scatter.csv') +tables.table_var_sorted_by_hs(df, var='TP', var_hs='HS', output_file='Tp_sorted_by_Hs.csv') +tables.table_monthly_non_exceedance(df,var='HS',step_var=0.5,output_file='Hs_table_monthly_non_exceedance.csv') +plots.plot_monthly_stats(df,var='HS',show=['Maximum','P99','Mean'], title = 'Hs[m]', output_file='Hs_monthly_stats.png') +tables.table_directional_non_exceedance(df,var='HS',step_var=0.5,var_dir='DIRM',output_file='table_directional_non_exceedance.csv') +plots.plot_directional_stats(df,var='HS',step_var=0.5, var_dir='DIRM', title = '$H_s$[m]', output_file='directional_stats.png') +plots.plot_joint_distribution_Hs_Tp(df,var_hs='HS',var_tp='TP',periods=[1,10,100,1000], title='Hs-Tp joint distribution',output_file='Hs.Tp.joint.distribution.png',density_plot=True) +tables.table_monthly_joint_distribution_Hs_Tp_param(df,var_hs='HS',var_tp='TP',periods=[1,10,100,10000],output_file='monthly_Hs_Tp_joint_param.csv') +tables.table_directional_joint_distribution_Hs_Tp_param(df,var_hs='HS',var_tp='TP',var_dir='DIRM',periods=[1,10,100],output_file='dir_Hs_Tp_joint_param.csv') +plots.plot_monthly_weather_window(df,var='HS',threshold=4, window_size=12,output_file= 'NORA10_monthly_weather_window4_12_plot.png') +tables.table_monthly_return_periods(df,var='HS',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',output_file='HS_monthly_extremes_Weibull.csv') +tables.table_directional_return_periods(df,var='HS',periods=[1, 10, 100, 10000], units='m',var_dir = 'DIRM',distribution='Weibull3P_MOM', adjustment='NORSOK' ,output_file='directional_extremes_weibull.csv') +plots.plot_monthly_return_periods(df,var='HS',periods=[1, 10, 100],distribution='Weibull3P_MOM', units='m',output_file='HS_monthly_extremes.png') +plots.plot_directional_return_periods(df,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000 ],distribution='GUM', units='m',output_file='dir_extremes_GUM.png') +plots.plot_directional_return_periods(df,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',adjustment='NORSOK',output_file='dir_extremes_Weibull_norsok.png') 
+tables.table_monthly_joint_distribution_Hs_Tp_return_values(df,var_hs='HS',var_tp='TP',periods=[1,10,100,10000],output_file='monthly_Hs_Tp_joint_return_values.csv') +tables.table_directional_joint_distribution_Hs_Tp_return_values(df,var_hs='HS',var_tp='TP',var_dir='DIRM',periods=[1,10,100,1000],adjustment='NORSOK',output_file='directional_Hs_Tp_joint_return_values.csv') +tables.table_Hs_Tpl_Tph_return_values(df,var_hs='HS',var_tp='TP',periods=[1,10,100,10000],output_file='hs_tpl_tph_return_values.csv') +plots.plot_tp_for_given_hs(df, 'HS', 'TP',output_file='tp_for_given_hs.png') +tables.table_tp_for_given_hs(df, 'HS', 'TP',max_hs=20,output_file='tp_for_given_hs.csv') +tables.table_tp_for_rv_hs(df, var_hs='HS', var_tp='TP',periods=[1,10,100,10000],output_file='tp_for_rv_hs.csv') +tables.table_wave_induced_current(df, var_hs='HS',var_tp='TP',depth=200,ref_depth=200, spectrum = 'JONSWAP',output_file='JONSWAP_wave_induced_current_depth200.csv') +tables.table_wave_induced_current(df, var_hs='HS',var_tp='TP',depth=200,ref_depth=200, spectrum = 'TORSEHAUGEN',output_file='TORSEHAUGEN_wave_induced_current_depth200.csv') +tables.table_hs_for_given_wind(df, 'HS','W10', bin_width=2, max_wind=42, output_file='table_perc_hs_for_wind.csv') +plots.plot_hs_for_given_wind(df, 'HS', 'W10',output_file='hs_for_given_wind.png') +tables.table_hs_for_rv_wind(df, var_wind='W10', var_hs='HS',periods=[1,10,100,10000],output_file='hs_for_rv_wind.csv') +tables.table_Hmax_crest_return_periods(df,var_hs='HS', var_tp='TP', depth=200, periods=[1, 10, 100,10000],sea_state='long-crested',output_file='table_Hmax_crest_rp.csv') +tables.table_directional_Hmax_return_periods(df,var_hs='HS', var_tp = 'TP',var_dir='DIRM', periods=[10, 100,10000],adjustment='NORSOK', output_file='table_dir_Hmax_return_values.csv') # Air Temperature: -#ds = air_temperature_correction_nora10(ds,var='T2m') # Temperature correction -#plots.plot_monthly_return_periods(ds,var='T2m',periods=[1, 10, 100],distribution='GUM_L',method='minimum', units='°C',output_file='T2m_monthly_extremes_neg.png') -#tables.table_monthly_return_periods(ds,var='T2m',periods=[1, 10, 100],distribution='GUM_L', method='minimum' ,units='°C',output_file='T2m_monthly_extremes_neg.csv') -#plots.plot_monthly_return_periods(ds,var='T2m',periods=[1, 10, 100],distribution='GUM', method='maximum', units='°C',output_file='T2m_monthly_extremes_pos.png') -#tables.table_monthly_return_periods(ds,var='T2m',periods=[1, 10, 100],distribution='GUM', method='maximum' ,units='°C',output_file='T2m_monthly_extremes_pos.csv') -# plots.plot_monthly_stats(ds,var1='T2m',show=['Minimum','Mean','Maximum'], title = 'T2m', output_file='T2m_monthly_stats.png') -# tables.table_monthly_non_exceedance(ds,var1='T2m',step_var1=0.5,output_file='T2m_table_monthly_non_exceedance.csv') +plots.plot_monthly_return_periods(df,var='T2m',periods=[1, 10, 100],distribution='GUM_L',method='minimum', units='°C',output_file='T2m_monthly_extremes_neg.png') +tables.table_monthly_return_periods(df,var='T2m',periods=[1, 10, 100],distribution='GUM_L', method='minimum' ,units='°C',output_file='T2m_monthly_extremes_neg.csv') +plots.plot_monthly_return_periods(df,var='T2m',periods=[1, 10, 100],distribution='GUM', method='maximum', units='°C',output_file='T2m_monthly_extremes_pos.png') +tables.table_monthly_return_periods(df,var='T2m',periods=[1, 10, 100],distribution='GUM', method='maximum' ,units='°C',output_file='T2m_monthly_extremes_pos.csv') +plots.plot_monthly_stats(df,var='T2m',show=['Minimum','Mean','Maximum'], title = 'T2m', 
output_file='T2m_monthly_stats.png') +tables.table_monthly_non_exceedance(df,var='T2m',step_var=0.5,output_file='T2m_table_monthly_non_exceedance.csv') # Currents: @@ -94,25 +74,25 @@ #plots.plot_monthly_return_periods(ds_ocean,var='current_speed_0m',periods=[1, 10, 100],distribution='Weibull3P_MOM',method='POT',threshold='P99', units='m/s',output_file='csp0m_monthly_extremes.png') -#plots.var_rose(ds, f'current_direction_{depth}',f'current_speed_{depth}',max_perc=30,decimal_places=2, units='m/s', method='monthly', output_file='monthly_rose.png') -#plots.var_rose(ds,f'current_direction_{depth}',f'current_speed_{depth}',max_perc=30,decimal_places=2, units='m/s', method='overall', output_file='overall_rose.png') -#plots.plot_monthly_stats(ds,var1=f'current_speed_{depth}',show=['Mean','P99','Maximum'], title = f'Current[m/s], depth:{depth}', output_file=f'current{depth}_monthly_stats.png') -#plots.plot_directional_stats(ds,var1=f'current_speed_{depth}',var_dir=f'current_direction_{depth}',step_var1=0.05,show=['Mean','P99','Maximum'], title = f'Current[m/s], depth:{depth}', output_file=f'current{depth}_dir_stats.png') -#tables.table_directional_return_periods(ds,var=f'current_speed_{depth}',periods=[1, 10, 100, 10000], units='m/s',var_dir = f'current_direction_{depth}',distribution='Weibull', adjustment='NORSOK' ,output_file=f'directional_extremes_weibull_current{depth}.csv') -#tables.table_monthly_return_periods(ds,var=f'current_speed_{depth}',periods=[1, 10, 100, 10000], units='m/s',distribution='Weibull3P_MOM',method='POT',threshold='P99',output_file=f'monthly_extremes_weibull_current{depth}.csv') -#df = tables.table_monthly_return_periods(ds,var='HS',periods=[1, 10, 100, 10000], units='m',distribution='Weibull3P_MOM',method='POT',threshold='P99',output_file='HS_monthly_extremes_Weibull_POT_P99.csv') -#df = tables.table_monthly_return_periods(ds,var='HS',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',output_file='HS_monthly_extremes_Weibull.csv') -#df = tables.table_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',output_file='HS_dir_extremes_Weibull.csv') -#df = tables.table_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM',method='POT',threshold='P99', units='m',output_file='HS_dir_extremes_Weibull_POT.csv') -# plots.plot_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',adjustment='NORSOK',method='POT',threshold='P99',output_file='dir_extremes_Weibull_norsok.png') -# df = tables.table_profile_return_values(ds,var=['W10','W50','W80','W100','W150'], z=[10, 50, 80, 100, 150], periods=[1, 10, 100, 10000], output_file='RVE_wind_profile.csv') +#plots.var_rose(df, f'current_direction_{depth}',f'current_speed_{depth}',max_perc=30,decimal_places=2, units='m/s', method='monthly', output_file='monthly_rose.png') +#plots.var_rose(df,f'current_direction_{depth}',f'current_speed_{depth}',max_perc=30,decimal_places=2, units='m/s', method='overall', output_file='overall_rose.png') +#plots.plot_monthly_stats(df,var1=f'current_speed_{depth}',show=['Mean','P99','Maximum'], title = f'Current[m/s], depth:{depth}', output_file=f'current{depth}_monthly_stats.png') +#plots.plot_directional_stats(df,var1=f'current_speed_{depth}',var_dir=f'current_direction_{depth}',step_var1=0.05,show=['Mean','P99','Maximum'], title = f'Current[m/s], depth:{depth}', 
output_file=f'current{depth}_dir_stats.png') +#tables.table_directional_return_periods(df,var=f'current_speed_{depth}',periods=[1, 10, 100, 10000], units='m/s',var_dir = f'current_direction_{depth}',distribution='Weibull', adjustment='NORSOK' ,output_file=f'directional_extremes_weibull_current{depth}.csv') +#tables.table_monthly_return_periods(df,var=f'current_speed_{depth}',periods=[1, 10, 100, 10000], units='m/s',distribution='Weibull3P_MOM',method='POT',threshold='P99',output_file=f'monthly_extremes_weibull_current{depth}.csv') +#df = tables.table_monthly_return_periods(df,var='HS',periods=[1, 10, 100, 10000], units='m',distribution='Weibull3P_MOM',method='POT',threshold='P99',output_file='HS_monthly_extremes_Weibull_POT_P99.csv') +#df = tables.table_monthly_return_periods(df,var='HS',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',output_file='HS_monthly_extremes_Weibull.csv') +#df = tables.table_directional_return_periods(df,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',output_file='HS_dir_extremes_Weibull.csv') +#df = tables.table_directional_return_periods(df,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM',method='POT',threshold='P99', units='m',output_file='HS_dir_extremes_Weibull_POT.csv') +# plots.plot_directional_return_periods(df,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',adjustment='NORSOK',method='POT',threshold='P99',output_file='dir_extremes_Weibull_norsok.png') +# df = tables.table_profile_return_values(df,var=['W10','W50','W80','W100','W150'], z=[10, 50, 80, 100, 150], periods=[1, 10, 100, 10000], output_file='RVE_wind_profile.csv') #fig = plots.plot_profile_return_values(ds_ocean,var=['current_speed_' + d for d in depth], z=[float(d[:-1]) for d in depth], periods=[1, 10, 100, 10000],reverse_yaxis=True, output_file='RVE_current_profile.png') #df = tables.table_current_for_given_wind(ds_all, var_curr='current_speed_0m', var_wind='W10', bin_width=2, max_wind=42, output_file='table_perc_current_for_wind.csv') #plots.plot_current_for_given_wind(ds_all, var_curr='current_speed_0m', var_wind='W10',max_wind=40 ,output_file='curr_for_given_wind.png') #df = tables.table_current_for_given_hs(ds_all, var_curr='current_speed_0m', var_hs='HS', bin_width=2, max_hs=20, output_file='table_perc_current_for_Hs.csv') #ds['current_speed_0m'] = 0.05*ds['W10'] -#df = tables.table_current_for_given_wind(ds, var_curr='current_speed_0m', var_wind='W10', bin_width=2, max_wind=42, output_file='table_perc_current_for_wind.csv') -#df = tables.table_current_for_given_hs(ds, var_curr='current_speed_0m', var_hs='HS', bin_width=2, max_hs=20, output_file='table_perc_current_for_Hs.csv') +#df = tables.table_current_for_given_wind(df, var_curr='current_speed_0m', var_wind='W10', bin_width=2, max_wind=42, output_file='table_perc_current_for_wind.csv') +#df = tables.table_current_for_given_hs(df, var_curr='current_speed_0m', var_hs='HS', bin_width=2, max_hs=20, output_file='table_perc_current_for_Hs.csv') #plots.plot_current_for_given_hs(ds_all, var_curr='current_speed_0m', var_hs='HS', max_hs=20, output_file='curr_for_given_hs.png') #df = tables.table_extreme_current_profile_rv(ds_ocean, var=['current_speed_' + d for d in depth], z=[float(d[:-1]) for d in depth], periods=[1,100,1000],percentile=95, output_file='table_extreme_current_profile_rv.png') @@ -150,6 +130,6 @@ #df = tables.table_tidal_levels(ds_tide, var='tide', output_file='tidal_levels.csv') #fig = 
plots.plot_tidal_levels(ds_tide, var='tide',start_time='2010-01-01',end_time='2010-03-30', output_file='tidal_levels.png') #df = tables.table_storm_surge_for_given_hs(ds_all, var_surge='zeta_0m', var_hs='HS', bin_width=1, max_hs=20, output_file='table_perc_surge_for_Hs.csv') -#fig = plots.plot_storm_surge_for_given_hs(ds,var_surge='zeta_0m', var_hs='HS', max_hs=20, output_file='surge_for_given_hs.png') -#df = tables.table_extreme_total_water_level(ds, var_hs='HS',var_tp='TP',var_surge='zeta_0m', var_tide='tide', periods=[100,10000], output_file='table_extreme_total_water_level.csv') -#df = tables.table_storm_surge_for_rv_hs(ds, var_hs='HS',var_tp='TP',var_surge='zeta_0m', var_tide='tide', periods=[1,10,100,10000],depth=200, output_file='table_storm_surge_for_rv_hs.csv') +#fig = plots.plot_storm_surge_for_given_hs(df,var_surge='zeta_0m', var_hs='HS', max_hs=20, output_file='surge_for_given_hs.png') +#df = tables.table_extreme_total_water_level(df, var_hs='HS',var_tp='TP',var_surge='zeta_0m', var_tide='tide', periods=[100,10000], output_file='table_extreme_total_water_level.csv') +#df = tables.table_storm_surge_for_rv_hs(df, var_hs='HS',var_tp='TP',var_surge='zeta_0m', var_tide='tide', periods=[1,10,100,10000],depth=200, output_file='table_storm_surge_for_rv_hs.csv') diff --git a/examples/example_NORA3_stats.py b/examples/example_NORA3_stats.py deleted file mode 100644 index 33e5dcb..0000000 --- a/examples/example_NORA3_stats.py +++ /dev/null @@ -1,152 +0,0 @@ -#from metocean_api import ts -from metocean_stats import plots, tables, stats -from metocean_stats.stats.aux_funcs import * -from metocean_stats.stats.map_funcs import * - - -# Define TimeSeries-object -#ds = ts.TimeSeries(lon=3.73, lat=64.6,start_time='1990-01-01', end_time='1990-01-31' , product='NORA3_wave_sub') - -# Import data from thredds.met.no and save it as csv -#ds.import_data(save_csv=True) -# Load data from local file -#ds.load_data('/home/konstantinosc/'+ds.datafile) -#ds = readNora10File('../tests/data/NORA_test.txt') # for Lun -#ds = readNora10File('NORA10_6036N_0336E.1958-01-01.2022-12-31.txt') # for Lun -#ds = wind_correction_nora10(ds,var='W10') -#ds = readNora10File('NORA10_5766N_0503E.1958-01-01.2022-12-31.txt') # for Hav - -# Map: -#plot_points_on_map( lon=[3.35,3.10], lat=[60.40,60.90],label=['NORA3','NORKYST800'],bathymetry='NORA3') -#plot_extreme_wave_map(return_level=50, product='NORA3', title='50-yr return values Hs (NORA3)', set_extent = [0,30,52,73]) -#plot_extreme_wind_map(return_level=100, product='NORA3',level=0, title='100-yr return values Wind at 10 m (NORA3)', set_extent = [0,30,52,73]) - - -# Wind: -#plots.var_rose(ds,direc='D10',inten='W10',bins_range= np.arange(0, 25, step=5),title='Omni wind rose',output_file='wind_omni.png') -#general.plot_directional_stats(df,var1='W10',step_var1=0.1,var_dir='D10',title = '', output_file='directional_stats.png') -#general.table_directional_non_exceedance(df,var1='W10',step_var1=2,var_dir='D10',output_file='table_directional_non_exceedance.csv') -#general.plot_monthly_stats(df,var1='W10',step_var1=2,title = '', output_file='monthly_stats.png') -#general.table_monthly_non_exceedance(df,var1='W10',step_var1=2,output_file='table_monthly_non_exceedance.csv') -#plots.plot_prob_non_exceedance_fitted_3p_weibull(df,var='W10',output_file='prob_non_exceedance_fitted_3p_weibull.png') -#plots.plot_directional_weibull_return_periods(df,var='W10',var_dir='D10',periods=[1, 10, 100, 1000], units='m/s',output_file='Wind.dir_extremes_weibull.png') 
-#tables.table_directional_weibull_return_periods(df,var='W10',periods=[1, 10, 100, 10000], units='m/s',var_dir = 'D10',output_file='directional_extremes_weibull.wind.csv') -#plots.plot_monthly_weibull_return_periods(df,var='W10',periods=[1, 10, 100, 1000], units='m/s',output_file='monthly_extremes_weibull.wind.png') -#tables.table_monthly_weibull_return_periods(df,var='W10',periods=[1, 10, 100, 10000], units='m/s',output_file='monthly_extremes_weibull.wind.csv') -#plots.plot_monthly_weather_window(df,var='W10',threshold=10, window_size=12,output_file= 'NORA10_monthly_weather_window4_12_plot.png') - - -# Waves: -# plots.plot_prob_non_exceedance_fitted_3p_weibull(ds,var='HS',output_file='prob_non_exceedance_fitted_3p_weibull.png') -# tables.scatter_diagram(ds, var1='HS', step_var1=1, var2='TP', step_var2=1, output_file='Hs_Tp_scatter.csv') -# tables.table_var_sorted_by_hs(ds, var='TP', var_hs='HS', output_file='Tp_sorted_by_Hs.csv') -# tables.table_monthly_non_exceedance(ds,var1='HS',step_var1=0.5,output_file='Hs_table_monthly_non_exceedance.csv') -# plots.plot_monthly_stats(ds,var1='HS',show=['Maximum','P99','Mean'], title = 'Hs[m]', output_file='Hs_monthly_stats.png') -# tables.table_directional_non_exceedance(ds,var1='HS',step_var1=0.5,var_dir='DIRM',output_file='table_directional_non_exceedance.csv') -# plots.plot_directional_stats(ds,var1='HS',step_var1=0.5, var_dir='DIRM', title = '$H_s$[m]', output_file='directional_stats.png') -# plots.plot_joint_distribution_Hs_Tp(ds,var_hs='HS',var_tp='TP',periods=[1,10,100,1000], title='Hs-Tp joint distribution',output_file='Hs.Tp.joint.distribution.png',density_plot=True) -# tables.table_monthly_joint_distribution_Hs_Tp_param(ds,var_hs='HS',var_tp='TP',periods=[1,10,100,10000],output_file='monthly_Hs_Tp_joint_param.csv') -# df = tables.table_directional_joint_distribution_Hs_Tp_param(ds,var_hs='HS',var_tp='TP',var_dir='DIRM',periods=[1,10,100],output_file='dir_Hs_Tp_joint_param.csv') -# plots.plot_monthly_weather_window(ds,var='HS',threshold=4, window_size=12,output_file= 'NORA10_monthly_weather_window4_12_plot.png') -# tables.table_monthly_return_periods(ds,var='HS',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',output_file='HS_monthly_extremes_Weibull.csv') -# tables.table_directional_return_periods(ds,var='HS',periods=[1, 10, 100, 10000], units='m',var_dir = 'DIRM',distribution='Weibull3P_MOM', adjustment='NORSOK' ,output_file='directional_extremes_weibull.csv') -# plots.plot_monthly_return_periods(ds,var='HS',periods=[1, 10, 100],distribution='Weibull3P_MOM', units='m',output_file='HS_monthly_extremes.png') -# plots.plot_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000 ],distribution='GUM', units='m',output_file='dir_extremes_GUM.png') -# plots.plot_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',adjustment='NORSOK',output_file='dir_extremes_Weibull_norsok.png') -# plots.plot_polar_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',adjustment='NORSOK',output_file='dir_extremes_Weibull_polar.png') -# tables.table_monthly_joint_distribution_Hs_Tp_return_values(ds,var_hs='HS',var_tp='TP',periods=[1,10,100,10000],output_file='monthly_Hs_Tp_joint_return_values.csv') -# 
tables.table_directional_joint_distribution_Hs_Tp_return_values(ds,var_hs='HS',var_tp='TP',var_dir='DIRM',periods=[1,10,100,1000],adjustment='NORSOK',output_file='directional_Hs_Tp_joint_return_values.csv') -# tables.table_Hs_Tpl_Tph_return_values(ds,var_hs='HS',var_tp='TP',periods=[1,10,100,10000],output_file='hs_tpl_tph_return_values.csv') -# plots.plot_tp_for_given_hs(ds, 'HS', 'TP',output_file='tp_for_given_hs.png') -# tables.table_tp_for_given_hs(ds, 'HS', 'TP',max_hs=20,output_file='tp_for_given_hs.csv') -# tables.table_tp_for_rv_hs(ds, var_hs='HS', var_tp='TP',periods=[1,10,100,10000],output_file='tp_for_rv_hs.csv') -# tables.table_wave_induced_current(ds, var_hs='HS',var_tp='TP',depth=200,ref_depth=200, spectrum = 'JONSWAP',output_file='JONSWAP_wave_induced_current_depth200.csv') -# tables.table_wave_induced_current(ds, var_hs='HS',var_tp='TP',depth=200,ref_depth=200, spectrum = 'TORSEHAUGEN',output_file='TORSEHAUGEN_wave_induced_current_depth200.csv') -# tables.table_hs_for_given_wind(ds, 'HS','W10', bin_width=2, max_wind=42, output_file='table_perc_hs_for_wind.csv') -# plots.plot_hs_for_given_wind(ds, 'HS', 'W10',output_file='hs_for_given_wind.png') -# tables.table_hs_for_rv_wind(ds, var_wind='W10', var_hs='HS',periods=[1,10,100,10000],output_file='hs_for_rv_wind.csv') -# df = tables.table_Hmax_crest_return_periods(ds,var_hs='HS', var_tp='TP', depth=200, periods=[1, 10, 100,10000],sea_state='long-crested') -# tables.table_directional_Hmax_return_periods(ds,var_hs='HS', var_tp = 'TP',var_dir='DIRM', periods=[10, 100,10000],adjustment='NORSOK', output_file='table_dir_Hmax_return_values.csv') - - -# Air Temperature: -#ds = air_temperature_correction_nora10(ds,var='T2m') # Temperature correction -#plots.plot_monthly_return_periods(ds,var='T2m',periods=[1, 10, 100],distribution='GUM_L',method='minimum', units='°C',output_file='T2m_monthly_extremes_neg.png') -#tables.table_monthly_return_periods(ds,var='T2m',periods=[1, 10, 100],distribution='GUM_L', method='minimum' ,units='°C',output_file='T2m_monthly_extremes_neg.csv') -#plots.plot_monthly_return_periods(ds,var='T2m',periods=[1, 10, 100],distribution='GUM', method='maximum', units='°C',output_file='T2m_monthly_extremes_pos.png') -#tables.table_monthly_return_periods(ds,var='T2m',periods=[1, 10, 100],distribution='GUM', method='maximum' ,units='°C',output_file='T2m_monthly_extremes_pos.csv') -# plots.plot_monthly_stats(ds,var1='T2m',show=['Minimum','Mean','Maximum'], title = 'T2m', output_file='T2m_monthly_stats.png') -# tables.table_monthly_non_exceedance(ds,var1='T2m',step_var1=0.5,output_file='T2m_table_monthly_non_exceedance.csv') - - -# Currents: -#import pandas as pd -#ds_ocean = pd.read_csv('../tests/data/NorkystDA_test.csv',comment='#',index_col=0, parse_dates=True) -#depth = ['0m', '1m', '2.5m', '5m', '10m', '15m', '20m', '25m', '30m', '40m', '50m', '75m', '100m', '150m', '200m'] - -#ds_all = pd.concat([ds.loc['2017-01-02 00:00:00':'2018-12-31 21:00:00'], ds_ocean.resample('3h').mean()], axis=1) -#ds_all = ds_all.dropna(how='all') - - -#plots.plot_monthly_return_periods(ds_ocean,var='current_speed_0m',periods=[1, 10, 100],distribution='Weibull3P_MOM',method='POT',threshold='P99', units='m/s',output_file='csp0m_monthly_extremes.png') -#plots.var_rose(ds, f'current_direction_{depth}',f'current_speed_{depth}',max_perc=30,decimal_places=2, units='m/s', method='monthly', output_file='monthly_rose.png') -#plots.var_rose(ds,f'current_direction_{depth}',f'current_speed_{depth}',max_perc=30,decimal_places=2, units='m/s', 
method='overall', output_file='overall_rose.png') -#plots.plot_monthly_stats(ds,var1=f'current_speed_{depth}',show=['Mean','P99','Maximum'], title = f'Current[m/s], depth:{depth}', output_file=f'current{depth}_monthly_stats.png') -#plots.plot_directional_stats(ds,var1=f'current_speed_{depth}',var_dir=f'current_direction_{depth}',step_var1=0.05,show=['Mean','P99','Maximum'], title = f'Current[m/s], depth:{depth}', output_file=f'current{depth}_dir_stats.png') -#tables.table_directional_return_periods(ds,var=f'current_speed_{depth}',periods=[1, 10, 100, 10000], units='m/s',var_dir = f'current_direction_{depth}',distribution='Weibull', adjustment='NORSOK' ,output_file=f'directional_extremes_weibull_current{depth}.csv') -#tables.table_monthly_return_periods(ds,var=f'current_speed_{depth}',periods=[1, 10, 100, 10000], units='m/s',distribution='Weibull3P_MOM',method='POT',threshold='P99',output_file=f'monthly_extremes_weibull_current{depth}.csv') -#df = tables.table_monthly_return_periods(ds,var='HS',periods=[1, 10, 100, 10000], units='m',distribution='Weibull3P_MOM',method='POT',threshold='P99',output_file='HS_monthly_extremes_Weibull_POT_P99.csv') -#df = tables.table_monthly_return_periods(ds,var='HS',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',output_file='HS_monthly_extremes_Weibull.csv') -#df = tables.table_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',output_file='HS_dir_extremes_Weibull.csv') -#df = tables.table_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM',method='POT',threshold='P99', units='m',output_file='HS_dir_extremes_Weibull_POT.csv') -# plots.plot_directional_return_periods(ds,var='HS',var_dir='DIRM',periods=[1, 10, 100, 10000],distribution='Weibull3P_MOM', units='m',adjustment='NORSOK',method='POT',threshold='P99',output_file='dir_extremes_Weibull_norsok.png') -# df = tables.table_profile_return_values(ds,var=['W10','W50','W80','W100','W150'], z=[10, 50, 80, 100, 150], periods=[1, 10, 100, 10000], output_file='RVE_wind_profile.csv') -#fig = plots.plot_profile_return_values(ds_ocean,var=['current_speed_' + d for d in depth], z=[float(d[:-1]) for d in depth], periods=[1, 10, 100, 10000],reverse_yaxis=True, output_file='RVE_current_profile.png') -#df = tables.table_current_for_given_wind(ds_all, var_curr='current_speed_0m', var_wind='W10', bin_width=2, max_wind=42, output_file='table_perc_current_for_wind.csv') -#plots.plot_current_for_given_wind(ds_all, var_curr='current_speed_0m', var_wind='W10',max_wind=40 ,output_file='curr_for_given_wind.png') -#df = tables.table_current_for_given_hs(ds_all, var_curr='current_speed_0m', var_hs='HS', bin_width=2, max_hs=20, output_file='table_perc_current_for_Hs.csv') -#ds['current_speed_0m'] = 0.05*ds['W10'] -#df = tables.table_current_for_given_wind(ds, var_curr='current_speed_0m', var_wind='W10', bin_width=2, max_wind=42, output_file='table_perc_current_for_wind.csv') -#df = tables.table_current_for_given_hs(ds, var_curr='current_speed_0m', var_hs='HS', bin_width=2, max_hs=20, output_file='table_perc_current_for_Hs.csv') - -#plots.plot_current_for_given_hs(ds_all, var_curr='current_speed_0m', var_hs='HS', max_hs=20, output_file='curr_for_given_hs.png') -#df = tables.table_extreme_current_profile_rv(ds_ocean, var=['current_speed_' + d for d in depth], z=[float(d[:-1]) for d in depth], periods=[1,100,1000],percentile=95, output_file='table_extreme_current_profile_rv.png') -#df = 
tables.table_profile_stats(ds_ocean, var=['current_speed_' + d for d in depth], z=[float(d[:-1]) for d in depth], var_dir=['current_direction_' + d for d in depth], output_file='table_profile_stats.csv') -#fig = plots.plot_profile_stats(ds_ocean,var=['current_speed_' + d for d in depth], z=[float(d[:-1]) for d in depth],reverse_yaxis=True, output_file='stats_current_profile.png') -#df = tables.table_current_for_rv_wind(ds_all, var_curr='current_speed_0m', var_wind='W10',periods=[1,10,100,10000],output_file='Uc_for_rv_wind.csv') -#df = tables.table_current_for_rv_hs(ds_all, var_curr='current_speed_0m', var_hs='HS',periods=[1,10,100,10000],output_file='Uc_for_rv_hs.csv') - - -# Sea temperature -# df = tables.table_profile_monthly_stats(ds_ocean, var=['temp_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='mean', output_file='table_mean_temp_profile_monthly_stats.png') -# df = tables.table_profile_monthly_stats(ds_ocean, var=['temp_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='std.dev', output_file='table_std_temp_profile_monthly_stats.png') -# df = tables.table_profile_monthly_stats(ds_ocean, var=['temp_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='minimum', output_file='table_min_temp_profile_monthly_stats.png') -# df = tables.table_profile_monthly_stats(ds_ocean, var=['temp_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='maximum', output_file='table_max_temp_profile_monthly_stats.png') - -# fig = plots.plot_profile_monthly_stats(ds_ocean, var=['temp_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='mean',title='Mean Sea Temperature [°C]', output_file='plot_mean_temp_profile_monthly_stats.png') -# fig = plots.plot_profile_monthly_stats(ds_ocean, var=['temp_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='std.dev',title='St.Dev Sea Temperature [°C]', output_file='plot_std_temp_profile_monthly_stats.png') -# fig = plots.plot_profile_monthly_stats(ds_ocean, var=['temp_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='minimum',title='Min. Sea Temperature [°C]', output_file='plot_min_temp_profile_monthly_stats.png') -# fig = plots.plot_profile_monthly_stats(ds_ocean, var=['temp_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='maximum',title='Max. 
Sea Temperature [°C]', output_file='plot_max_temp_profile_monthly_stats.png') - -# # Sainity: -# df = tables.table_profile_monthly_stats(ds_ocean, var=['salt_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='mean', output_file='table_mean_sal_profile_monthly_stats.png') -# df = tables.table_profile_monthly_stats(ds_ocean, var=['salt_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='std.dev', output_file='table_std_sal_profile_monthly_stats.png') -# df = tables.table_profile_monthly_stats(ds_ocean, var=['salt_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='minimum', output_file='table_min_sal_profile_monthly_stats.png') -# df = tables.table_profile_monthly_stats(ds_ocean, var=['salt_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='maximum', output_file='table_max_sal_profile_monthly_stats.png') - -# fig = plots.plot_profile_monthly_stats(ds_ocean, var=['salt_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='mean',title='Mean Salinity [PSU]', output_file='plot_mean_sal_profile_monthly_stats.png') -# fig = plots.plot_profile_monthly_stats(ds_ocean, var=['salt_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='std.dev',title='St.Dev Salinity [PSU]', output_file='plot_std_sal_profile_monthly_stats.png') -# fig = plots.plot_profile_monthly_stats(ds_ocean, var=['salt_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='minimum',title='Min. Salinity [PSU]', output_file='plot_min_sal_profile_monthly_stats.png') -# fig = plots.plot_profile_monthly_stats(ds_ocean, var=['salt_' + d for d in depth], z=[float(d[:-1]) for d in depth], method='maximum',title='Max. Salinity [PSU]', output_file='plot_max_sal_profile_monthly_stats.png') - - -# Water levels: -#ds_tide = pd.read_csv('../tests/data/GTSM_test.csv',comment='#',index_col=0, parse_dates=True) -#df = tables.table_tidal_levels(ds_tide, var='tide', output_file='tidal_levels.csv') -#fig = plots.plot_tidal_levels(ds_tide, var='tide',start_time='2010-01-01',end_time='2010-03-30', output_file='tidal_levels.png') -#df = tables.table_storm_surge_for_given_hs(ds_all, var_surge='zeta_0m', var_hs='HS', bin_width=1, max_hs=20, output_file='table_perc_surge_for_Hs.csv') -#fig = plots.plot_storm_surge_for_given_hs(ds,var_surge='zeta_0m', var_hs='HS', max_hs=20, output_file='surge_for_given_hs.png') -#df = tables.table_extreme_total_water_level(ds, var_hs='HS',var_tp='TP',var_surge='zeta_0m', var_tide='tide', periods=[100,10000], output_file='table_extreme_total_water_level.csv') -#df = tables.table_storm_surge_for_rv_hs(ds, var_hs='HS',var_tp='TP',var_surge='zeta_0m', var_tide='tide', periods=[1,10,100,10000],depth=200, output_file='table_storm_surge_for_rv_hs.csv') - -#print(df.shape) diff --git a/examples/example_report.py b/examples/example_report.py new file mode 100644 index 0000000..b805887 --- /dev/null +++ b/examples/example_report.py @@ -0,0 +1,63 @@ +from metocean_stats import plots, tables, stats, maps +from metocean_stats.stats.aux_funcs import * +from docx import Document +from docx.shared import Inches + + +# Read test data +ds = readNora10File('../tests/data/NORA_test.txt') +# Create a new Document +doc = Document() + + + +# Add introductory text +intro_text = "This is an automated metocean report produced by MET Norway." 
+doc.add_paragraph(intro_text)
+
+
+# Add a title
+doc.add_heading('Metocean Report', level=1)
+
+
+# Add the map figure
+output_file='map.png'
+maps.plot_points_on_map(lon=[3.35], lat=[60.40],label=['NORA3'],bathymetry='NORA3',output_file=output_file)
+doc.add_heading('Figure 1: The figure shows the NORA3 grid points selected for the analysis', level=2)
+doc.add_picture(output_file, width=Inches(5))
+
+output_file='wave_100yrs.png'
+maps.plot_extreme_wave_map(return_period=100, product='NORA3', title='100-yr return values Hs (NORA3)', set_extent = [0,30,52,73],output_file=output_file)
+doc.add_heading('Figure 2: 100-year return period for significant wave height in the Nordics based on NORA3 (period: 1991-2020) using Gumbel distribution', level=2)
+doc.add_picture(output_file, width=Inches(5))
+
+output_file='wind_100yrs.png'
+maps.plot_extreme_wind_map(return_period=100, product='NORA3',z=10, title='100-yr return values Wind at 10 m (NORA3)', set_extent = [0,30,52,73], output_file=output_file)
+doc.add_heading('Figure 3: 100-year return period for wind speed at 10 m in the Nordics based on NORA3 (period: 1991-2020) using Generalized Pareto distribution', level=2)
+doc.add_picture(output_file, width=Inches(5))
+
+
+
+# Add the first table
+df = tables.table_monthly_non_exceedance(ds,var= 'W10',step_var=2,output_file=None)
+doc.add_heading('Table 1: Monthly non-exceedance', level=2)
+table1 = doc.add_table(rows=1, cols=df.shape[1] + 1)
+table1.style = 'Table Grid'
+
+# Add the header row for the first table (first cell is left for the row labels)
+hdr_cells = table1.rows[0].cells
+hdr_cells[0].text = ''
+for i, column in enumerate(df.columns):
+    hdr_cells[i + 1].text = str(column)
+
+# Add the data rows for the first table (row label followed by the values)
+for label, row in df.iterrows():
+    row_cells = table1.add_row().cells
+    row_cells[0].text = str(label)
+    for j, value in enumerate(row):
+        row_cells[j + 1].text = str(value)
+
+
+
+# Save the document
+doc.save('metocean-report.docx')
+
+print("Document created successfully.")
diff --git a/metocean_stats/maps/__init__.py b/metocean_stats/maps/__init__.py
new file mode 100644
index 0000000..2cd1062
--- /dev/null
+++ b/metocean_stats/maps/__init__.py
@@ -0,0 +1,10 @@
+"""
+This is the Maps-module that contains functions for map plotting.
+
+Copyright 2024, Konstantinos Christakos, MET Norway
+"""
+
+from ..stats.general import *
+from ..stats.dir import *
+from ..stats.extreme import *
+from .maps import *
diff --git a/metocean_stats/maps/maps.py b/metocean_stats/maps/maps.py
new file mode 100644
index 0000000..59818e0
--- /dev/null
+++ b/metocean_stats/maps/maps.py
@@ -0,0 +1,197 @@
+import numpy as np
+import xarray as xr
+import cartopy.crs as ccrs
+import cartopy.feature as cfeature
+from matplotlib.colors import ListedColormap
+import matplotlib.pyplot as plt
+
+# Helper function to handle projection and coordinate transformation
+def get_transformed_coordinates(ds, lon_var, lat_var, projection_type='rotated_pole'):
+    """
+    Get transformed coordinates and the appropriate Cartopy projection based on the dataset.
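+
+    In this module the NORA3 wave statistics files are read on a rotated-pole grid
+    ('rlon'/'rlat'), while the NORA3 wind statistics files are on a Lambert conformal
+    grid ('x'/'y'); both are transformed here to standard longitude/latitude (PlateCarree).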
+ + Parameters: + - ds: xarray.Dataset, the dataset to extract projection info from + - lon_var: str, the longitude variable name in the dataset + - lat_var: str, the latitude variable name in the dataset + - projection_type: str, type of projection to use ('rotated_pole' or 'lambert_conformal') + + Returns: + - transformed_lon: np.array, transformed longitudes + - transformed_lat: np.array, transformed latitudes + - proj: cartopy.crs.Projection, the projection object + """ + lon = ds[lon_var].values + lat = ds[lat_var].values + lon, lat = np.meshgrid(lon, lat) + + if projection_type == 'rotated_pole': + rotated_pole = ccrs.RotatedPole( + pole_latitude=ds.projection_ob_tran.grid_north_pole_latitude, + pole_longitude=ds.projection_ob_tran.grid_north_pole_longitude + ) + proj = ccrs.PlateCarree() + transformed_coords = proj.transform_points(rotated_pole, lon, lat) + elif projection_type == 'lambert_conformal': + lambert_params = ds['projection_lambert'].attrs + lambert_conformal = ccrs.LambertConformal( + central_longitude=lambert_params['longitude_of_central_meridian'], + central_latitude=lambert_params['latitude_of_projection_origin'], + standard_parallels=lambert_params.get('standard_parallel', None), + globe=ccrs.Globe(ellipse='sphere', semimajor_axis=lambert_params.get('earth_radius', 6371000.0)) + ) + proj = ccrs.PlateCarree() + transformed_coords = proj.transform_points(lambert_conformal, lon, lat) + else: + raise ValueError(f"Unknown projection type: {projection_type}") + + transformed_lon = transformed_coords[..., 0] + transformed_lat = transformed_coords[..., 1] + return transformed_lon, transformed_lat, proj + +# Function to plot points on the map +def plot_points_on_map(lon, lat, label, bathymetry='NORA3', output_file='map.png'): + lon_list = lon if isinstance(lon, (list, tuple)) else [lon] + lat_list = lat if isinstance(lat, (list, tuple)) else [lat] + label_list = label if isinstance(label, (list, tuple)) else [label] + + if bathymetry == 'NORA3': + ds = xr.open_dataset('https://thredds.met.no/thredds/dodsC/windsurfer/mywavewam3km_files/2022/12/20221231_MyWam3km_hindcast.nc') + standard_lon, standard_lat, _ = get_transformed_coordinates(ds, 'rlon', 'rlat', projection_type='rotated_pole') + depth = ds['model_depth'].values + else: + pass + + # Plotting + fig = plt.figure(figsize=(10, 8)) + ax = plt.axes(projection=ccrs.PlateCarree()) + + for lon, lat, label in zip(lon_list, lat_list, label_list): + ax.plot(lon, lat, marker='o', markersize=8, linewidth=0, label=f'{label}', transform=ccrs.PlateCarree()) + + lat_min, lat_max = max(min(lat_list) - 3, -90), min(max(lat_list) + 3, 90) + lon_min, lon_max = max(min(lon_list) - 5, -180), min(max(lon_list) + 5, 180) + + ax.set_extent([lon_min, lon_max, lat_min, lat_max], crs=ccrs.PlateCarree()) + coast = cfeature.NaturalEarthFeature(category='physical', name='coastline', scale='10m', edgecolor='lightgrey', facecolor='darkkhaki') + ax.add_feature(coast) + + if bathymetry == 'NORA3': + mask_extent = ( + (standard_lon >= lon_min) & (standard_lon <= lon_max) & + (standard_lat >= lat_min) & (standard_lat <= lat_max) + ) + depth_extent = np.ma.masked_where(~mask_extent, depth) + cont = ax.contourf(standard_lon, standard_lat, depth_extent, levels=30, + cmap='binary', transform=ccrs.PlateCarree()) + cbar = plt.colorbar(cont, orientation='vertical', pad=0.02, aspect=16, shrink=0.6) + cbar.set_label('Depth [m]') + else: + pass + + gl = ax.gridlines(draw_labels=True) + gl.top_labels = False + gl.right_labels = False + ax.legend(loc='upper left') + + 
plt.tight_layout() + plt.savefig(output_file) + plt.close() + return fig + +# Function to plot extreme wave map +def plot_extreme_wave_map(return_period=50, product='NORA3', title='empty title', set_extent=[0, 30, 52, 73], output_file='extreme_wave_map.png'): + if product == 'NORA3': + ds = xr.open_dataset(f'https://thredds.met.no/thredds/dodsC/nora3_subset_stats/wave/CF_hs_{return_period}y_extreme_gumbel_NORA3.nc') + standard_lon, standard_lat, _ = get_transformed_coordinates(ds, 'rlon', 'rlat', projection_type='rotated_pole') + hs_flat = ds['hs'].values.flatten() + else: + print(product, 'is not available') + return + + mask = ~np.isnan(hs_flat) + hs_flat = hs_flat[mask] + lon_flat = standard_lon.flatten()[mask] + lat_flat = standard_lat.flatten()[mask] + + cmap = ListedColormap(plt.cm.get_cmap('coolwarm', 25)(np.linspace(0, 1, 25))) + + fig = plt.figure(figsize=(9, 10)) + ax = plt.axes(projection=ccrs.LambertConformal(central_longitude=15)) + ax.coastlines(resolution='10m', zorder=3) + ax.add_feature(cfeature.BORDERS, linestyle=':', zorder=3) + + hb = ax.hexbin(lon_flat, lat_flat, C=hs_flat, gridsize=180, cmap=cmap, vmin=0, vmax=25, edgecolors='white', transform=ccrs.PlateCarree(), reduce_C_function=np.mean, zorder=1) + ax.add_feature(cfeature.LAND, color='darkkhaki', zorder=2) + + ax.set_extent(set_extent, crs=ccrs.PlateCarree()) + gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True, x_inline=False, y_inline=False, linewidth=0.5, color='black', alpha=1, linestyle='--') + gl.right_labels = False + gl.top_labels = False + + cb = plt.colorbar(hb, ax=ax, orientation='vertical', shrink=0.7, pad=0.05) + cb.set_label(ds['hs'].attrs.get('standard_name', 'hs') + ' [' + ds['hs'].attrs.get('units', 'm') + ']', fontsize=14) + plt.tight_layout() + + if title: + plt.title(title, fontsize=16) + plt.savefig(output_file, dpi=300) + return fig + +# Function to plot extreme wind map +def plot_extreme_wind_map(return_period=50, product='NORA3', z=0, title='empty title', set_extent=[0, 30, 52, 73], output_file='extreme_wind_map.png'): + """ + Plots an extreme wind speed map based on the specified return level and dataset. + + Parameters: + - return_period: int, optional, default=50 + The return period in years (e.g., 50 years) for the extreme wind speed. + - product: str, optional, default='NORA3' + The dataset to use for the wind speed data. Currently only 'NORA3' is supported. + - title: str, optional, default='empty title' + The title to display on the map. + - set_extent: list, optional, default=[0, 30, 52, 73] + The map extent specified as [lon_min, lon_max, lat_min, lat_max]. + - output_file: str, optional, default='extreme_wind_map.png' + The filename to save the plot. + + Returns: + - fig: matplotlib.figure.Figure + The figure object containing the plot. 
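+
+    Example (illustrative; assumes network access to thredds.met.no and that the
+    requested height z is available in the NORA3 statistics file):
+
+    >>> fig = plot_extreme_wind_map(return_period=100, product='NORA3', z=10,
+    ...                             title='100-yr return values Wind at 10 m (NORA3)',
+    ...                             set_extent=[0, 30, 52, 73],
+    ...                             output_file='wind_100yrs.png')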
+ """ + if product == 'NORA3': + ds = xr.open_dataset(f'https://thredds.met.no/thredds/dodsC/nora3_subset_stats/atm/CF_Return_Levels_3hourly_WindSpeed_6heights_1991_2020_zlev_period{return_period}yr.nc') + standard_lon, standard_lat, _ = get_transformed_coordinates(ds, 'x', 'y', projection_type='lambert_conformal') + hs_flat = ds['wind_speed'].sel(z=z).values.flatten() + else: + print(product, 'is not available') + return + + mask = ~np.isnan(hs_flat) + hs_flat = hs_flat[mask] + lon_flat = standard_lon.flatten()[mask] + lat_flat = standard_lat.flatten()[mask] + + cmap = ListedColormap(plt.cm.get_cmap('terrain', int(70/2.5))(np.linspace(0, 1, int(70/2.5)))) + + fig = plt.figure(figsize=(9, 10)) + ax = plt.axes(projection=ccrs.LambertConformal(central_longitude=15)) + ax.coastlines(resolution='10m', zorder=3) + ax.add_feature(cfeature.BORDERS, linestyle=':', zorder=3) + + hb = ax.hexbin(lon_flat, lat_flat, C=hs_flat, gridsize=180, cmap=cmap, vmin=0, vmax=70, edgecolors='white', transform=ccrs.PlateCarree(), reduce_C_function=np.mean, zorder=1) + ax.add_feature(cfeature.LAND, color='darkkhaki', zorder=2) + + ax.set_extent(set_extent, crs=ccrs.PlateCarree()) + gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True, x_inline=False, y_inline=False, linewidth=0.5, color='black', alpha=1, linestyle='--') + gl.right_labels = False + gl.top_labels = False + + cb = plt.colorbar(hb, ax=ax, orientation='vertical', shrink=0.7, pad=0.05) + cb.set_label(ds['wind_speed'].attrs.get('standard_name', 'wind_speed') + ' [' + ds['wind_speed'].attrs.get('units', 'm/s') + ']', fontsize=14) + plt.tight_layout() + + if title: + plt.title(title, fontsize=16) + plt.savefig(output_file, dpi=300) + return fig diff --git a/metocean_stats/plots/dir.py b/metocean_stats/plots/dir.py index 7ae6327..eb3609a 100644 --- a/metocean_stats/plots/dir.py +++ b/metocean_stats/plots/dir.py @@ -15,7 +15,10 @@ def rose(wd,ws,max_ws,step_ws,min_percent, max_percent, step_percent): return fig -def var_rose(data, direction,intensity, method='overall',max_perc=40,decimal_places=1, units='m/s',single_figure=True, output_file='rose.png'): +def var_rose(data, var_dir,var, method='overall',max_perc=40,decimal_places=1, units='m/s',single_figure=True, output_file='rose.png'): + + direction = var_dir + intensity = var direction2 = data[direction] intensity2 = data[intensity] diff --git a/metocean_stats/plots/extreme.py b/metocean_stats/plots/extreme.py index e94a46f..aeb7bad 100644 --- a/metocean_stats/plots/extreme.py +++ b/metocean_stats/plots/extreme.py @@ -270,7 +270,7 @@ def plot_multi_diagnostic_return_levels(data, var, if output_file is not None: plt.savefig(output_file) - plt.show() + return fig def plot_threshold_sensitivity(df, output_file): """ @@ -300,8 +300,7 @@ def plot_threshold_sensitivity(df, output_file): # Save the plot if a path is given plt.savefig(output_file) - plt.show() - + return fig def plot_joint_2D_contour(data=pd.DataFrame,var1='hs', var2='tp', periods=[50,100], output_file='2D_contours.png') -> Sequence[Dict]: contours, data_2D = get_joint_2D_contour(data=data,var1=var1,var2=var2, periods=periods) diff --git a/metocean_stats/plots/general.py b/metocean_stats/plots/general.py index d91ca02..ee2b360 100644 --- a/metocean_stats/plots/general.py +++ b/metocean_stats/plots/general.py @@ -160,14 +160,14 @@ def plot_scatter(df,var1,var2,var1_units='m', var2_units='m',title=' ',regressio return fig -def plot_monthly_stats(data: pd.DataFrame, var1: str, show=['Mean','P99','Maximum'], title: str='Variable [units] 
diff --git a/metocean_stats/plots/extreme.py b/metocean_stats/plots/extreme.py
index e94a46f..aeb7bad 100644
--- a/metocean_stats/plots/extreme.py
+++ b/metocean_stats/plots/extreme.py
@@ -270,7 +270,7 @@ def plot_multi_diagnostic_return_levels(data, var,
     if output_file is not None:
         plt.savefig(output_file)
 
-    plt.show()
+    return fig
 
 def plot_threshold_sensitivity(df, output_file):
     """
@@ -300,8 +300,7 @@ def plot_threshold_sensitivity(df, output_file):
     # Save the plot if a path is given
     plt.savefig(output_file)
 
-    plt.show()
-
+    return fig
 
 def plot_joint_2D_contour(data=pd.DataFrame,var1='hs', var2='tp', periods=[50,100], output_file='2D_contours.png') -> Sequence[Dict]:
     contours, data_2D = get_joint_2D_contour(data=data,var1=var1,var2=var2, periods=periods)
diff --git a/metocean_stats/plots/general.py b/metocean_stats/plots/general.py
index d91ca02..ee2b360 100644
--- a/metocean_stats/plots/general.py
+++ b/metocean_stats/plots/general.py
@@ -160,14 +160,14 @@ def plot_scatter(df,var1,var2,var1_units='m', var2_units='m',title=' ',regressio
 
     return fig
 
-def plot_monthly_stats(data: pd.DataFrame, var1: str, show=['Mean','P99','Maximum'], title: str='Variable [units] location',output_file: str = 'monthly_stats.png'):
+def plot_monthly_stats(data: pd.DataFrame, var: str, show=['Mean','P99','Maximum'], title: str='Variable [units] location',output_file: str = 'monthly_stats.png'):
     """
     Plot monthly statistics of a variable from a DataFrame.
 
     Parameters:
         data (pd.DataFrame): The DataFrame containing the data.
-        var1 (str): The name of the variable to plot.
-        step_var1 (float): The step size for computing cumulative statistics.
+        var (str): The name of the variable to plot.
+        step_var (float): The step size for computing cumulative statistics (fixed internally to 0.5 in this function).
         title (str, optional): Title of the plot. Default is 'Variable [units] location'.
         output_file (str, optional): File path to save the plot. Default is 'monthly_stats.png'.
@@ -182,7 +182,7 @@ def plot_monthly_stats(data: pd.DataFrame, var1: str, show=['Mean','P99','Maximu
         plot_monthly_stats(data, 'temperature', 0.1, title='Monthly Temperature Statistics', output_file='temp_stats.png')
     """
     fig, ax = plt.subplots()
-    cumulative_percentage = table_monthly_non_exceedance(data,var1,step_var1=0.5)
+    cumulative_percentage = table_monthly_non_exceedance(data,var,step_var=0.5)
     for i in show:
         cumulative_percentage.loc[i][:-1].plot(marker = 'o')
     plt.title(title,fontsize=16)
@@ -192,14 +192,14 @@ def plot_monthly_stats(data: pd.DataFrame, var1: str, show=['Mean','P99','Maximu
     plt.savefig(output_file)
     return fig
 
-def plot_directional_stats(data: pd.DataFrame, var1: str, step_var1: float, var_dir: str,show=['Mean','P99','Maximum'], title: str='Variable [units] location',output_file: str = 'directional_stats.png'):
+def plot_directional_stats(data: pd.DataFrame, var: str, step_var: float, var_dir: str,show=['Mean','P99','Maximum'], title: str='Variable [units] location',output_file: str = 'directional_stats.png'):
     """
     Plot directional statistics of a variable from a DataFrame.
 
     Parameters:
         data (pd.DataFrame): The DataFrame containing the data.
-        var1 (str): The name of the variable to plot.
-        step_var1 (float): The step size for computing cumulative statistics.
+        var (str): The name of the variable to plot.
+        step_var (float): The step size for computing cumulative statistics.
         var_dir (str): The name of the directional variable.
         title (str, optional): Title of the plot. Default is 'Variable [units] location'.
         output_file (str, optional): File path to save the plot. Default is 'directional_stats.png'.
@@ -215,7 +215,7 @@ def plot_directional_stats(data: pd.DataFrame, var1: str, step_var1: float, var_
         plot_directional_stats(data, 'hs', 0.1, 'Pdir', title='Directional Wave Statistics', output_file='directional_hs_stats.png')
     """
     fig, ax = plt.subplots()
-    cumulative_percentage = table_directional_non_exceedance(data,var1,step_var1,var_dir)
+    cumulative_percentage = table_directional_non_exceedance(data,var,step_var,var_dir)
     for i in show:
         cumulative_percentage.loc[i][:-1].plot(marker = 'o')
     #cumulative_percentage.loc['Maximum'][:-1].plot(marker = 'o')
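Because the plotting helpers now return the figure instead of calling plt.show(), an interactive display (or any further styling) has to be requested explicitly by the caller. A rough sketch, assuming df is the hindcast DataFrame used elsewhere in the examples:

    import matplotlib.pyplot as plt
    from metocean_stats import plots

    fig = plots.plot_monthly_stats(df, var='HS', show=['Mean', 'P99', 'Maximum'],
                                   title='Hs [m]', output_file='hs_monthly_stats.png')
    fig.axes[0].set_ylabel('Hs [m]')   # optional post-processing on the returned figure
    plt.show()                         # display only when running interactively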
diff --git a/metocean_stats/stats/map_funcs.py b/metocean_stats/stats/map_funcs.py
deleted file mode 100644
index 93feca5..0000000
--- a/metocean_stats/stats/map_funcs.py
+++ /dev/null
@@ -1,275 +0,0 @@
-import numpy as np
-import xarray as xr
-import cartopy.crs as ccrs
-import cartopy.feature as cfeature
-from matplotlib.colors import ListedColormap
-import matplotlib.pyplot as plt
-
-def plot_points_on_map(lon, lat, label, bathymetry='NORA3',output_file='map.png'):
-    lon_list = lon
-    lat_list = lat
-    label_list = label
-
-    # Ensure lon_list and lat_list are iterable (lists or arrays)
-    if not isinstance(lon_list, (list, tuple)):
-        lon_list = [lon_list]
-    if not isinstance(lat_list, (list, tuple)):
-        lat_list = [lat_list]
-    if not isinstance(label_list, (list, tuple)):
-        label_list = [label_list]
-
-    # Extract the data variables
-    if bathymetry == 'NORA3':
-        var = 'model_depth'
-        file = 'https://thredds.met.no/thredds/dodsC/windsurfer/mywavewam3km_files/2022/12/20221231_MyWam3km_hindcast.nc'
-        ds = xr.open_dataset(file)
-        depth = ds[var].values
-        rlat = ds['rlat'].values
-        rlon = ds['rlon'].values
-
-        # Get the rotated pole parameters from the dataset attributes
-        rotated_pole = ccrs.RotatedPole(
-            pole_latitude=ds.projection_ob_tran.grid_north_pole_latitude,
-            pole_longitude=ds.projection_ob_tran.grid_north_pole_longitude
-        )
-
-        # Create meshgrid for lat/lon
-        lon, lat = np.meshgrid(rlon, rlat)
-
-        # Transform rotated coordinates to standard lat/lon coordinates
-        proj = ccrs.PlateCarree()
-        transformed_coords = proj.transform_points(rotated_pole, lon, lat)
-        standard_lon = transformed_coords[..., 0]
-        standard_lat = transformed_coords[..., 1]
-    else:
-        pass
-
-    # Plotting
-    fig = plt.figure(figsize=(10, 8))
-    ax = plt.axes(projection=ccrs.PlateCarree())
-    # Plot the points with automatic labels
-    for i, (lon, lat, label) in enumerate(zip(lon_list, lat_list, label_list)):
-        ax.plot(lon, lat, marker='o', markersize=8, linewidth=0, label=f'{label}', transform=ccrs.PlateCarree())
-
-    # Calculate zoom-in extent
-    lat_min = max(min(lat_list) - 3, -90)   # Ensure latitude does not go below -90
-    lat_max = min(max(lat_list) + 3, 90)    # Ensure latitude does not go above 90
-    lon_min = max(min(lon_list) - 5, -180)  # Ensure longitude does not go below -180
-    lon_max = min(max(lon_list) + 5, 180)   # Ensure longitude does not go above 180
-
-    # Set extent of the map
-    ax.set_extent([lon_min, lon_max, lat_min, lat_max], crs=ccrs.PlateCarree())
-    # Add coastlines and other features
-    # Add high-resolution coastlines with grey facecolor for land
-    coast = cfeature.NaturalEarthFeature(
-        category='physical', name='coastline', scale='10m', edgecolor='lightgrey', facecolor='darkkhaki')
-    ax.add_feature(coast)
-
-    # Plot depth
-    if bathymetry == 'NORA3':
-        # Mask depth data to the zoomed extent
-        mask_extent = (
-            (standard_lon >= lon_min) & (standard_lon <= lon_max) &
-            (standard_lat >= lat_min) & (standard_lat <= lat_max)
-        )
-        depth_extent = np.ma.masked_where(~mask_extent, depth)
-
-        # Plot depth with masked data
-        cont = ax.contourf(standard_lon, standard_lat, depth_extent, levels=30,
-                           cmap='binary', transform=ccrs.PlateCarree())
-
-        # Add colorbar with limits based on the depth in the zoomed extent
-        cbar = plt.colorbar(cont, orientation='vertical', pad=0.02, aspect=16, shrink=0.6)
-        cbar.set_label('Depth [m]')
-    else:
-        pass
-
-    # Add gridlines
-    gl = ax.gridlines(draw_labels=True)
-    gl.top_labels = False
-    gl.right_labels = False
-
-    # Add legend
-    ax.legend(loc='upper left')
-
-    plt.tight_layout()
-    plt.savefig(output_file)
-    plt.close()
-    return fig
-
-def plot_extreme_wave_map(return_level=50, product='NORA3', title='empty title', set_extent = [0,30,52,73], output_file='extreme_wave_map.png'):
-    if product == 'NORA3':
-        lon = 'rlon'
-        lat = 'rlat'
-        var = 'hs'
-        num_colors= 25
-        file = f'https://thredds.met.no/thredds/dodsC/nora3_subset_stats/wave/CF_hs_{return_level}y_extreme_gumbel_NORA3.nc'
-    else:
-        print(product,'is not available')
-
-    ds = xr.open_dataset(file)
-    # Extract the data variables
-    hs = ds[var].values
-    rlat = ds[lat].values
-    rlon = ds[lon].values
-
-    # Get the rotated pole parameters from the dataset attributes
-    rotated_pole = ccrs.RotatedPole(
-        pole_latitude=ds.projection_ob_tran.grid_north_pole_latitude,
-        pole_longitude=ds.projection_ob_tran.grid_north_pole_longitude
-    )
-
-    # Create meshgrid for lat/lon
-    lon, lat = np.meshgrid(rlon, rlat)
-
-    # Transform rotated coordinates to standard lat/lon coordinates
-    proj = ccrs.PlateCarree()
-    transformed_coords = proj.transform_points(rotated_pole, lon, lat)
-    standard_lon = transformed_coords[..., 0]
-    standard_lat = transformed_coords[..., 1]
-
-    # Flatten the arrays
-    hs_flat = hs.flatten()
-    lat_flat = standard_lat.flatten()
-    lon_flat = standard_lon.flatten()
-
-    # Create a mask for NaN values
-    mask = ~np.isnan(hs_flat)
-    hs_flat = hs_flat[mask]
-    lat_flat = lat_flat[mask]
-    lon_flat = lon_flat[mask]
-
-    # Create a colormap with discrete colors
-    num_colors = num_colors  # Number of discrete colors
-    cmap = plt.cm.get_cmap('coolwarm', num_colors)
-    cmap = ListedColormap(cmap(np.linspace(0, 1, num_colors)))
-
-    # Create the plot
-    fig = plt.figure(figsize=(9, 10))
-    ax = plt.axes(projection=ccrs.LambertConformal(central_longitude=15))
-    ax.coastlines(resolution='10m', zorder=3)
-    ax.add_feature(cfeature.BORDERS, linestyle=':', zorder=3)
-
-    # Plot the significant wave height using hexbin with a higher gridsize and discrete colors
-    hb = ax.hexbin(lon_flat, lat_flat, C=hs_flat, gridsize=180, cmap=cmap, vmin=0, vmax=num_colors, edgecolors='white', transform=ccrs.PlateCarree(), reduce_C_function=np.mean, zorder=1)
-
-    # Add the land feature on top of the hexbin plot
-    ax.add_feature(cfeature.LAND, color='darkkhaki', zorder=2)
-
-    # Add gridlines for latitude and longitude
-    coast=cfeature.NaturalEarthFeature(category='physical', name='coastline', scale='10m', edgecolor='lightgrey', facecolor='darkkhaki')
-    ax.add_feature(coast)
-
-    ax.set_extent(set_extent, crs=ccrs.PlateCarree())
-    gl=ax.gridlines(crs=ccrs.PlateCarree(),draw_labels=True,x_inline=False,y_inline=False,linewidth=0.5,color='black',alpha=1,linestyle='--')
-    gl.xlabel_style = {'size': 12}
-    gl.ylabel_style = {'size': 12}
-    gl.right_labels=False
-    gl.left_labels=True
-    gl.top_labels=False
-    gl.bottom_labels=True
-    gl.rotate_labels=False
-
-    # Add colorbar with discrete colors
-    cb = plt.colorbar(hb, ax=ax, orientation='vertical', shrink=0.7, pad=0.05, ticks=np.arange(0, num_colors, 2))
-    cb.set_label(ds[var].attrs.get('standard_name', var)+' ['+ds[var].attrs.get('units', var)+']', fontsize=14)
-    plt.tight_layout()
-
-    if title is None:
-        pass
-    else:
-        plt.title(title, fontsize=16)
-    plt.savefig(output_file,dpi=300)
-    return fig
-
-def plot_extreme_wind_map(return_level=50, product='NORA3', z=0, title='empty title', set_extent=[0, 30, 52, 73],output_file='extreme_wind_map.png'):
-    if product == 'NORA3':
-        lon = 'x'
-        lat = 'y'
-        var = 'wind_speed'
-        num_colors= int(70/2.5)
-        file= f'https://thredds.met.no/thredds/dodsC/nora3_subset_stats/atm/CF_Return_Levels_3hourly_WindSpeed_6heights_1991_2020_zlev_period{return_level}yr.nc'
-        height = []
-    else:
-        print(product,'is not available')
-
-    ds = xr.open_dataset(file)
-    # Extract the data variables
-    hs = ds[var].sel(z=z).values
-
-    rlat = ds[lat].values
-    rlon = ds[lon].values
-
-
-    # Get the Lambert conformal parameters from the dataset attributes
-    lambert_params = ds['projection_lambert'].attrs
-    rotated_pole = ccrs.LambertConformal(
-        central_longitude=lambert_params['longitude_of_central_meridian'],
-        central_latitude=lambert_params['latitude_of_projection_origin'],
-        standard_parallels=lambert_params.get('standard_parallel', None),
-        globe=ccrs.Globe(ellipse='sphere', semimajor_axis=lambert_params.get('earth_radius', 6371000.0))
-    )
-
-    # Create meshgrid for lat/lon
-    lon, lat = np.meshgrid(rlon, rlat)
-
-    # Transform rotated coordinates to standard lat/lon coordinates
-    proj = ccrs.PlateCarree()
-    transformed_coords = proj.transform_points(rotated_pole, lon, lat)
-    standard_lon = transformed_coords[..., 0]
-    standard_lat = transformed_coords[..., 1]
-
-    # Flatten the arrays
-    hs_flat = hs.flatten()
-    lat_flat = standard_lat.flatten()
-    lon_flat = standard_lon.flatten()
-
-    # Create a mask for NaN values
-    mask = ~np.isnan(hs_flat)
-    hs_flat = hs_flat[mask]
-    # Remove values greater than 70
-    #hs_flat = hs_flat[hs_flat <= 70]
-    lat_flat = lat_flat[mask]
-    lon_flat = lon_flat[mask]
-    #lat_flat = lat_flat[hs_flat <= 70]
-    #lon_flat = lon_flat[hs_flat <= 70]
-
-
-    # Create a colormap with discrete colors
-    cmap = plt.cm.get_cmap('terrain', num_colors)
-    cmap = ListedColormap(cmap(np.linspace(0, 1, num_colors)))
-
-    # Create the plot
-    fig = plt.figure(figsize=(9, 10))
-    ax = plt.axes(projection=ccrs.LambertConformal(central_longitude=15))
-    ax.coastlines(resolution='10m', zorder=3)
-    ax.add_feature(cfeature.BORDERS, linestyle=':', zorder=3)
-    # Plot the significant wave height using hexbin with a higher gridsize and discrete colors
-#    hb = ax.hexbin(lon_flat, lat_flat, C=hs_flat, gridsize=100, cmap=cmap, vmin=hs_flat.min(), vmax=hs_flat.max(), edgecolors='white', transform=ccrs.PlateCarree(), reduce_C_function=np.mean, zorder=1)
-    hb = ax.hexbin(lon_flat, lat_flat, C=hs_flat, gridsize=100, cmap=cmap, vmin=0, vmax=70, edgecolors='white', transform=ccrs.PlateCarree(), reduce_C_function=np.mean, zorder=1)
-
-    # Add the land feature on top of the hexbin plot
-    ax.add_feature(cfeature.LAND, color='darkkhaki', zorder=2)
-
-    # Add gridlines for latitude and longitude
-    coast = cfeature.NaturalEarthFeature(category='physical', name='coastline', scale='10m', edgecolor='lightgrey', facecolor='darkkhaki')
-    ax.add_feature(coast)
-
-    ax.set_extent(set_extent, crs=ccrs.PlateCarree())
-    gl = ax.gridlines(crs=ccrs.PlateCarree(), draw_labels=True, x_inline=False, y_inline=False, linewidth=0.5, color='black', alpha=1, linestyle='--')
-    gl.xlabel_style = {'size': 12}
-    gl.ylabel_style = {'size': 12}
-    gl.right_labels = False
-    gl.left_labels = True
-    gl.top_labels = False
-    gl.bottom_labels = True
-    gl.rotate_labels = False
-
-    # Add colorbar with discrete colors
-    cb = plt.colorbar(hb, ax=ax, orientation='vertical', shrink=0.7, pad=0.05)
-    cb.set_label(ds[var].attrs.get('standard_name', var)+' ['+ds[var].attrs.get('units', var)+']', fontsize=14)
-    plt.tight_layout()
-    if title is not None:
-        plt.title(title, fontsize=16)
-    plt.savefig(output_file, dpi=300)
-    return fig
diff --git a/metocean_stats/tables/general.py b/metocean_stats/tables/general.py
index 6dff2f2..6b565b3 100644
--- a/metocean_stats/tables/general.py
+++ b/metocean_stats/tables/general.py
@@ -256,14 +256,14 @@ def table_monthly_min_mean_max(data, var,output_file='montly_min_mean_max.txt')
 
     return
 
-def table_monthly_non_exceedance(data: pd.DataFrame, var1: str, step_var1: float, output_file: str = None):
+def table_monthly_non_exceedance(data: pd.DataFrame, var: str, step_var: float, output_file: str = None):
     """
     Calculate monthly non-exceedance table for a given variable.
 
     Parameters:
         data (pd.DataFrame): Input DataFrame containing the data.
-        var1 (str): Name of the column in the DataFrame representing the variable.
-        step_var1 (float): Step size for binning the variable.
+        var (str): Name of the column in the DataFrame representing the variable.
+        step_var (float): Step size for binning the variable.
         output_file (str, optional): File path to save the output CSV file. Default is None.
 
     Returns:
@@ -271,14 +271,14 @@ def table_monthly_non_exceedance(data: pd.DataFrame, var1: str, step_var1: float
     """
 
     # Define bins
-    bins = np.arange(int(data[var1].min()), data[var1].max() + step_var1, step_var1).tolist()
+    bins = np.arange(int(data[var].min()), data[var].max() + step_var, step_var).tolist()
     labels = [f'<{num}' for num in [round(bin, 2) for bin in bins]]
 
     # Categorize data into bins
-    data[var1+'-level'] = pd.cut(data[var1], bins=bins, labels=labels[1:])
+    data[var+'-level'] = pd.cut(data[var], bins=bins, labels=labels[1:])
 
-    # Group by month and var1 bin, then count occurrences
-    grouped = data.groupby([data.index.month, var1+'-level'], observed=True).size().unstack(fill_value=0)
+    # Group by month and var bin, then count occurrences
+    grouped = data.groupby([data.index.month, var+'-level'], observed=True).size().unstack(fill_value=0)
 
     # Calculate percentage of time each data bin occurs in each month
 
@@ -288,29 +288,29 @@ def table_monthly_non_exceedance(data: pd.DataFrame, var1: str, step_var1: float
     cumulative_percentage = percentage_by_month.T.cumsum()
 
     # Insert 'Annual', 'Mean', 'P99', 'Maximum' rows
-    cumulative_percentage.loc['Minimum'] = data.groupby(data.index.month, observed=True)[var1].min()
-    cumulative_percentage.loc['Mean'] = data.groupby(data.index.month, observed=True)[var1].mean()
-    cumulative_percentage.loc['P50'] = data.groupby(data.index.month, observed=True)[var1].quantile(0.50)
-    cumulative_percentage.loc['P75'] = data.groupby(data.index.month, observed=True)[var1].quantile(0.75)
-    cumulative_percentage.loc['P95'] = data.groupby(data.index.month, observed=True)[var1].quantile(0.95)
-    cumulative_percentage.loc['P99'] = data.groupby(data.index.month, observed=True)[var1].quantile(0.99)
-    cumulative_percentage.loc['Maximum'] = data.groupby(data.index.month, observed=True)[var1].max()
+    cumulative_percentage.loc['Minimum'] = data.groupby(data.index.month, observed=True)[var].min()
+    cumulative_percentage.loc['Mean'] = data.groupby(data.index.month, observed=True)[var].mean()
+    cumulative_percentage.loc['P50'] = data.groupby(data.index.month, observed=True)[var].quantile(0.50)
+    cumulative_percentage.loc['P75'] = data.groupby(data.index.month, observed=True)[var].quantile(0.75)
+    cumulative_percentage.loc['P95'] = data.groupby(data.index.month, observed=True)[var].quantile(0.95)
+    cumulative_percentage.loc['P99'] = data.groupby(data.index.month, observed=True)[var].quantile(0.99)
+    cumulative_percentage.loc['Maximum'] = data.groupby(data.index.month, observed=True)[var].max()
 
     cumulative_percentage['Year'] = cumulative_percentage.mean(axis=1)[:-6]
-    #cumulative_percentage['Year'].iloc[-7] = data[var1].min()
-    #cumulative_percentage['Year'].iloc[-6] = data[var1].mean()
-    #cumulative_percentage['Year'].iloc[-5] = data[var1].quantile(0.50)
-    #cumulative_percentage['Year'].iloc[-4] = data[var1].quantile(0.75)
-    #cumulative_percentage['Year'].iloc[-3] = data[var1].quantile(0.95)
-    #cumulative_percentage['Year'].iloc[-2] = data[var1].quantile(0.99)
-    #cumulative_percentage['Year'].iloc[-1] = data[var1].max()
-
-    cumulative_percentage.loc[cumulative_percentage.index[-7], 'Year'] = data[var1].min()
-    cumulative_percentage.loc[cumulative_percentage.index[-6], 'Year'] = data[var1].mean()
-    cumulative_percentage.loc[cumulative_percentage.index[-5], 'Year'] = data[var1].quantile(0.50)
-    cumulative_percentage.loc[cumulative_percentage.index[-4], 'Year'] = data[var1].quantile(0.75)
-    cumulative_percentage.loc[cumulative_percentage.index[-3], 'Year'] = data[var1].quantile(0.95)
-    cumulative_percentage.loc[cumulative_percentage.index[-2], 'Year'] = data[var1].quantile(0.99)
-    cumulative_percentage.loc[cumulative_percentage.index[-1], 'Year'] = data[var1].max()
+    #cumulative_percentage['Year'].iloc[-7] = data[var].min()
+    #cumulative_percentage['Year'].iloc[-6] = data[var].mean()
+    #cumulative_percentage['Year'].iloc[-5] = data[var].quantile(0.50)
+    #cumulative_percentage['Year'].iloc[-4] = data[var].quantile(0.75)
+    #cumulative_percentage['Year'].iloc[-3] = data[var].quantile(0.95)
+    #cumulative_percentage['Year'].iloc[-2] = data[var].quantile(0.99)
+    #cumulative_percentage['Year'].iloc[-1] = data[var].max()
+
+    cumulative_percentage.loc[cumulative_percentage.index[-7], 'Year'] = data[var].min()
+    cumulative_percentage.loc[cumulative_percentage.index[-6], 'Year'] = data[var].mean()
+    cumulative_percentage.loc[cumulative_percentage.index[-5], 'Year'] = data[var].quantile(0.50)
+    cumulative_percentage.loc[cumulative_percentage.index[-4], 'Year'] = data[var].quantile(0.75)
+    cumulative_percentage.loc[cumulative_percentage.index[-3], 'Year'] = data[var].quantile(0.95)
+    cumulative_percentage.loc[cumulative_percentage.index[-2], 'Year'] = data[var].quantile(0.99)
+    cumulative_percentage.loc[cumulative_percentage.index[-1], 'Year'] = data[var].max()
 
     # Round 2 decimals
     cumulative_percentage = round(cumulative_percentage,2)
@@ -338,14 +338,14 @@ def table_monthly_non_exceedance(data: pd.DataFrame, var1: str, step_var1: float
 
     return cumulative_percentage
 
-def table_directional_non_exceedance(data: pd.DataFrame, var1: str, step_var1: float, var_dir: str, output_file: str = None):
+def table_directional_non_exceedance(data: pd.DataFrame, var: str, step_var: float, var_dir: str, output_file: str = None):
     """
     Calculate directional non-exceedance table for a given variable.
 
     Parameters:
         data (pd.DataFrame): Input DataFrame containing the data.
-        var1 (str): Name of the column in the DataFrame representing the variable.
-        step_var1 (float): Step size for binning the variable.
+        var (str): Name of the column in the DataFrame representing the variable.
+        step_var (float): Step size for binning the variable.
         var_dir (str): Name of the column in the DataFrame representing the direction.
         output_file (str, optional): File path to save the output CSV file. Default is None.
 
@@ -354,39 +354,39 @@ def table_directional_non_exceedance(data: pd.DataFrame, var1: str, step_var1: f
     """
 
     # Define bins
-    bins = np.arange(0, data[var1].max() + step_var1, step_var1).tolist()
+    bins = np.arange(0, data[var].max() + step_var, step_var).tolist()
     labels = [f'<{num}' for num in [round(bin, 2) for bin in bins]]
 
     add_direction_sector(data=data,var_dir=var_dir)
 
     # Categorize data into bins
-    data[var1+'-level'] = pd.cut(data[var1], bins=bins, labels=labels[1:])
+    data[var+'-level'] = pd.cut(data[var], bins=bins, labels=labels[1:])
 
     data = data.sort_values(by='direction_sector')
     data = data.set_index('direction_sector')
     data.index.name = 'direction_sector'
-    # Group by direction and var1 bin, then count occurrences
-    # Calculate percentage of time each var1 bin occurs in each month
-    percentage_by_dir = 100*data.groupby([data.index, var1+'-level'], observed=True)[var1].count().unstack()/len(data[var1])
+    # Group by direction and var bin, then count occurrences
+    # Calculate percentage of time each var bin occurs in each month
+    percentage_by_dir = 100*data.groupby([data.index, var+'-level'], observed=True)[var].count().unstack()/len(data[var])
     cumulative_percentage = np.cumsum(percentage_by_dir,axis=1).T
     cumulative_percentage = cumulative_percentage.fillna(method='ffill')
 
     # Calculate cumulative percentage for each bin across all months
     # Insert 'Omni', 'Mean', 'P99', 'Maximum' rows
-    cumulative_percentage.loc['Mean'] = data.groupby(data.index, observed=True)[var1].mean()
-    cumulative_percentage.loc['P50'] = data.groupby(data.index, observed=True)[var1].quantile(0.50)
-    cumulative_percentage.loc['P75'] = data.groupby(data.index, observed=True)[var1].quantile(0.75)
-    cumulative_percentage.loc['P95'] = data.groupby(data.index, observed=True)[var1].quantile(0.95)
-    cumulative_percentage.loc['P99'] = data.groupby(data.index, observed=True)[var1].quantile(0.99)
-    cumulative_percentage.loc['Maximum'] = data.groupby(data.index, observed=True)[var1].max()
+    cumulative_percentage.loc['Mean'] = data.groupby(data.index, observed=True)[var].mean()
+    cumulative_percentage.loc['P50'] = data.groupby(data.index, observed=True)[var].quantile(0.50)
+    cumulative_percentage.loc['P75'] = data.groupby(data.index, observed=True)[var].quantile(0.75)
+    cumulative_percentage.loc['P95'] = data.groupby(data.index, observed=True)[var].quantile(0.95)
+    cumulative_percentage.loc['P99'] = data.groupby(data.index, observed=True)[var].quantile(0.99)
+    cumulative_percentage.loc['Maximum'] = data.groupby(data.index, observed=True)[var].max()
 
     cumulative_percentage['Omni'] = cumulative_percentage.sum(axis=1)[:-6]
-    cumulative_percentage.loc[cumulative_percentage.index[-6], 'Omni'] = data[var1].mean()
-    cumulative_percentage.loc[cumulative_percentage.index[-5], 'Omni'] = data[var1].quantile(0.50)
-    cumulative_percentage.loc[cumulative_percentage.index[-4], 'Omni'] = data[var1].quantile(0.75)
-    cumulative_percentage.loc[cumulative_percentage.index[-3], 'Omni'] = data[var1].quantile(0.95)
-    cumulative_percentage.loc[cumulative_percentage.index[-2], 'Omni'] = data[var1].quantile(0.99)
-    #cumulative_percentage['Omni'].iloc[-1] = data[var1].max()
-    cumulative_percentage.loc[cumulative_percentage.index[-1], 'Omni'] = data[var1].max()
+    cumulative_percentage.loc[cumulative_percentage.index[-6], 'Omni'] = data[var].mean()
+    cumulative_percentage.loc[cumulative_percentage.index[-5], 'Omni'] = data[var].quantile(0.50)
+    cumulative_percentage.loc[cumulative_percentage.index[-4], 'Omni'] = data[var].quantile(0.75)
+    cumulative_percentage.loc[cumulative_percentage.index[-3], 'Omni'] = data[var].quantile(0.95)
+    cumulative_percentage.loc[cumulative_percentage.index[-2], 'Omni'] = data[var].quantile(0.99)
+    #cumulative_percentage['Omni'].iloc[-1] = data[var].max()
+    cumulative_percentage.loc[cumulative_percentage.index[-1], 'Omni'] = data[var].max()
 
     # Round 2 decimals
    cumulative_percentage = round(cumulative_percentage,2)
diff --git a/tests/test_maps.py b/tests/test_maps.py
index 1e86de5..e55f7da 100644
--- a/tests/test_maps.py
+++ b/tests/test_maps.py
@@ -1,44 +1,47 @@
 import os
-from metocean_stats.stats.map_funcs import *
+from metocean_stats import maps
 
-# def test_plot_points_on_map():
+#def test_plot_points_on_map():
 # output_file = 'test_points.png'
-# fig = plot_points_on_map(lon=[3.35, 3.10], lat=[60.40, 60.90], label=['NORA3', 'NORKYST800'], bathymetry='NORA3', output_file=output_file)
+# fig = maps.plot_points_on_map(lon=[3.35, 3.10], lat=[60.40, 60.90], label=['NORA3', 'NORKYST800'], bathymetry='NORA3', output_file=output_file)
 # if int(fig.axes[0].lines[0].get_xdata()[0]) == 3:
 # pass
 # else:
 # raise ValueError("FigValue is not correct")
-
+#
 
 # # Check if file is created
 # if os.path.isfile(output_file):
 # os.remove(output_file)
 # else:
 # raise FileNotFoundError(f"{output_file} was not created")
+# return
 
-# def test_plot_extreme_wave_map():
+#def test_plot_extreme_wave_map():
 # output_file = 'test_wave.png'
-# fig = plot_extreme_wave_map(return_level=50, product='NORA3', title='50-yr return values Hs (NORA3)', set_extent=[0, 30, 52, 73], output_file=output_file)
+# fig = maps.plot_extreme_wave_map(return_period=50, product='NORA3', title='50-yr return values Hs (NORA3)', set_extent=[0, 30, 52, 73], output_file=output_file)
 # if fig.dpi == 100.0:
 # pass
 # else:
 # raise ValueError("FigValue is not correct")
-
+#
 
 # # Check if file is created
 # if os.path.isfile(output_file):
 # os.remove(output_file)
 # else:
 # raise FileNotFoundError(f"{output_file} was not created")
+# return
 
-# def test_plot_extreme_wind_map():
+#def test_plot_extreme_wind_map():
 # output_file = 'test_wind.png'
-# fig = plot_extreme_wind_map(return_level=100, product='NORA3', level=1, title='100-yr return values Wind at 10 m (NORA3)', set_extent=[0, 30, 52, 73], output_file=output_file)
+# fig = maps.plot_extreme_wind_map(return_period=100, product='NORA3', z=100, title='100-yr return values Wind at 100 m (NORA3)', set_extent=[0, 30, 52, 73], output_file=output_file)
 # if fig.dpi == 100.0:
 # pass
 # else:
 # raise ValueError("FigValue is not correct")
-
+#
 
 # # Check if file is created
 # if os.path.isfile(output_file):
 # os.remove(output_file)
 # else:
 # raise FileNotFoundError(f"{output_file} was not created")
+# return
diff --git a/tests/test_plots.py b/tests/test_plots.py
index 8fa0008..c927ad9 100644
--- a/tests/test_plots.py
+++ b/tests/test_plots.py
@@ -1,7 +1,6 @@
 from metocean_api import ts
-from metocean_stats import plots, tables, stats
+from metocean_stats import plots, tables, stats, maps
 from metocean_stats.stats.aux_funcs import *
-from metocean_stats.stats.map_funcs import *
 import pandas as pd
 import os
 
@@ -45,7 +44,7 @@ def test_plot_prob_non_exceedance_fitted_3p_weibull(ds=ds):
 
 def test_plot_monthly_stats(ds=ds):
     output_file = 'test_monthly_stats.png'
-    fig = plots.plot_monthly_stats(ds, var1='T2m', show=['Minimum', 'Mean', 'Maximum'], title='T2m', output_file=output_file)
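The non-exceedance helpers return the table as a DataFrame in addition to the optional CSV, so the summary rows can be reused directly in scripts. A short sketch, assuming the same df with HS and DIRM columns as in the examples above:

    from metocean_stats import tables

    cum = tables.table_directional_non_exceedance(df, var='HS', step_var=0.5, var_dir='DIRM')
    p99_by_sector = cum.loc['P99']   # P99 of Hs per direction sector, plus the 'Omni' column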
+    fig = plots.plot_monthly_stats(ds, var='T2m', show=['Minimum', 'Mean', 'Maximum'], title='T2m', output_file=output_file)
     if os.path.exists(output_file):
         os.remove(output_file)
     if fig.axes[0].lines[0].get_xdata()[0].round(2) == 0:
@@ -55,7 +54,7 @@ def test_plot_monthly_stats(ds=ds):
 
 def test_plot_directional_stats(ds=ds):
     output_file = 'test_directional_stats.png'
-    fig = plots.plot_directional_stats(ds, var1='HS', step_var1=0.5, var_dir='DIRM', title='$H_s$[m]', output_file=output_file)
+    fig = plots.plot_directional_stats(ds, var='HS', step_var=0.5, var_dir='DIRM', title='$H_s$[m]', output_file=output_file)
     if os.path.exists(output_file):
         os.remove(output_file)
     if fig.axes[0].lines[0].get_xdata()[0].round(2) == 0:
diff --git a/tests/test_tables.py b/tests/test_tables.py
index 99653f3..18f0bcd 100644
--- a/tests/test_tables.py
+++ b/tests/test_tables.py
@@ -1,7 +1,6 @@
 from metocean_api import ts
-from metocean_stats import plots, tables, stats
+from metocean_stats import plots, tables, stats, maps
 from metocean_stats.stats.aux_funcs import *
-from metocean_stats.stats.map_funcs import *
 import pandas as pd
 import os
 
@@ -34,7 +33,7 @@ def test_table_var_sorted_by_hs(ds=ds):
 
 def test_table_monthly_non_exceedance(ds=ds):
     output_file = 'test_monthly_non_exceedance.csv'
-    df = tables.table_monthly_non_exceedance(ds, var1='HS', step_var1=0.5, output_file=output_file)
+    df = tables.table_monthly_non_exceedance(ds, var='HS', step_var=0.5, output_file=output_file)
     if os.path.exists(output_file):
         os.remove(output_file)
     if df.shape == (31, 13):
@@ -44,7 +43,7 @@ def test_table_monthly_non_exceedance(ds=ds):
 
 def test_table_directional_non_exceedance(ds=ds):
     output_file = 'test_directional_non_exceedance.csv'
-    df = tables.table_directional_non_exceedance(ds, var1='HS', step_var1=0.5, var_dir='DIRM', output_file=output_file)
+    df = tables.table_directional_non_exceedance(ds, var='HS', step_var=0.5, var_dir='DIRM', output_file=output_file)
     if os.path.exists(output_file):
         os.remove(output_file)
     if df.shape == (30, 13):
@@ -198,6 +197,7 @@ def test_table_hshs_for_given_wind(ds=ds):
     df = tables.table_hs_for_given_wind(ds, 'HS', 'W10', bin_width=2, max_wind=42, output_file=output_file)
     if os.path.exists(output_file):
         os.remove(output_file)
+        os.remove(output_file.split('.')[0]+'_coeff.csv')
     if df.shape == (21, 10):
         pass
     else:
@@ -292,6 +292,7 @@ def test_table_current_for_given_wind(ds=ds):
 
     if os.path.exists(output_file):
         os.remove(output_file)
+        os.remove(output_file.split('.')[0]+'_coeff.csv')
     if df.shape == (21, 10):
         pass
     else:
@@ -339,11 +340,12 @@ def test_table_profile_monthly_stats(ds=ds_ocean):
 
 def test_table_storm_surge_for_given_hs(ds=ds):
-    output_file = 'test_table_sotrm_surge_for_given_Hs.csv'
+    output_file = 'test_table_storm_surge_for_given_Hs.csv'
     ds['zeta_0m'] = 0.02*ds['HS'] + 0.05*np.log(ds['HS'])
     df, df_coeff = tables.table_storm_surge_for_given_hs(ds, var_surge='zeta_0m', var_hs='HS', bin_width=1, max_hs=20, output_file=output_file)
     if os.path.exists(output_file):
         os.remove(output_file)
+        os.remove(output_file.split('.')[0]+'_coeff.csv')
     if df.shape == (20, 10):
         pass
     else: