Commit 875feaf: utility grouped functions

callumrollo committed Jul 12, 2023
1 parent 9997156 commit 875feaf

Showing 2 changed files with 56 additions and 18 deletions.
55 changes: 54 additions & 1 deletion seaexplorertools/process_adcp.py
@@ -12,7 +12,7 @@
import seaborn as sns
import cmocean.cm as cmo
import json
-from urllib import request
+from urllib import request

warnings.filterwarnings(action='ignore', message='Mean of empty slice')
warnings.filterwarnings(action='ignore', message='invalid value encountered in divide')
@@ -1885,3 +1885,56 @@ def callbackF(Xi):
    plt.xlabel('Depth')
    plt.title('Northward velocity (m.s-1)')
    return out


def make_dataset(out):
    """Pack a dict of 2D (depth_bin x profile_num) arrays into an xarray Dataset."""
    profiles = np.arange(out["Pressure"].shape[1])
    depth_bins = np.arange(out["Pressure"].shape[0])

    ds_dict = {}
    for key, val in out.items():
        ds_dict[key] = (("depth_bin", "profile_num",), val)
    coords_dict = {"profile_num": ("profile_num", profiles),
                   "depth_bin": ("depth_bin", depth_bins)
                   }
    ds = xr.Dataset(data_vars=ds_dict, coords=coords_dict)
    return ds
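
For reference, a minimal sketch of what make_dataset produces; the input dict
and its shape here are illustrative, not taken from the commit:

    import numpy as np
    import xarray as xr

    # Hypothetical gridded output: one 3 x 2 (depth_bin x profile_num) array.
    out = {"Pressure": np.array([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])}
    ds = make_dataset(out)
    print(ds["Pressure"].dims)       # ('depth_bin', 'profile_num')
    print(ds["profile_num"].values)  # [0 1]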


def shear_from_adcp(adcp_path, glider_pqt_path, options):
    """Load ADCP and glider data, apply corrections, and return ENU shear."""
    ADCP, data, options = load_adcp_glider_data(adcp_path, glider_pqt_path, options)
    ADCP = remapADCPdepth(ADCP, options)
    ADCP = correct_heading(ADCP, data, options)
    ADCP = soundspeed_correction(ADCP)
    ADCP = remove_outliers(ADCP, options)
    ADCP = correct_shear(ADCP, options)
    ADCP = correct_backscatter(ADCP, data)
    ADCP = regridADCPdata(ADCP, options)
    ADCP = calcXYZfrom3beam(ADCP, options)
    ADCP = calcENUfromXYZ(ADCP, data, options)
    return ADCP


def grid_shear(ADCP, data):
    """Grid the processed shear data and return it as an xarray Dataset."""
    xaxis, yaxis, taxis, days = grid_shear_data(ADCP, data)
    out = grid_data(ADCP, data, {}, xaxis, yaxis)
    ds = make_dataset(out)
    return ds


def velocity_from_shear(adcp_path, glider_pqt_path, options, data, ADCP):
    """Reference shear-derived velocities with dive-averaged currents, surface
    drift and bottom tracking, then return a bias-corrected xarray Dataset."""
    # Pull navigation and speed variables from the glider parquet file.
    extra_data = pd.read_parquet(glider_pqt_path)
    data["speed_vert"] = extra_data["speed_vert"]
    data["speed_horz"] = extra_data["speed_horz"]
    data["DeadReckoning"] = extra_data["DeadReckoning"]
    data["NAV_RESOURCE"] = extra_data["NAV_RESOURCE"]
    data["diveNum"] = extra_data["diveNum"]
    xaxis, yaxis, taxis, days = grid_shear_data(ADCP, data)
    # Dive-averaged currents and surface drift constrain the velocity reference.
    data = get_DAC(ADCP, data)
    dE, dN, dT = getSurfaceDrift(data)
    ADCP = bottom_track(ADCP, adcp_path, options)
    out = verify_bottom_track(ADCP, data, dE, dN, dT, xaxis, yaxis, taxis)
    out = grid_data(ADCP, data, out, xaxis, yaxis)
    out = calc_bias(out, yaxis, taxis, days)
    ds = make_dataset(out)
    return ds
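
Taken together, the new wrappers reduce the processing chain to a few calls. A
minimal usage sketch under stated assumptions: the paths and empty options dict
are placeholders, the import is assumed to match how the tests reference the
module, and the glider dataframe is re-loaded via load_adcp_glider_data because
shear_from_adcp does not return it:

    from seaexplorertools import process_adcp

    adcp_path = "mission_adcp.nc"            # placeholder path
    glider_pqt_path = "mission_glider.pqt"   # placeholder path
    options = {}                             # placeholder; real runs set processing options

    # Shear pipeline: load, correct, regrid, and rotate beam velocities to ENU.
    ADCP = process_adcp.shear_from_adcp(adcp_path, glider_pqt_path, options)

    # shear_from_adcp returns only ADCP, so fetch the glider dataframe separately.
    _, data, options = process_adcp.load_adcp_glider_data(adcp_path, glider_pqt_path, options)

    # Gridded shear as an xarray Dataset.
    ds_shear = process_adcp.grid_shear(ADCP, data)

    # Referenced, bias-corrected velocities from shear, DAC, drift and bottom track.
    ds_vel = process_adcp.velocity_from_shear(adcp_path, glider_pqt_path, options, data, ADCP)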
19 changes: 2 additions & 17 deletions tests/test_adcp_processing.py
@@ -41,16 +41,7 @@ def test_processing():
    xaxis, yaxis, taxis, days = process_adcp.grid_shear_data(ADCP, data)
    out = process_adcp.grid_data(ADCP, data, {}, xaxis, yaxis)

-    profiles = np.arange(out["Pressure"].shape[1])
-    depth_bins = np.arange(out["Pressure"].shape[0])
-
-    ds_dict = {}
-    for key, val in out.items():
-        ds_dict[key] = (("depth_bin", "profile_num",), val)
-    coords_dict = {"profile_num": ("profile_num", profiles),
-                   "depth_bin": ("depth_bin", depth_bins)
-                   }
-    ds = xr.Dataset(data_vars=ds_dict, coords=coords_dict)
+    ds = process_adcp.make_dataset(out)
    ds_min = ds[['Sh_E', 'Sh_N', 'Sh_U']]
    ds_min_test = xr.open_dataset("tests/test_files/ds_out_min.nc")
    for var in list(ds_min):
@@ -73,13 +64,7 @@ def test_processing():
    out = process_adcp.grid_data(ADCP, data, out, xaxis, yaxis)
    out = process_adcp.calc_bias(out, yaxis, taxis, days)

-    ds_dict = {}
-    for key, val in out.items():
-        ds_dict[key] = (("depth_bin", "profile_num",), val)
-    coords_dict = {"profile_num": ("profile_num", profiles),
-                   "depth_bin": ("depth_bin", depth_bins)
-                   }
-    ds = xr.Dataset(data_vars=ds_dict, coords=coords_dict)
+    ds = process_adcp.make_dataset(out)
    ds_min = ds[['ADCP_E', 'ADCP_N']]
    for var in list(ds_min):
        assert np.allclose(ds_min[var], ds_min_test[var], equal_nan=True, atol=1e-7, rtol=1e-3)
