Skip to content

Commit

Permalink
✅ Properly tested deltatime_to_utctime function
Browse files Browse the repository at this point in the history
Turn the ICESat-2 delta_time to utc_time conversion code in our jupyter notebook into a well tested function! The cool bit is that we can pass in either a dask or numpy backed xarray.DataArray, and get the equivalent output, with dimensions and coordinates preserved! Gotta love [NEP18](https://numpy.org/neps/nep-0018-array-function-protocol.html). Added a chunks statement to test_catalog.yaml, and ensure the file is cached in a relative path. Had to make sure the atl11_dataset is closed after each test or subsequent tests will see a numpy.array instead of a dask.array, should do proper setup/teardown next time or test everything using dask. Also bumping up cftime from 1.1.1.2 to 1.1.3 to bust the CI cache, just in case.
  • Loading branch information
weiji14 committed May 29, 2020
1 parent 3c3d021 commit 2551cdb
Show file tree
Hide file tree
Showing 8 changed files with 95 additions and 41 deletions.
18 changes: 4 additions & 14 deletions atl11_play.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
"import glob\n",
"\n",
"import deepicedrain\n",
"import pointCollection.is2_calendar\n",
"\n",
"import dask\n",
"import dask.array\n",
Expand Down Expand Up @@ -214,18 +213,7 @@
"metadata": {},
"outputs": [],
"source": [
"ICESAT2_EPOCH = np.datetime64(pointCollection.is2_calendar.t_0())\n",
"# ICESAT2_EPOCH = np.datetime64(datetime.datetime(2018, 1, 1, 0, 0, 0))"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"utc_time = dask.array.asarray(ICESAT2_EPOCH) + ds.delta_time.data\n",
"ds[\"utc_time\"] = xr.DataArray(data=utc_time, coords=ds.delta_time.coords)"
"ds[\"utc_time\"] = deepicedrain.deltatime_to_utctime(dataarray=ds.delta_time)"
]
},
{
Expand Down Expand Up @@ -284,7 +272,9 @@
" ymin=-699564.516934089,\n",
" ymax=-365489.6822096751,\n",
" ),\n",
" \"antarctica\": deepicedrain.Region(\"Antarctica\", -2700000, 2800000, -2200000, 2300000),\n",
" \"antarctica\": deepicedrain.Region(\n",
" \"Antarctica\", -2700000, 2800000, -2200000, 2300000\n",
" ),\n",
" \"siple_coast\": deepicedrain.Region(\n",
" \"Siple Coast\", -1000000, 250000, -1000000, -100000\n",
" ),\n",
Expand Down
8 changes: 1 addition & 7 deletions atl11_play.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,6 @@
import glob

import deepicedrain
import pointCollection.is2_calendar

import dask
import dask.array
Expand Down Expand Up @@ -118,12 +117,7 @@
# in the future.

# %%
ICESAT2_EPOCH = np.datetime64(pointCollection.is2_calendar.t_0())
# ICESAT2_EPOCH = np.datetime64(datetime.datetime(2018, 1, 1, 0, 0, 0))

# %%
utc_time = dask.array.asarray(ICESAT2_EPOCH) + ds.delta_time.data
ds["utc_time"] = xr.DataArray(data=utc_time, coords=ds.delta_time.coords)
ds["utc_time"] = deepicedrain.deltatime_to_utctime(dataarray=ds.delta_time)

# %% [markdown]
# ## Mask out low quality height data
Expand Down
2 changes: 1 addition & 1 deletion deepicedrain/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

import deepicedrain
from deepicedrain.deltamath import calculate_delta
from deepicedrain.spatiotemporal import Region
from deepicedrain.spatiotemporal import Region, deltatime_to_utctime

__version__: str = "0.1.0"

Expand Down
17 changes: 17 additions & 0 deletions deepicedrain/spatiotemporal.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
Does bounding box region subsets, coordinate/time conversions, and more!
"""
import dataclasses
import datetime

import numpy as np
import xarray as xr
Expand Down Expand Up @@ -55,3 +56,19 @@ def subset(
)

return ds.where(cond=cond, drop=drop)


def deltatime_to_utctime(
    dataarray: xr.DataArray,
    start_epoch: np.datetime64 = np.datetime64("2018-01-01T00:00:00.000000"),
) -> xr.DataArray:
    """
    Convert GPS delta time (nanoseconds elapsed since an epoch, default
    2018 Jan 1st) into Coordinated Universal Time (UTC) timestamps.

    Works on both dask- and numpy-backed xarray.DataArray inputs thanks to
    NEP18 array dispatch, with dimensions and coordinates carried through.

    Note, does not account for leap seconds! There are none declared since the
    last one announced on 31/12/2016, so it should be fine for now as of 2020.
    """
    # Wrap the scalar epoch in the same array class as the input so that the
    # addition preserves the backing array type (dask stays dask, numpy stays
    # numpy) as well as the input's dims/coords.
    epoch = type(dataarray)(start_epoch)

    return epoch + dataarray
4 changes: 4 additions & 0 deletions deepicedrain/tests/test_calculate_delta.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,8 @@ def test_calculate_delta_height():
npt.assert_allclose(actual=delta_height.mean().data, desired=-0.90124122)
npt.assert_allclose(actual=delta_height.max().data, desired=9.49908442)

atl11_dataset.close()


def test_calculate_delta_time():
"""
Expand All @@ -46,3 +48,5 @@ def test_calculate_delta_time():
npt.assert_equal(
actual=delta_time.max().data, desired=np.timedelta64(7846787022726588)
)

atl11_dataset.close()
6 changes: 4 additions & 2 deletions deepicedrain/tests/test_catalog.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,18 @@ metadata:
version: 1
sources:
atl11_test_case:
description: 'An example ATL11 hdf5 file for testing various calculations'
args:
chunks:
cycle_number: 2
urlpath: simplecache::https://github.com/suzanne64/ATL11/raw/125ee1a653d78e6b86864b35c9d0fcfd72d64a85/ATL11_test_case/ATL11_078805_0304_02_v002.h5
xarray_kwargs:
engine: h5netcdf
group: /pt2/corrected_h
storage_options:
simplecache:
cache_storage: tests/test_data
cache_storage: '{{ CATALOG_DIR }}/test_data'
same_names: True
description: 'An example ATL11 hdf5 file for testing various calculations'
driver: intake_xarray.netcdf.NetCDFSource
metadata:
coords:
Expand Down
47 changes: 47 additions & 0 deletions deepicedrain/tests/test_spatiotemporal_conversions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
"""
Tests various conversions between geospatial and temporal units
"""
import datetime

import dask
import numpy as np
import numpy.testing as npt
import pandas as pd
import xarray as xr

from deepicedrain import catalog, deltatime_to_utctime


def test_deltatime_to_utctime():
    """
    Test that converting from ICESat-2 delta_time to utc_time works,
    and that the xarray dimensions are preserved in the process.
    """
    atl11_dataset: xr.Dataset = catalog.test_data.atl11_test_case.to_dask()

    # Ensure the dataset is closed even if an assertion fails, otherwise the
    # cached file stays open and later tests see a numpy.array instead of the
    # expected dask.array (poor man's teardown until a pytest fixture exists).
    try:
        utc_time: xr.DataArray = deltatime_to_utctime(
            dataarray=atl11_dataset.delta_time
        )

        # Dimensions/coordinates of the input should be preserved, and a
        # dask-backed input should stay lazily evaluated (NEP18 dispatch).
        assert utc_time.shape == (1404, 2)
        assert utc_time.dims == ("ref_pt", "cycle_number")
        assert dask.is_dask_collection(utc_time)

        utc_time = utc_time.compute()

        # Spot check min/max and the per-cycle mean timestamps of the
        # converted UTC times against known values for this test granule.
        npt.assert_equal(
            actual=utc_time.data.min(),
            desired=np.datetime64("2019-05-19T20:53:51.039891534"),
        )
        npt.assert_equal(
            actual=np.datetime64(pd.DataFrame(utc_time.data)[0].mean()),
            desired=np.datetime64("2019-05-19 20:54:00.925868"),
        )
        npt.assert_equal(
            actual=np.datetime64(pd.DataFrame(utc_time.data)[1].mean()),
            desired=np.datetime64("2019-08-18 16:33:47.791226"),
        )
        npt.assert_equal(
            actual=utc_time.data.max(),
            desired=np.datetime64("2019-08-18T16:33:57.834610209"),
        )
    finally:
        atl11_dataset.close()
34 changes: 17 additions & 17 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

0 comments on commit 2551cdb

Please sign in to comment.