Skip to content

Commit

Permalink
🚚 Import ATLAS intake catalog, with ATL11 test data included
Browse files Browse the repository at this point in the history
Enable importing of the ATLAS intake catalog straight from deepicedrain! This functions almost like a test fixture, enabling us to easily load ICESat-2 data in our scripts, i.e. keeping things DRY. Managed to get rid of the pytest fixture in test_calculate_delta.py which did the sample data loading from the catalog before. Renamed the very generic catalog.yaml to a slightly less generic atlas_catalog.yaml. Added some description metadata to that catalog file, and included the nested atl11_test_case. Also ignoring .h5 data files now.
  • Loading branch information
weiji14 committed May 28, 2020
1 parent 0e89b7f commit 7ea9c8d
Show file tree
Hide file tree
Showing 6 changed files with 34 additions and 40 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,6 @@ MANIFEST

# Jupyter Notebook
.ipynb_checkpoints

# Data files
**/*.h5
19 changes: 5 additions & 14 deletions atl06_play.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@
"import tqdm\n",
"import xarray as xr\n",
"\n",
"# %matplotlib inline"
"import deepicedrain"
]
},
{
Expand Down Expand Up @@ -116,16 +116,6 @@
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"# open the local catalog file containing ICESat-2 stuff\n",
"catalog = intake.open_catalog(uri=\"catalog.yaml\")"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
Expand Down Expand Up @@ -1035,7 +1025,7 @@
" data_rate: Data within this group are sparse. Data values are provide..."
]
},
"execution_count": 4,
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
}
Expand All @@ -1051,8 +1041,9 @@
" )\n",
" raise\n",
"\n",
"# depends on .netrc file in home folder\n",
"dataset = catalog.icesat2atl06.to_dask().unify_chunks()\n",
"# open the local intake data catalog file containing ICESat-2 stuff\n",
"# data download will depend on having a .netrc file in home folder\n",
"dataset = deepicedrain.catalog.icesat2atl06.to_dask().unify_chunks()\n",
"dataset"
]
},
Expand Down
11 changes: 4 additions & 7 deletions atl06_play.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@
import tqdm
import xarray as xr

# %matplotlib inline
import deepicedrain

# %%
# Configure intake and set number of compute cores for data download
Expand All @@ -73,10 +73,6 @@
# (while making sure we have our Earthdata credentials set up properly),
# and view it using [xarray](https://xarray.pydata.org) and [hvplot](https://hvplot.pyviz.org).

# %%
# open the local catalog file containing ICESat-2 stuff
catalog = intake.open_catalog(uri="catalog.yaml")

# %%
try:
netrc.netrc()
Expand All @@ -88,8 +84,9 @@
)
raise

# depends on .netrc file in home folder
dataset = catalog.icesat2atl06.to_dask().unify_chunks()
# open the local intake data catalog file containing ICESat-2 stuff
# data download will depend on having a .netrc file in home folder
dataset = deepicedrain.catalog.icesat2atl06.to_dask().unify_chunks()
dataset

# %%
Expand Down
10 changes: 9 additions & 1 deletion catalog.yaml → atlas_catalog.yaml
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
metadata:
description: 'An intake catalog for loading ICESat-2 ATLAS datasets'
version: 1
sources:
icesat2atlasdownloader:
description: 'A catalog for downloading Antarctic ICESat-2 ATLAS products from NSIDC based on date'
args:
urlpath:
- simplecache::https://n5eil01u.ecs.nsidc.org/ATLAS/ATL{{atlproduct}}.00{{version}}/{{date.strftime("%Y.%m.%d")}}/ATL{{atlproduct}}_*_*10_00{{version}}_0{{revision}}.h5
Expand Down Expand Up @@ -42,6 +44,7 @@ sources:
default: 1
driver: intake_xarray.netcdf.NetCDFSource
icesat2atl06:
description: 'Reads in ICESat-2 ATL06 data into an xarray.Dataset, one date and one laser pair at a time'
args:
chunks:
delta_time: 50000
Expand Down Expand Up @@ -85,7 +88,6 @@ sources:
type: int
default: 3
allowed: [1, 2, 3]
description: ''
driver: intake_xarray.netcdf.NetCDFSource
metadata:
plots:
Expand All @@ -101,3 +103,9 @@ sources:
height: 500
geo: True
coastline: True
test_data:
args:
path: tests/test_catalog.yaml
description: 'Sample ICESat-2 datasets for testing purposes'
driver: intake.catalog.local.YAMLFileCatalog
metadata: {}
10 changes: 8 additions & 2 deletions deepicedrain/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,10 @@
__version__ = "0.1.0"

import intake
from deepicedrain.deltamath import calculate_delta
from deepicedrain.spatiotemporal import Region

__version__: str = "0.1.0"

# Loads the ICESat-2 ATLAS intake data catalog
catalog: intake.catalog.local.YAMLFileCatalog = intake.open_catalog(
uri="atlas_catalog.yaml"
)
21 changes: 5 additions & 16 deletions tests/test_calculate_delta.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,26 +7,14 @@
import pytest
import xarray as xr

from deepicedrain import calculate_delta
from deepicedrain import calculate_delta, catalog


@pytest.fixture(scope="module")
def atl11_dataset():
"""
Loads a sample ATL11 test dataset from the intake catalog
"""
catalog: intake.catalog.local.YAMLFileCatalog = intake.open_catalog(
uri="tests/test_catalog.yaml"
)
atl11_dataset: xr.Dataset = catalog.atl11_test_case.read()

return atl11_dataset


def test_calculate_delta_height(atl11_dataset):
def test_calculate_delta_height():
"""
Check that calculating change in elevation works.
"""
atl11_dataset: xr.Dataset = catalog.test_data.atl11_test_case.read()
delta_height = calculate_delta(
dataset=atl11_dataset, oldcyclenum=3, newcyclenum=4, variable="h_corr"
)
Expand All @@ -38,10 +26,11 @@ def test_calculate_delta_height(atl11_dataset):
npt.assert_allclose(actual=delta_height.max().data, desired=9.49908442)


def test_calculate_delta_time(atl11_dataset):
def test_calculate_delta_time():
"""
Check that calculating change in time works.
"""
atl11_dataset: xr.Dataset = catalog.test_data.atl11_test_case.read()
delta_time = calculate_delta(
dataset=atl11_dataset, oldcyclenum=3, newcyclenum=4, variable="delta_time"
)
Expand Down

0 comments on commit 7ea9c8d

Please sign in to comment.