diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml
index f35db00..608beda 100644
--- a/.github/workflows/python-app.yml
+++ b/.github/workflows/python-app.yml
@@ -38,7 +38,7 @@ jobs:
       with:
         path: |
           /usr/share/miniconda3/envs/deepicedrain
-        key: cache-venv-${{ github.ref }}-${{ hashFiles('**/environment.yml') }}-${{ hashFiles('**/poetry.lock') }}
+        key: cache-venv-${{ github.ref }}-${{ hashFiles('**/environment.yml') }}-${{ hashFiles('**/poetry.lock') }}-${{ hashFiles('**/deepicedrain/*.py') }}
         restore-keys: |
           cache-venv-refs/heads/master-
@@ -66,4 +66,4 @@ jobs:
     - name: Test with pytest
       shell: bash -l {0}
-      run: poetry run pytest --verbose tests/
+      run: poetry run pytest --verbose deepicedrain/
diff --git a/.gitignore b/.gitignore
index a6afd9b..fc6100b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -26,3 +26,6 @@ MANIFEST
 
 # Jupyter Notebook
 .ipynb_checkpoints
+
+# Data files
+**/*.h5
diff --git a/README.md b/README.md
index 550b42d..5cf8cec 100644
--- a/README.md
+++ b/README.md
@@ -11,6 +11,8 @@ in Antarctica using remote sensing and machine learning.
 
 ![ATL11 Cycle 6 minus Cycle 5 height change over Antarctica](https://user-images.githubusercontent.com/23487320/83100017-ffb0ba00-a102-11ea-9603-ac469f09e58b.png)
 
+![DeepIceDrain Pipeline](https://yuml.me/diagram/scruffy;dir:LR/class/[Land-Ice-Elevation|atl06_play.ipynb]->[Convert|atl06_to_atl11.ipynb],[Convert]->[Ice-Sheet-H(t)-Series|atl11_play.ipynb])
+
 # Getting started
 
 ## Quickstart
@@ -66,3 +68,36 @@ Finally, double-check that the libraries have been installed.
     python -m ipykernel install --user --name deepicedrain  # to install conda env properly
     jupyter kernelspec list --json                          # see if kernel is installed
     jupyter lab &
+
+## Usage
+
+Once you've properly installed the `deepicedrain` package,
+you can use it to do some quick calculations on ICESat-2 datasets.
+The example below shows how to calculate ice surface elevation change
+on a sample ATL11 dataset between ICESat-2's Cycle 3 and Cycle 4.
+
+    import deepicedrain
+    import xarray as xr
+
+    # Loads a sample ATL11 file from the intake catalog into xarray
+    atl11_dataset: xr.Dataset = deepicedrain.catalog.test_data.atl11_test_case.read()
+
+    # Calculate elevation change in metres from ICESat-2 Cycle 3 to Cycle 4
+    delta_height: xr.DataArray = deepicedrain.calculate_delta(
+        dataset=atl11_dataset, oldcyclenum=3, newcyclenum=4, variable="h_corr"
+    )
+
+    # Quick plot of delta_height along the ICESat-2 track
+    delta_height.plot()
+
+![ATL11 delta_height along ref_pt track](https://user-images.githubusercontent.com/23487320/83319030-bf7e4280-a28e-11ea-9bed-331e35dbc266.png)
+
+## Related Projects
+
+This work would not be possible without inspiration
+from the following cool open source projects!
+Go check them out if you have time.
+
+- [ATL11](https://github.com/suzanne64/ATL11)
+- [ICESAT-2 HackWeek](https://github.com/ICESAT-2HackWeek)
+- [icepyx](https://github.com/icesat2py/icepyx)
diff --git a/atl06_play.ipynb b/atl06_play.ipynb
index 207abc9..0fa589a 100644
--- a/atl06_play.ipynb
+++ b/atl06_play.ipynb
@@ -48,7 +48,7 @@
     "import tqdm\n",
     "import xarray as xr\n",
     "\n",
-    "# %matplotlib inline"
+    "import deepicedrain"
    ]
   },
   {
@@ -109,23 +109,19 @@
     "\n",
     "Use our [intake catalog](https://intake.readthedocs.io/en/latest/catalog.html) to get some sample ATL06 data\n",
     "(while making sure we have our Earthdata credentials set up properly),\n",
-    "and view it using [xarray](https://xarray.pydata.org) and [hvplot](https://hvplot.pyviz.org)."
+ "and view it using [xarray](https://xarray.pydata.org) and [hvplot](https://hvplot.pyviz.org).\n", + "\n", + "open the local intake data catalog file containing ICESat-2 stuff\n", + "catalog = intake.open_catalog(\"deepicedrain/atlas_catalog.yaml\")\n", + "or if the deepicedrain python package is installed, you can use either of the below:\n", + "catalog = deepicedrain.catalog\n", + "catalog = intake.cat.atlas_cat" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, - "outputs": [], - "source": [ - "# open the local catalog file containing ICESat-2 stuff\n", - "catalog = intake.open_catalog(uri=\"catalog.yaml\")" - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, "outputs": [ { "data": { @@ -1035,7 +1031,7 @@ " data_rate: Data within this group are sparse. Data values are provide..." ] }, - "execution_count": 4, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -1051,7 +1047,7 @@ " )\n", " raise\n", "\n", - "# depends on .netrc file in home folder\n", + "# data download will depend on having a .netrc file in home folder\n", "dataset = catalog.icesat2atl06.to_dask().unify_chunks()\n", "dataset" ] @@ -1752,21 +1748,8 @@ "metadata": {}, "outputs": [], "source": [ - "transformer = pyproj.Transformer.from_crs(\n", - " crs_from=pyproj.CRS.from_epsg(4326),\n", - " crs_to=pyproj.CRS.from_epsg(3031),\n", - " always_xy=True,\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": 21, - "metadata": {}, - "outputs": [], - "source": [ - "dfs[\"x\"], dfs[\"y\"] = transformer.transform(\n", - " xx=dfs.longitude.values, yy=dfs.latitude.values\n", + "dfs[\"x\"], dfs[\"y\"] = deepicedrain.lonlat_to_xy(\n", + " longitude=dfs.longitude, latitude=dfs.latitude\n", ")" ] }, diff --git a/atl06_play.py b/atl06_play.py index 14562e2..4a95868 100644 --- a/atl06_play.py +++ b/atl06_play.py @@ -52,7 +52,7 @@ import tqdm import xarray as xr -# %matplotlib inline +import deepicedrain # %% # Configure intake and set number of compute cores for data download @@ -73,9 +73,11 @@ # (while making sure we have our Earthdata credentials set up properly), # and view it using [xarray](https://xarray.pydata.org) and [hvplot](https://hvplot.pyviz.org). 
-# %%
-# open the local catalog file containing ICESat-2 stuff
-catalog = intake.open_catalog(uri="catalog.yaml")
+# open the local intake data catalog file containing ICESat-2 stuff
+catalog = intake.open_catalog("deepicedrain/atlas_catalog.yaml")
+# or if the deepicedrain python package is installed, you can use either of the below:
+# catalog = deepicedrain.catalog
+# catalog = intake.cat.atlas_cat
 
 # %%
 try:
@@ -88,7 +90,7 @@
     )
     raise
 
-# depends on .netrc file in home folder
+# data download will depend on having a .netrc file in home folder
 dataset = catalog.icesat2atl06.to_dask().unify_chunks()
 dataset
 
@@ -345,15 +347,8 @@ def six_laser_beams(filepaths: list) -> dask.dataframe.DataFrame:
 # ### Transform from EPSG:4326 (lat/lon) to EPSG:3031 (Antarctic Polar Stereographic)
 
 # %%
-transformer = pyproj.Transformer.from_crs(
-    crs_from=pyproj.CRS.from_epsg(4326),
-    crs_to=pyproj.CRS.from_epsg(3031),
-    always_xy=True,
-)
-
-# %%
-dfs["x"], dfs["y"] = transformer.transform(
-    xx=dfs.longitude.values, yy=dfs.latitude.values
+dfs["x"], dfs["y"] = deepicedrain.lonlat_to_xy(
+    longitude=dfs.longitude, latitude=dfs.latitude
 )
 
 # %%
diff --git a/atl11_play.ipynb b/atl11_play.ipynb
index 3d0b110..c5b02c5 100644
--- a/atl11_play.ipynb
+++ b/atl11_play.ipynb
@@ -20,7 +20,6 @@
     "import glob\n",
     "\n",
     "import deepicedrain\n",
-    "import pointCollection.is2_calendar\n",
     "\n",
     "import dask\n",
     "import dask.array\n",
@@ -160,24 +159,11 @@
    },
    "outputs": [],
    "source": [
-    "lonlat_to_xy = lambda longitude, latitude: pyproj.Proj(projparams=3031)(\n",
-    "    longitude, latitude\n",
+    "ds[\"x\"], ds[\"y\"] = deepicedrain.lonlat_to_xy(\n",
+    "    longitude=ds.longitude, latitude=ds.latitude\n",
     ")"
    ]
   },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {
-    "lines_to_next_cell": 2
-   },
-   "outputs": [],
-   "source": [
-    "x, y = lonlat_to_xy(ds.longitude.values, ds.latitude.values)\n",
-    "ds[\"x\"] = xr.DataArray(data=x, coords=ds.longitude.coords)\n",
-    "ds[\"y\"] = xr.DataArray(data=y, coords=ds.latitude.coords)"
-   ]
-  },
   {
    "cell_type": "code",
    "execution_count": 7,
@@ -214,18 +200,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "ICESAT2_EPOCH = np.datetime64(pointCollection.is2_calendar.t_0())\n",
-    "# ICESAT2_EPOCH = np.datetime64(datetime.datetime(2018, 1, 1, 0, 0, 0))"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 9,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "utc_time = dask.array.asarray(ICESAT2_EPOCH) + ds.delta_time.data\n",
-    "ds[\"utc_time\"] = xr.DataArray(data=utc_time, coords=ds.delta_time.coords)"
+    "ds[\"utc_time\"] = deepicedrain.deltatime_to_utctime(dataarray=ds.delta_time)"
    ]
   },
@@ -277,19 +252,21 @@
    "source": [
     "# Dictionary of Antarctic bounding box locations with EPSG:3031 coordinates\n",
     "regions = {\n",
-    "    \"kamb\": deepicedrain.BBox(\n",
+    "    \"kamb\": deepicedrain.Region(\n",
     "        name=\"Kamb Ice Stream\",\n",
     "        xmin=-739741.7702261859,\n",
     "        xmax=-411054.19240523444,\n",
     "        ymin=-699564.516934089,\n",
     "        ymax=-365489.6822096751,\n",
    "    ),\n",
-    "    \"antarctica\": deepicedrain.BBox(\"Antarctica\", -2700000, 2800000, -2200000, 2300000),\n",
-    "    \"siple_coast\": deepicedrain.BBox(\n",
+    "    \"antarctica\": deepicedrain.Region(\n",
+    "        \"Antarctica\", -2700000, 2800000, -2200000, 2300000\n",
+    "    ),\n",
+    "    \"siple_coast\": deepicedrain.Region(\n",
     "        \"Siple Coast\", -1000000, 250000, -1000000, -100000\n",
     "    ),\n",
-    "    \"kamb2\": deepicedrain.BBox(\"Kamb Ice Stream\", -500000, -400000, -600000, -500000),\n",
-    "    \"whillans\": deepicedrain.BBox(\n",
+    "    \"kamb2\": deepicedrain.Region(\"Kamb Ice Stream\", -500000, -400000, -600000, -500000),\n",
+    "    \"whillans\": deepicedrain.Region(\n",
     "        \"Whillans Ice Stream\", -350000, -100000, -700000, -450000\n",
     "    ),\n",
     "}"
    ]
   },
@@ -303,7 +280,7 @@
    "source": [
     "# Do the actual computation to find data points within region of interest\n",
     "region = regions[\"kamb\"]  # Select Kamb Ice Stream region\n",
-    "ds_subset = ds.where(cond=region.subset(ds=ds), drop=True)\n",
+    "ds_subset = region.subset(ds=ds)\n",
     "ds_subset = ds_subset.unify_chunks()\n",
     "ds_subset = ds_subset.compute()"
    ]
   },
@@ -671,7 +648,7 @@
    "source": [
     "# Select region here, see dictionary of regions at top\n",
     "placename: str = \"antarctica\"\n",
-    "region: deepicedrain.BBox = regions[placename]"
+    "region: deepicedrain.Region = regions[placename]"
    ]
   },
diff --git a/atl11_play.py b/atl11_play.py
index 944f314..fcb74c8 100644
--- a/atl11_play.py
+++ b/atl11_play.py
@@ -24,7 +24,6 @@
 import glob
 
 import deepicedrain
-import pointCollection.is2_calendar
 
 import dask
 import dask.array
@@ -87,17 +86,10 @@
 # to the Antarctic Polar Stereographic (EPSG:3031) projection.
 
 # %%
-lonlat_to_xy = lambda longitude, latitude: pyproj.Proj(projparams=3031)(
-    longitude, latitude
+ds["x"], ds["y"] = deepicedrain.lonlat_to_xy(
+    longitude=ds.longitude, latitude=ds.latitude
 )
-
-# %%
-x, y = lonlat_to_xy(ds.longitude.values, ds.latitude.values)
-ds["x"] = xr.DataArray(data=x, coords=ds.longitude.coords)
-ds["y"] = xr.DataArray(data=y, coords=ds.latitude.coords)
-
-
 # %%
 # Also set x, y as coordinates in xarray.Dataset
 ds = ds.set_coords(names=["x", "y"])
@@ -118,12 +110,7 @@
 # in the future.
 
 # %%
-ICESAT2_EPOCH = np.datetime64(pointCollection.is2_calendar.t_0())
-# ICESAT2_EPOCH = np.datetime64(datetime.datetime(2018, 1, 1, 0, 0, 0))
-
-# %%
-utc_time = dask.array.asarray(ICESAT2_EPOCH) + ds.delta_time.data
-ds["utc_time"] = xr.DataArray(data=utc_time, coords=ds.delta_time.coords)
+ds["utc_time"] = deepicedrain.deltatime_to_utctime(dataarray=ds.delta_time)
 
 # %% [markdown]
 # ## Mask out low quality height data
@@ -148,19 +135,21 @@
 # %%
 # Dictionary of Antarctic bounding box locations with EPSG:3031 coordinates
 regions = {
-    "kamb": deepicedrain.BBox(
+    "kamb": deepicedrain.Region(
         name="Kamb Ice Stream",
         xmin=-739741.7702261859,
         xmax=-411054.19240523444,
         ymin=-699564.516934089,
         ymax=-365489.6822096751,
     ),
-    "antarctica": deepicedrain.BBox("Antarctica", -2700000, 2800000, -2200000, 2300000),
-    "siple_coast": deepicedrain.BBox(
+    "antarctica": deepicedrain.Region(
+        "Antarctica", -2700000, 2800000, -2200000, 2300000
+    ),
+    "siple_coast": deepicedrain.Region(
         "Siple Coast", -1000000, 250000, -1000000, -100000
     ),
-    "kamb2": deepicedrain.BBox("Kamb Ice Stream", -500000, -400000, -600000, -500000),
-    "whillans": deepicedrain.BBox(
+    "kamb2": deepicedrain.Region("Kamb Ice Stream", -500000, -400000, -600000, -500000),
+    "whillans": deepicedrain.Region(
         "Whillans Ice Stream", -350000, -100000, -700000, -450000
     ),
 }
@@ -168,7 +157,7 @@
 # %%
 # Do the actual computation to find data points within region of interest
 region = regions["kamb"]  # Select Kamb Ice Stream region
-ds_subset = ds.where(cond=region.subset(ds=ds), drop=True)
+ds_subset = region.subset(ds=ds)
 ds_subset = ds_subset.unify_chunks()
 ds_subset = ds_subset.compute()
@@ -317,7 +306,7 @@
 # %%
 # Select region here, see dictionary of regions at top
 placename: str = "antarctica"
-region: deepicedrain.BBox = regions[placename]
+region: deepicedrain.Region = regions[placename]
 
 # %%
 # Find subglacial lakes (Smith et al., 2009) within region of interest
diff --git a/deepicedrain/__init__.py b/deepicedrain/__init__.py
index bd39aaf..f695f62 100644
--- a/deepicedrain/__init__.py
+++ b/deepicedrain/__init__.py
@@ -1,4 +1,18 @@
-__version__ = "0.1.0"
+import importlib.resources
+import logging
 
-from deepicedrain.geo import BBox
+import intake
+
+import deepicedrain
 from deepicedrain.deltamath import calculate_delta
+from deepicedrain.spatiotemporal import Region, deltatime_to_utctime, lonlat_to_xy
+
+__version__: str = "0.1.0"
+
+# Loads the ICESat-2 ATLAS intake data catalog
+_catalog_path = importlib.resources.path(
+    package=deepicedrain, resource="atlas_catalog.yaml"
+)
+with _catalog_path as uri:
+    logging.info(f"Loading intake catalog from {uri}")
+    catalog: intake.catalog.local.YAMLFileCatalog = intake.open_catalog(uri=str(uri))
diff --git a/catalog.yaml b/deepicedrain/atlas_catalog.yaml
similarity index 88%
rename from catalog.yaml
rename to deepicedrain/atlas_catalog.yaml
index 4c7efa9..ea16697 100644
--- a/catalog.yaml
+++ b/deepicedrain/atlas_catalog.yaml
@@ -1,7 +1,9 @@
 metadata:
+  description: 'An intake catalog for loading ICESat-2 ATLAS datasets'
   version: 1
 sources:
   icesat2atlasdownloader:
+    description: 'A catalog for downloading Antarctic ICESat-2 ATLAS products from NSIDC based on date'
     args:
       urlpath:
         - simplecache::https://n5eil01u.ecs.nsidc.org/ATLAS/ATL{{atlproduct}}.00{{version}}/{{date.strftime("%Y.%m.%d")}}/ATL{{atlproduct}}_*_*10_00{{version}}_0{{revision}}.h5
@@ -42,6 +44,7 @@ sources:
         default: 1
     driver: intake_xarray.netcdf.NetCDFSource
   icesat2atl06:
+    description: 'Reads in ICESat-2 ATL06 data into an xarray.Dataset, one date and one laser pair at a time'
     args:
       chunks:
         delta_time: 50000
@@ -85,7 +88,6 @@ sources:
         type: int
         default: 3
         allowed: [1, 2, 3]
-        description: ''
     driver: intake_xarray.netcdf.NetCDFSource
     metadata:
       plots:
@@ -101,3 +103,9 @@ sources:
           height: 500
           geo: True
           coastline: True
+  test_data:
+    args:
+      path: '{{ CATALOG_DIR }}/tests/test_catalog.yaml'
+    description: 'Sample ICESat-2 datasets for testing purposes'
+    driver: intake.catalog.local.YAMLFileCatalog
+    metadata: {}
diff --git a/deepicedrain/geo.py b/deepicedrain/geo.py
deleted file mode 100644
index 93696ce..0000000
--- a/deepicedrain/geo.py
+++ /dev/null
@@ -1,52 +0,0 @@
-"""
-Geographic class that implements some handy geographic tools.
-"""
-import dataclasses
-
-import numpy as np
-import xarray as xr
-
-
-@dataclasses.dataclass(frozen=True)
-class BBox:
-    """
-    A BoundingBox structure that outputs nice tuples of coordinates,
-    includes xarray subsetting capabilities and has a map scale property.
-    """
-
-    name: str  # name of region
-    xmin: float  # left coordinate
-    xmax: float  # right coordinate
-    ymin: float  # bottom coordinate
-    ymax: float  # top coordinate
-
-    @property
-    def scale(self) -> int:
-        """
-        Automatically set a map scale (1:scale)
-        based on x-coordinate range divided by 0.2
-        """
-        return int((self.xmax - self.xmin) / 0.2)
-
-    def bounds(self, style="lrbt") -> tuple:
-        """
-        Convenience function to get the bounding box coordinates
-        of the region in two different styles, lrbt or lbrt.
-        Defaults to 'lrbt', i.e. left, right, bottom, top.
- """ - if style == "lrbt": # left, right, bottom, top (for PyGMT) - return (self.xmin, self.xmax, self.ymin, self.ymax) - elif style == "lbrt": # left, bottom, right, top (for Shapely, etc) - return (self.xmin, self.ymin, self.xmax, self.ymax) - else: - raise NotImplementedError(f"Unknown style type {style}") - - def subset(self, ds: xr.Dataset, x_dim: str = "x", y_dim: str = "y") -> xr.Dataset: - """ - Convenience function to find datapoints in an xarray.Dataset - that fit within the bounding boxes of this region - """ - return np.logical_and( - np.logical_and(ds[x_dim] > self.xmin, ds[x_dim] < self.xmax), - np.logical_and(ds[y_dim] > self.ymin, ds[y_dim] < self.ymax), - ) diff --git a/deepicedrain/spatiotemporal.py b/deepicedrain/spatiotemporal.py new file mode 100644 index 0000000..3d95be4 --- /dev/null +++ b/deepicedrain/spatiotemporal.py @@ -0,0 +1,100 @@ +""" +Geospatial and Temporal class that implements some handy tools. +Does bounding box region subsets, coordinate/time conversions, and more! +""" +import dataclasses +import datetime + +import numpy as np +import pyproj +import xarray as xr + + +@dataclasses.dataclass(frozen=True) +class Region: + """ + A nice region data structure that outputs a tuple of bounding box + coordinates, has xarray subsetting capabilities and a map scale property. + """ + + name: str # name of region + xmin: float # left coordinate + xmax: float # right coordinate + ymin: float # bottom coordinate + ymax: float # top coordinate + + @property + def scale(self) -> int: + """ + Automatically set a map scale (1:scale) + based on x-coordinate range divided by 0.2 + """ + return int((self.xmax - self.xmin) / 0.2) + + def bounds(self, style="lrbt") -> tuple: + """ + Convenience function to get the bounding box coordinates + of the region in two different styles, lrbt or lbrt. + Defaults to 'lrbt', i.e. left, right, bottom, top. + """ + if style == "lrbt": # left, right, bottom, top (for PyGMT) + return (self.xmin, self.xmax, self.ymin, self.ymax) + elif style == "lbrt": # left, bottom, right, top (for Shapely, etc) + return (self.xmin, self.ymin, self.xmax, self.ymax) + else: + raise NotImplementedError(f"Unknown style type {style}") + + def subset( + self, ds: xr.Dataset, x_dim: str = "x", y_dim: str = "y", drop: bool = True + ) -> xr.Dataset: + """ + Convenience function to find datapoints in an xarray.Dataset + that fit within the bounding boxes of this region + """ + cond = np.logical_and( + np.logical_and(ds[x_dim] > self.xmin, ds[x_dim] < self.xmax), + np.logical_and(ds[y_dim] > self.ymin, ds[y_dim] < self.ymax), + ) + + return ds.where(cond=cond, drop=drop) + + +def deltatime_to_utctime( + dataarray: xr.DataArray, + start_epoch: np.datetime64 = np.datetime64("2018-01-01T00:00:00.000000"), +) -> xr.DataArray: + """ + Converts GPS time in nanoseconds from an epoch (default is 2018 Jan 1st) + to Coordinated Universal Time (UTC). + + Note, does not account for leap seconds! There are none declared since the + last one announced on 31/12/2016, so it should be fine for now as of 2020. + """ + utc_time: xr.DataArray = dataarray.__class__(start_epoch) + dataarray + + return utc_time + + +def lonlat_to_xy( + longitude: xr.DataArray, latitude: xr.DataArray, epsg: int = 3031 +) -> (xr.DataArray, xr.DataArray): + """ + Reprojects longitude/latitude EPSG:4326 coordinates to x/y coordinates. + Default conversion is to Antarctic Stereographic Projection EPSG:3031. 
+ """ + if hasattr(longitude, "__array__") and callable(longitude.__array__): + # TODO upgrade to PyProj 3.0 to remove this workaround for passing in + # dask.dataframe.core.Series or xarray.DataArray objects + # Based on https://github.com/pyproj4/pyproj/pull/625 + _longitude = longitude.__array__() + _latitude = latitude.__array__() + + x, y = pyproj.Proj(projparams=epsg)(_longitude, _latitude) + + if hasattr(longitude, "coords"): + return ( + xr.DataArray(data=x, coords=longitude.coords), + xr.DataArray(data=y, coords=latitude.coords), + ) + else: + return x, y diff --git a/tests/__init__.py b/deepicedrain/tests/__init__.py similarity index 100% rename from tests/__init__.py rename to deepicedrain/tests/__init__.py diff --git a/tests/test_calculate_delta.py b/deepicedrain/tests/test_calculate_delta.py similarity index 58% rename from tests/test_calculate_delta.py rename to deepicedrain/tests/test_calculate_delta.py index 7d58363..0fbf4d9 100644 --- a/tests/test_calculate_delta.py +++ b/deepicedrain/tests/test_calculate_delta.py @@ -7,26 +7,14 @@ import pytest import xarray as xr -from deepicedrain import calculate_delta +from deepicedrain import calculate_delta, catalog -@pytest.fixture(scope="module") -def atl11_dataset(): - """ - Loads a sample ATL11 test dataset from the intake catalog - """ - catalog: intake.catalog.local.YAMLFileCatalog = intake.open_catalog( - uri="tests/test_catalog.yaml" - ) - atl11_dataset: xr.Dataset = catalog.atl11_test_case.read() - - return atl11_dataset - - -def test_calculate_delta_height(atl11_dataset): +def test_calculate_delta_height(): """ Check that calculating change in elevation works. """ + atl11_dataset: xr.Dataset = catalog.test_data.atl11_test_case.to_dask() delta_height = calculate_delta( dataset=atl11_dataset, oldcyclenum=3, newcyclenum=4, variable="h_corr" ) @@ -37,11 +25,14 @@ def test_calculate_delta_height(atl11_dataset): npt.assert_allclose(actual=delta_height.mean().data, desired=-0.90124122) npt.assert_allclose(actual=delta_height.max().data, desired=9.49908442) + atl11_dataset.close() -def test_calculate_delta_time(atl11_dataset): + +def test_calculate_delta_time(): """ Check that calculating change in time works. 
""" + atl11_dataset: xr.Dataset = catalog.test_data.atl11_test_case.to_dask() delta_time = calculate_delta( dataset=atl11_dataset, oldcyclenum=3, newcyclenum=4, variable="delta_time" ) @@ -49,11 +40,13 @@ def test_calculate_delta_time(atl11_dataset): assert isinstance(delta_time, xr.DataArray) assert delta_time.shape == (1404,) npt.assert_equal( - actual=delta_time.min().data, desired=np.timedelta64(7846786703322903) + actual=np.asarray(delta_time.min()), desired=np.timedelta64(7846786703322903) ) npt.assert_equal( - actual=delta_time.mean().data, desired=np.timedelta64(7846786865357197) + actual=np.asarray(delta_time.mean()), desired=np.timedelta64(7846786865357197), ), npt.assert_equal( - actual=delta_time.max().data, desired=np.timedelta64(7846787022726588) + actual=np.asarray(delta_time.max()), desired=np.timedelta64(7846787022726588) ) + + atl11_dataset.close() diff --git a/tests/test_catalog.yaml b/deepicedrain/tests/test_catalog.yaml similarity index 91% rename from tests/test_catalog.yaml rename to deepicedrain/tests/test_catalog.yaml index 21e0d7d..14c2238 100644 --- a/tests/test_catalog.yaml +++ b/deepicedrain/tests/test_catalog.yaml @@ -2,16 +2,18 @@ metadata: version: 1 sources: atl11_test_case: + description: 'An example ATL11 hdf5 file for testing various calculations' args: + chunks: + cycle_number: 2 urlpath: simplecache::https://github.com/suzanne64/ATL11/raw/125ee1a653d78e6b86864b35c9d0fcfd72d64a85/ATL11_test_case/ATL11_078805_0304_02_v002.h5 xarray_kwargs: engine: h5netcdf group: /pt2/corrected_h storage_options: simplecache: - cache_storage: tests/test_data + cache_storage: '{{ CATALOG_DIR }}/test_data' same_names: True - description: 'An example ATL11 hdf5 file for testing various calculations' driver: intake_xarray.netcdf.NetCDFSource metadata: coords: diff --git a/deepicedrain/tests/test_deepicedrain.py b/deepicedrain/tests/test_deepicedrain.py new file mode 100644 index 0000000..1577f87 --- /dev/null +++ b/deepicedrain/tests/test_deepicedrain.py @@ -0,0 +1,17 @@ +import intake + +from deepicedrain import __version__, catalog + + +def test_version(): + assert __version__ == "0.1.0" + + +def test_deepicedrain_catalog(): + """ + Test that the intake ATLAS data catalog can be loaded via both + `deepicedrain.catalog` and `intake.cat.atlas_cat` + """ + catalog_entries = ["icesat2atlasdownloader", "icesat2atl06", "test_data"] + assert list(catalog) == catalog_entries + assert list(intake.cat.atlas_cat) == catalog_entries diff --git a/tests/test_bbox.py b/deepicedrain/tests/test_region.py similarity index 60% rename from tests/test_bbox.py rename to deepicedrain/tests/test_region.py index a2e3176..440f135 100644 --- a/tests/test_bbox.py +++ b/deepicedrain/tests/test_region.py @@ -1,51 +1,51 @@ """ -Tests behaviour of the BBox class +Tests behaviour of the Region class """ import numpy as np import pytest import xarray as xr -from deepicedrain import BBox +from deepicedrain import Region -def test_bbox_scale(): +def test_region_scale(): """ - Tests that a map scale is output based on the BBox region. + Tests that a map scale is output based on the region. """ - region = BBox("Antarctica", -2700000, 2800000, -2200000, 2300000) + region = Region("Antarctica", -2700000, 2800000, -2200000, 2300000) assert region.scale == 27500000 -def test_bbox_bounds_lrbt(): +def test_region_bounds_lrbt(): """ Tests that PyGMT style bounds are given (by default). 
""" - region = BBox("Siple Coast", -1000000, 250000, -1000000, -100000) + region = Region("Siple Coast", -1000000, 250000, -1000000, -100000) assert region.bounds() == (-1000000, 250000, -1000000, -100000) -def test_bbox_bounds_lbrt(): +def test_region_bounds_lbrt(): """ Tests that Shapely style bounds are given """ - region = BBox("Whillans Ice Stream", -350000, -100000, -700000, -450000) + region = Region("Whillans Ice Stream", -350000, -100000, -700000, -450000) assert region.bounds(style="lbrt") == (-350000, -700000, -100000, -450000) -def test_bbox_bounds_ltrb(): +def test_region_bounds_ltrb(): """ Tests that error is raised when passing in a style that is not implemented. """ - region = BBox("Kamb Ice Stream", -500000, -400000, -600000, -500000) + region = Region("Kamb Ice Stream", -500000, -400000, -600000, -500000) with pytest.raises(NotImplementedError): print(region.bounds(style="ltrb")) -def test_bbox_subset(): +def test_region_subset(): """ Test that we can subset an xarray.Dataset based on the region's bounds """ - region = BBox("South Pole", -100, 100, -100, 100) + region = Region("South Pole", -100, 100, -100, 100) dataset = xr.Dataset( data_vars={"h_corr": (["x", "y"], np.random.rand(50, 50))}, coords={ @@ -53,6 +53,6 @@ def test_bbox_subset(): "y": np.linspace(start=-160, stop=160, num=50), }, ) - ds_subset = dataset.where(cond=region.subset(ds=dataset), drop=True) + ds_subset = region.subset(ds=dataset) assert isinstance(ds_subset, xr.Dataset) assert ds_subset.h_corr.shape == (24, 30) diff --git a/deepicedrain/tests/test_spatiotemporal_conversions.py b/deepicedrain/tests/test_spatiotemporal_conversions.py new file mode 100644 index 0000000..d6f03e4 --- /dev/null +++ b/deepicedrain/tests/test_spatiotemporal_conversions.py @@ -0,0 +1,84 @@ +""" +Tests various conversions between geospatial and temporal units +""" +import datetime + +import dask +import numpy as np +import numpy.testing as npt +import pandas as pd +import xarray as xr + +from deepicedrain import catalog, deltatime_to_utctime, lonlat_to_xy + + +def test_deltatime_to_utctime(): + """ + Test that converting from ICESat-2 delta_time to utc_time works, + and that the xarray dimensions are preserved in the process. + """ + atl11_dataset: xr.Dataset = catalog.test_data.atl11_test_case.to_dask() + + utc_time: xr.DataArray = deltatime_to_utctime(dataarray=atl11_dataset.delta_time) + + assert utc_time.shape == (1404, 2) + assert utc_time.dims == ("ref_pt", "cycle_number") + assert dask.is_dask_collection(utc_time) + + utc_time = utc_time.compute() + + npt.assert_equal( + actual=utc_time.data.min(), + desired=np.datetime64("2019-05-19T20:53:51.039891534"), + ) + npt.assert_equal( + actual=np.datetime64(pd.DataFrame(utc_time.data)[0].mean()), + desired=np.datetime64("2019-05-19 20:54:00.925868"), + ) + npt.assert_equal( + actual=np.datetime64(pd.DataFrame(utc_time.data)[1].mean()), + desired=np.datetime64("2019-08-18 16:33:47.791226"), + ) + npt.assert_equal( + actual=utc_time.data.max(), + desired=np.datetime64("2019-08-18T16:33:57.834610209"), + ) + + atl11_dataset.close() + + +def test_lonlat_to_xy_dask_series(): + """ + Test that converting from longitude/latitude to x/y in EPSG:3031 works when + passing them in as dask.dataframe.core.Series objects. 
+ """ + atl11_dataset: xr.Dataset = catalog.test_data.atl11_test_case.to_dask() + atl11_dataframe: dask.dataframe.core.DataFrame = atl11_dataset.to_dask_dataframe() + + x, y = lonlat_to_xy( + longitude=atl11_dataframe.longitude, latitude=atl11_dataframe.latitude, + ) + npt.assert_equal(actual=x.mean(), desired=-56900105.00307033) + npt.assert_equal(actual=y.mean(), desired=48141607.48486084) + + atl11_dataset.close() + + +def test_lonlat_to_xy_xarray_dataarray(): + """ + Test that converting from longitude/latitude to x/y in EPSG:3031 works when + passing them in as xarray.DataArray objects. Ensure that the xarray + dimensions are preserved in the process. + """ + atl11_dataset: xr.Dataset = catalog.test_data.atl11_test_case.to_dask() + + x, y = lonlat_to_xy( + longitude=atl11_dataset.longitude, latitude=atl11_dataset.latitude + ) + + assert x.dims == y.dims == ("ref_pt",) + assert x.shape == y.shape == (1404,) + npt.assert_equal(actual=x.mean().data, desired=-56900105.00307034) + npt.assert_equal(actual=y.mean().data, desired=48141607.48486084) + + atl11_dataset.close() diff --git a/poetry.lock b/poetry.lock index 5fab5db..376f9d8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -172,7 +172,7 @@ description = "Python package for providing Mozilla's CA Bundle." name = "certifi" optional = false python-versions = "*" -version = "2019.11.28" +version = "2020.4.5.1" [[package]] category = "main" @@ -180,7 +180,7 @@ description = "Time-handling functionality from netcdf4-python" name = "cftime" optional = false python-versions = "*" -version = "1.1.1.2" +version = "1.1.3" [package.dependencies] numpy = "*" @@ -751,7 +751,10 @@ description = "A Python implementation of the JSON5 data format." name = "json5" optional = false python-versions = "*" -version = "0.9.4" +version = "0.9.5" + +[package.extras] +dev = ["hypothesis"] [[package]] category = "main" @@ -1403,7 +1406,7 @@ description = "Python parsing module" name = "pyparsing" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.4.6" +version = "2.4.7" [[package]] category = "main" @@ -1529,7 +1532,7 @@ description = "Python bindings for 0MQ" name = "pyzmq" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*" -version = "19.0.0" +version = "19.0.1" [[package]] category = "dev" @@ -1898,26 +1901,26 @@ cartopy = [ {file = "Cartopy-0.18.0.tar.gz", hash = "sha256:7ffa317e8f8011e0d965a3ef1179e57a049f77019867ed677d49dcc5c0744434"}, ] certifi = [ - {file = "certifi-2019.11.28-py2.py3-none-any.whl", hash = "sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3"}, - {file = "certifi-2019.11.28.tar.gz", hash = "sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"}, + {file = "certifi-2020.4.5.1-py2.py3-none-any.whl", hash = "sha256:1d987a998c75633c40847cc966fcf5904906c920a7f17ef374f5aa4282abd304"}, + {file = "certifi-2020.4.5.1.tar.gz", hash = "sha256:51fcb31174be6e6664c5f69e3e1691a2d72a1a12e90f872cbdb1567eb47b6519"}, ] cftime = [ - {file = "cftime-1.1.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:33a78faacac37cdfb68f998400c81ece5acf368cbf803a9fde7cf01e527d0860"}, - {file = "cftime-1.1.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:ba44e4ae980db94e999ae91e160d734c7865ab437e48591a96fe98ad46b541cb"}, - {file = "cftime-1.1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9d49569ed1c2e8fc3a4c0f95bdbe14a2e146a794ed7a6970dcb54fea455acb88"}, - {file = "cftime-1.1.1.2-cp36-none-win32.whl", hash = 
"sha256:a47357917b1f28af3a8b53e3fa0004fa1c7b4e454d1744a26fda46571991def5"}, - {file = "cftime-1.1.1.2-cp36-none-win_amd64.whl", hash = "sha256:43644b85c8a9351f5b208dda7442132c78c782ed2ed86e6cdc8580306f2d9afb"}, - {file = "cftime-1.1.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:056a3843f3b74789f73770180d4a80416c0558841c55a22c05a99a04a44d0f1c"}, - {file = "cftime-1.1.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a915c640da4baff6221b87a2ab09f1aef1132611072e300cfd6ccc6960693bc9"}, - {file = "cftime-1.1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1a3e9025d315ef7d023a2865f377f105d75907f0c66913fca56158fce8aa6f2e"}, - {file = "cftime-1.1.1.2-cp37-none-win32.whl", hash = "sha256:10974085d22c8345fbaf7ae5dbf8560ddca5477fc899126a2bcd57f65f1f52b0"}, - {file = "cftime-1.1.1.2-cp37-none-win_amd64.whl", hash = "sha256:9c3698285c77e24a3250d0366d872526470e5333d66874d0c0733ce99e242114"}, - {file = "cftime-1.1.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d09d0469e617c78115d860c2483fcc6e5e55154738c7ea7146a3591254aa366f"}, - {file = "cftime-1.1.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:21a28362757b3a7db4db0e8f174843798116b07c93976f6194d0afe3eb6fe1c0"}, - {file = "cftime-1.1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:298c97970031c2073064249184395cc040b0ec5f6439720bab2ac49354285715"}, - {file = "cftime-1.1.1.2-cp38-none-win32.whl", hash = "sha256:89ee6f94dbb81c22576f81c939a1b8d985c3bd31f63063babeda7e969339ff3b"}, - {file = "cftime-1.1.1.2-cp38-none-win_amd64.whl", hash = "sha256:2405ca220a9f90edbf4836f175fd5535455d8cdd1863213a09f41613c03530fb"}, - {file = "cftime-1.1.1.2.tar.gz", hash = "sha256:35711b5ec3928b9e724817bfa1b7325da205788ee04eae9166cbcd96ea7976a6"}, + {file = "cftime-1.1.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:106d8bd1c144c83de1288c51225fd6846539074b60bb9e05b5e357d5e1eed6b9"}, + {file = "cftime-1.1.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:ead81301dbfcfef2b452ce6997f4f82cc0c2d968d27a2795251b5cdfc4b17295"}, + {file = "cftime-1.1.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:623255e0264de5cde085f0027ab03365965f3f416de407b604b00abe676f20f2"}, + {file = "cftime-1.1.3-cp36-none-win32.whl", hash = "sha256:bdd3a0b85fda45585529825e0954d6140d6df40b7bc489770e72fa6ce79ab9ea"}, + {file = "cftime-1.1.3-cp36-none-win_amd64.whl", hash = "sha256:3fb1f637aed7391c9a5d718175014f4cf705970e1d596bbc80bcac078eefefb0"}, + {file = "cftime-1.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9ad8521598be2b354d159538e9572afb4c0d4ac2b0b6240eef7197e1d709a89e"}, + {file = "cftime-1.1.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:fb9a5ed38dee8ac43235ba161bf5fb61274e965b9b396a650ba4d515b9d70a7c"}, + {file = "cftime-1.1.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9e1c863e38d410de0b59c5be7799ef05cd6f241cc254c6d656dfdae7546c1025"}, + {file = "cftime-1.1.3-cp37-none-win32.whl", hash = "sha256:555c101a0d03f6f6231253d0d84620a5ffb69757680be532086a70b033b9969b"}, + {file = "cftime-1.1.3-cp37-none-win_amd64.whl", hash = "sha256:ad0b9db793eae28ed1f53157e87044119c8153bf211420414f2c6b6e27fc86db"}, + {file = "cftime-1.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dd44afef96467f2cb3ec9324e9f471653e5daa55b05198f8da389ccb85d38157"}, + {file = "cftime-1.1.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:7c4d0de4ab846b76f73a9a45d9085e5c6321610e74548bdeaaf6c588fe841587"}, + {file = "cftime-1.1.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c9b27142cbd41049346128c8a5b78d292b675a3c185e16ed58645c273156e16"}, + {file = 
"cftime-1.1.3-cp38-none-win32.whl", hash = "sha256:d65d0fcc7120db05ddd34625310bc56038063fddcf63f3129379949d2ee762e9"}, + {file = "cftime-1.1.3-cp38-none-win_amd64.whl", hash = "sha256:eab0fb0268fc4743f65eb6efac5998af2b0805d914c2e2f83226531add57fd98"}, + {file = "cftime-1.1.3.tar.gz", hash = "sha256:fd84b8631dca1db9b40a75e18671b9edafd3515580d8ab33ce1ebafee75451f0"}, ] chardet = [ {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, @@ -2126,8 +2129,8 @@ jinja2 = [ {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"}, ] json5 = [ - {file = "json5-0.9.4-py2.py3-none-any.whl", hash = "sha256:4e0fc461b5508196a3ddb3b981dc677805923b86d6eb603c7f58f2459ab1458f"}, - {file = "json5-0.9.4.tar.gz", hash = "sha256:2ebfad1cd502dca6aecab5b5c36a21c732c3461ddbc412fb0e9a52b07ddfe586"}, + {file = "json5-0.9.5-py2.py3-none-any.whl", hash = "sha256:af1a1b9a2850c7f62c23fde18be4749b3599fd302f494eebf957e2ada6b9e42c"}, + {file = "json5-0.9.5.tar.gz", hash = "sha256:703cfee540790576b56a92e1c6aaa6c4b0d98971dc358ead83812aa4d06bdb96"}, ] jsonschema = [ {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, @@ -2610,8 +2613,8 @@ pygmt = [ {file = "pygmt-0.1.1.tar.gz", hash = "sha256:7eb1d4957b10b2281376606d7f40896da4d657988e1c93a2d1224750d3e1dbd0"}, ] pyparsing = [ - {file = "pyparsing-2.4.6-py2.py3-none-any.whl", hash = "sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"}, - {file = "pyparsing-2.4.6.tar.gz", hash = "sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f"}, + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, ] pyproj = [ {file = "pyproj-2.6.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:6839ce14635ebfb01c67e456148f4f1fa04b03ef9645551b89d36593f2a3e57d"}, @@ -2703,34 +2706,34 @@ pyyaml = [ {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, ] pyzmq = [ - {file = "pyzmq-19.0.0-cp27-cp27m-macosx_10_9_intel.whl", hash = "sha256:3f12ce1e9cc9c31497bd82b207e8e86ccda9eebd8c9f95053aae46d15ccd2196"}, - {file = "pyzmq-19.0.0-cp27-cp27m-win32.whl", hash = "sha256:e8e4efb52ec2df8d046395ca4c84ae0056cf507b2f713ec803c65a8102d010de"}, - {file = "pyzmq-19.0.0-cp27-cp27m-win_amd64.whl", hash = "sha256:f5b6d015587a1d6f582ba03b226a9ddb1dfb09878b3be04ef48b01b7d4eb6b2a"}, - {file = "pyzmq-19.0.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:bb10361293d96aa92be6261fa4d15476bca56203b3a11c62c61bd14df0ef89ba"}, - {file = "pyzmq-19.0.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4557d5e036e6d85715b4b9fdb482081398da1d43dc580d03db642b91605b409f"}, - {file = "pyzmq-19.0.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:84b91153102c4bcf5d0f57d1a66a0f03c31e9e6525a5f656f52fc615a675c748"}, - {file = "pyzmq-19.0.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:6aaaf90b420dc40d9a0e1996b82c6a0ff91d9680bebe2135e67c9e6d197c0a53"}, - {file = "pyzmq-19.0.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:ad48865a29efa8a0cecf266432ea7bc34e319954e55cf104be0319c177e6c8f5"}, - {file = "pyzmq-19.0.0-cp35-cp35m-win32.whl", hash = "sha256:32234c21c5e0a767c754181c8112092b3ddd2e2a36c3f76fc231ced817aeee47"}, - {file = 
"pyzmq-19.0.0-cp35-cp35m-win_amd64.whl", hash = "sha256:f37c29da2a5b0c5e31e6f8aab885625ea76c807082f70b2d334d3fd573c3100a"}, - {file = "pyzmq-19.0.0-cp36-cp36m-macosx_10_9_intel.whl", hash = "sha256:1e076ad5bd3638a18c376544d32e0af986ca10d43d4ce5a5d889a8649f0d0a3d"}, - {file = "pyzmq-19.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:f4d558bc5668d2345773a9ff8c39e2462dafcb1f6772a2e582fbced389ce527f"}, - {file = "pyzmq-19.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4f562dab21c03c7aa061f63b147a595dbe1006bf4f03213272fc9f7d5baec791"}, - {file = "pyzmq-19.0.0-cp36-cp36m-win32.whl", hash = "sha256:7f7e7b24b1d392bb5947ba91c981e7d1a43293113642e0d8870706c8e70cdc71"}, - {file = "pyzmq-19.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:75238d3c16cab96947705d5709187a49ebb844f54354cdf0814d195dd4c045de"}, - {file = "pyzmq-19.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb3b7156ef6b1a119e68fbe3a54e0a0c40ecacc6b7838d57dd708c90b62a06dc"}, - {file = "pyzmq-19.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a99ae601b4f6917985e9bb071549e30b6f93c72f5060853e197bdc4b7d357e5f"}, - {file = "pyzmq-19.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:242d949eb6b10197cda1d1cec377deab1d5324983d77e0d0bf9dc5eb6d71a6b4"}, - {file = "pyzmq-19.0.0-cp37-cp37m-win32.whl", hash = "sha256:a49fd42a29c1cc1aa9f461c5f2f5e0303adba7c945138b35ee7f4ab675b9f754"}, - {file = "pyzmq-19.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:5f10a31f288bf055be76c57710807a8f0efdb2b82be6c2a2b8f9a61f33a40cea"}, - {file = "pyzmq-19.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26f4ae420977d2a8792d7c2d7bda43128b037b5eeb21c81951a94054ad8b8843"}, - {file = "pyzmq-19.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:944f6bb5c63140d76494467444fd92bebd8674236837480a3c75b01fe17df1ab"}, - {file = "pyzmq-19.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:b08e425cf93b4e018ab21dc8fdbc25d7d0502a23cc4fea2380010cf8cf11e462"}, - {file = "pyzmq-19.0.0-cp38-cp38-win32.whl", hash = "sha256:a1f957c20c9f51d43903881399b078cddcf710d34a2950e88bce4e494dcaa4d1"}, - {file = "pyzmq-19.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:bd1a769d65257a7a12e2613070ca8155ee348aa9183f2aadf1c8b8552a5510f5"}, - {file = "pyzmq-19.0.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:0bbc1728fe4314b4ca46249c33873a390559edac7c217ec7001b5e0c34a8fb7f"}, - {file = "pyzmq-19.0.0-pp36-pypy36_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5e071b834051e9ecb224915398f474bfad802c2fff883f118ff5363ca4ae3edf"}, - {file = "pyzmq-19.0.0.tar.gz", hash = "sha256:5e1f65e576ab07aed83f444e201d86deb01cd27dcf3f37c727bc8729246a60a8"}, + {file = "pyzmq-19.0.1-cp27-cp27m-macosx_10_9_intel.whl", hash = "sha256:58688a2dfa044fad608a8e70ba8d019d0b872ec2acd75b7b5e37da8905605891"}, + {file = "pyzmq-19.0.1-cp27-cp27m-win32.whl", hash = "sha256:87c78f6936e2654397ca2979c1d323ee4a889eef536cc77a938c6b5be33351a7"}, + {file = "pyzmq-19.0.1-cp27-cp27m-win_amd64.whl", hash = "sha256:97b6255ae77328d0e80593681826a0479cb7bac0ba8251b4dd882f5145a2293a"}, + {file = "pyzmq-19.0.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:15b4cb21118f4589c4db8be4ac12b21c8b4d0d42b3ee435d47f686c32fe2e91f"}, + {file = "pyzmq-19.0.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:931339ac2000d12fe212e64f98ce291e81a7ec6c73b125f17cf08415b753c087"}, + {file = "pyzmq-19.0.1-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:2a88b8fabd9cc35bd59194a7723f3122166811ece8b74018147a4ed8489e6421"}, + {file = "pyzmq-19.0.1-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:bafd651b557dd81d89bd5f9c678872f3e7b7255c1c751b78d520df2caac80230"}, + {file = "pyzmq-19.0.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:8952f6ba6ae598e792703f3134af5a01af8f5c7cf07e9a148f05a12b02412cea"}, + {file = "pyzmq-19.0.1-cp35-cp35m-win32.whl", hash = "sha256:54aa24fd60c4262286fc64ca632f9e747c7cc3a3a1144827490e1dc9b8a3a960"}, + {file = "pyzmq-19.0.1-cp35-cp35m-win_amd64.whl", hash = "sha256:dcbc3f30c11c60d709c30a213dc56e88ac016fe76ac6768e64717bd976072566"}, + {file = "pyzmq-19.0.1-cp36-cp36m-macosx_10_9_intel.whl", hash = "sha256:6ca519309703e95d55965735a667809bbb65f52beda2fdb6312385d3e7a6d234"}, + {file = "pyzmq-19.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:4ee0bfd82077a3ff11c985369529b12853a4064320523f8e5079b630f9551448"}, + {file = "pyzmq-19.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ba6f24431b569aec674ede49cad197cad59571c12deed6ad8e3c596da8288217"}, + {file = "pyzmq-19.0.1-cp36-cp36m-win32.whl", hash = "sha256:956775444d01331c7eb412c5fb9bb62130dfaac77e09f32764ea1865234e2ca9"}, + {file = "pyzmq-19.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b08780e3a55215873b3b8e6e7ca8987f14c902a24b6ac081b344fd430d6ca7cd"}, + {file = "pyzmq-19.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:21f7d91f3536f480cb2c10d0756bfa717927090b7fb863e6323f766e5461ee1c"}, + {file = "pyzmq-19.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:bfff5ffff051f5aa47ba3b379d87bd051c3196b0c8a603e8b7ed68a6b4f217ec"}, + {file = "pyzmq-19.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:07fb8fe6826a229dada876956590135871de60dbc7de5a18c3bcce2ed1f03c98"}, + {file = "pyzmq-19.0.1-cp37-cp37m-win32.whl", hash = "sha256:342fb8a1dddc569bc361387782e8088071593e7eaf3e3ecf7d6bd4976edff112"}, + {file = "pyzmq-19.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:faee2604f279d31312bc455f3d024f160b6168b9c1dde22bf62d8c88a4deca8e"}, + {file = "pyzmq-19.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b9d21fc56c8aacd2e6d14738021a9d64f3f69b30578a99325a728e38a349f85"}, + {file = "pyzmq-19.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:af0c02cf49f4f9eedf38edb4f3b6bb621d83026e7e5d76eb5526cc5333782fd6"}, + {file = "pyzmq-19.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:5f1f2eb22aab606f808163eb1d537ac9a0ba4283fbeb7a62eb48d9103cf015c2"}, + {file = "pyzmq-19.0.1-cp38-cp38-win32.whl", hash = "sha256:f9d7e742fb0196992477415bb34366c12e9bb9a0699b8b3f221ff93b213d7bec"}, + {file = "pyzmq-19.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:5b99c2ae8089ef50223c28bac57510c163bfdff158c9e90764f812b94e69a0e6"}, + {file = "pyzmq-19.0.1-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:cf5d689ba9513b9753959164cf500079383bc18859f58bf8ce06d8d4bef2b054"}, + {file = "pyzmq-19.0.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:aaa8b40b676576fd7806839a5de8e6d5d1b74981e6376d862af6c117af2a3c10"}, + {file = "pyzmq-19.0.1.tar.gz", hash = "sha256:13a5638ab24d628a6ade8f794195e1a1acd573496c3b85af2f1183603b7bf5e0"}, ] regex = [ {file = "regex-2020.5.7-cp27-cp27m-win32.whl", hash = "sha256:5493a02c1882d2acaaf17be81a3b65408ff541c922bfd002535c5f148aa29f74"}, diff --git a/pyproject.toml b/pyproject.toml index e7040e9..24bcbf5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,9 @@ black = "^19.10b0" jupytext = "^1.4.2" pytest = "^5.4.2" +[tool.poetry.plugins."intake.catalogs"] +"atlas_cat" = "deepicedrain:catalog" + [build-system] requires = ["poetry>=0.12"] build-backend = "poetry.masonry.api" diff --git a/tests/test_deepicedrain.py b/tests/test_deepicedrain.py deleted file mode 100644 index 
--- a/tests/test_deepicedrain.py
+++ /dev/null
@@ -1,5 +0,0 @@
-from deepicedrain import __version__
-
-
-def test_version():
-    assert __version__ == "0.1.0"
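
Taken together, this patch renames `BBox` to `Region`, moves it alongside the new time and coordinate converters into `deepicedrain/spatiotemporal.py`, and ships the intake catalog inside the package. A quick sketch of how the relocated helpers compose end-to-end (not part of the patch itself; it reuses the bundled `test_data.atl11_test_case` sample and the Siple Coast bounds from the notebooks above, and whether any data points survive the final clip depends on where the sample track lies):

    import deepicedrain
    import xarray as xr

    # Load the bundled ATL11 sample; first use downloads and caches the .h5 file
    ds: xr.Dataset = deepicedrain.catalog.test_data.atl11_test_case.to_dask()

    # Reproject longitude/latitude (EPSG:4326) to x/y (EPSG:3031);
    # xarray coords are preserved, so x/y can then be set as coordinates
    ds["x"], ds["y"] = deepicedrain.lonlat_to_xy(
        longitude=ds.longitude, latitude=ds.latitude
    )
    ds = ds.set_coords(names=["x", "y"])

    # Convert nanosecond delta_time offsets (2018-01-01 epoch) to UTC datetimes
    ds["utc_time"] = deepicedrain.deltatime_to_utctime(dataarray=ds.delta_time)

    # Clip to a bounding box; Region.subset() now applies
    # ds.where(cond=..., drop=True) internally, unlike the old BBox.subset()
    region = deepicedrain.Region("Siple Coast", -1000000, 250000, -1000000, -100000)
    ds_subset: xr.Dataset = region.subset(ds=ds)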