Skip to content

Commit

Permalink
Add tests for Xarray io functions (#30)
Browse files Browse the repository at this point in the history
* removed (commented-out) CONUS/AK search constraint

* removed CONUS/AK constraint (cryocloud dev instead of local)

* updated CONUS/AK (not sure why my branch didn't have this already) and win-64 compat

* WSL build complete

* Deleted CONUS/AK test and box import

* added shapely make_valid to cascading_search

* Return rasters for cop30 and ESA search

* Added ODC dependency and fixed formatting

* Added test for .io.xarray and synced search main.py

* Cleaned test_xarray and added aoi, large_aoi to init

* test_xarray matplotlib import inside test func

* removed depends_on_optional from xarray test

* use conftest.py

* add back unused import for auth test

* streamline environments

* streamline environments 2

---------

Co-authored-by: Scott Henderson <[email protected]>
  • Loading branch information
Jack-Hayes and scottyhq authored Dec 14, 2024
1 parent c5a1a85 commit f8b0973
Show file tree
Hide file tree
Showing 8 changed files with 1,833 additions and 3,159 deletions.
4,841 changes: 1,722 additions & 3,119 deletions pixi.lock

Large diffs are not rendered by default.

29 changes: 13 additions & 16 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -31,39 +31,37 @@ classifiers = [
dynamic = ["version"]

dependencies = [
#"aiohttp>=3.10.6,<4",
"cloudpathlib[s3]>=0.20.0,<0.21",
#"fsspec>=2024.9.0,<2025",
"geopandas>=1.0.1,<2",
"maxar-platform>=1.0.2,<2",
"odc-stac>=0.3.10,<0.4",
"planetary-computer>=1.0.0,<2",
"pyarrow>=18.0.0,<19",
"pystac-client>=0.8.3,<0.9",
"requests>=2.32.3,<3",
"rioxarray>=0.17.0,<0.18",
"stac-asset>=0.4.3,<0.5",
"stac-geoparquet>=0.6.0,<0.7",
"odc-stac>=0.3.10,<0.4",
]

[project.optional-dependencies]
dev = [
"ipykernel>=6.29.5,<7",
"matplotlib>=3.9.4,<4",
"mypy>=1.11.2,<2",
"pre-commit>=3.8.0,<4",
"pylint>=3.3.1,<4",
"pytest >=6",
"pytest-cov >=3",
"sliderule>=4.7.1,<5",
]
docs = [
"folium", # comes w/ geopandas on conda-forge but not pypi
#"furo>=2023.08.17",
"mapclassify",
"matplotlib",
"myst-nb",
"myst_parser>=0.13",
"pydata-sphinx-theme>=0.16.0,<0.17",
#"rpds-py>=0.21.0,<0.22",
"sphinx>=7.0",
"sphinx_autodoc_typehints",
"sphinx_copybutton",
Expand Down Expand Up @@ -211,37 +209,36 @@ docs = { features = ["docs"], solve-group = "default" }
# If a package is listed in project.dependencies but not repeated here, it is installed from pypi
[tool.pixi.dependencies]
python = "<3.13" # https://github.com/stac-utils/stac-geoparquet/issues/81
#aiohttp = "*"
#fsspec = "*"
geopandas = "*"
odc-stac = "*"
planetary-computer = "*"
pystac-client = "*"
requests = "*"
rioxarray = "*"
# stac-asset = "*" # not on conda-forge
#s3fs = "*"
stac-geoparquet = "*"
pyarrow = "*"
# Testing additional dependencies (not in pypi list)
jsonschema = ">=4.23.0,<5"
libgdal-arrow-parquet = ">=3.10.0,<4"
odc-stac = "*"
#nbconvert = ">=7.16.4,<8"
#cloudpathlib-s3 = ">=0.20.0,<0.21"
#matplotlib-base = ">=3.9.2,<4"
#sliderule = ">=4.7.1,<5"


[tool.pixi.feature.dev.dependencies]
# NOTE: ipykernel to run notebooks in vscode
ipykernel = ">=6.29.5,<7"
matplotlib = "*"
mypy = "*"
# Bug if=*? Because you require pre-commit>=3.8.0,<4 and pre-commit==4.0.0, we can conclude that your requirements are unsatisfiable.
pre-commit = "<4"
pylint = "*"
pytest = "*"
mypy = "*"
# Testing additional dependencies
sliderule = "*"

# NOTE: test new dependencies locally by adding below
#rich = ">=13.8.1,<14" # Optional. convenient for rich.print(dataset)
#xvec = ">=0.3.0,<0.4"
sliderule = ">=4.7.1,<5"
#sliderule = ">=4.7.1,<5"
#matplotlib = ">=3.9.4,<4"

[tool.pixi.pypi-dependencies]
coincident = { path = ".", editable = false }
Expand Down
6 changes: 3 additions & 3 deletions src/coincident/datasets/planetary_computer.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,15 +19,15 @@

@dataclass
class COP30(Dataset):
"""Essential metadata for Copernicus DEM"""
"""Essential metadata and data access for Copernicus DEM"""

alias: str = "cop30"
has_stac_api: bool = True
collections: list[str] = field(default_factory=lambda: ["cop-dem-glo-30"])
search: str = STACAPI
start: str | None = None # NOTE: has 'representative' datetime of 2021-04-22
start: str | None = None # Copernicus DEM has 'representative' datetime: 2021-04-22
end: str | None = None
type: str = "sar"
type: str = "dem"
provider: str = "microsoft"


Expand Down
2 changes: 1 addition & 1 deletion src/coincident/io/xarray.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ def to_dataset(
bands: list[str] | None = None,
aoi: gpd.GeoDataFrame | None = None,
mask: bool = False,
**kwargs: dict[str, Any],
**kwargs: Any,
) -> xr.DataArray:
"""
Convert a GeoDataFrame to an xarray DataArray using odc.stac
Expand Down
5 changes: 0 additions & 5 deletions tests/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +0,0 @@
from __future__ import annotations

# import os
# if not os.environ.get('MAXAR_API_KEY'):
# os.environ['MAXAR_API_KEY'] = 'fake-test-key'
23 changes: 23 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
# ruff: noqa: ARG001
from __future__ import annotations

import geopandas as gpd
import pytest

# import os
# if not os.environ.get('MAXAR_API_KEY'):
# os.environ['MAXAR_API_KEY'] = 'fake-test-key'


@pytest.fixture(scope="package")
def aoi():
    """Small shared test AOI: Grand Mesa, CO (11 vertices, ~1,361 km^2)."""
    url = "https://raw.githubusercontent.com/SlideRuleEarth/sliderule-python/main/data/grandmesa.geojson"
    return gpd.read_file(url)


@pytest.fixture(scope="package")
def large_aoi():
    """Large shared test AOI: Colorado state boundary (260 vertices, ~269,590 km^2).

    BUG FIX: the original declared ``def large_aoi(scope="package")`` —
    passing the scope as a function parameter, which pytest interprets as a
    fixture request rather than a fixture scope, so the fixture was not
    actually package-scoped. The scope now lives in the decorator, matching
    the sibling ``aoi`` fixture above.
    """
    aoi_url = "https://raw.githubusercontent.com/unitedstates/districts/refs/heads/gh-pages/states/CO/shape.geojson"
    return gpd.read_file(aoi_url)
16 changes: 1 addition & 15 deletions tests/test_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,24 +18,10 @@
except: # noqa: E722
not_authenticated = True
maxar_authenticated = pytest.mark.skipif(
not_authenticated, reason="tests for linux only"
not_authenticated, reason="Not authenticated with Maxar API"
)


@pytest.fixture
def aoi():
# 11 vertices, 1,361km^2
aoi_url = "https://raw.githubusercontent.com/SlideRuleEarth/sliderule-python/main/data/grandmesa.geojson"
return gpd.read_file(aoi_url)


@pytest.fixture
def large_aoi():
# 260 vertices, large area 269,590 km^2
aoi_url = "https://raw.githubusercontent.com/unitedstates/districts/refs/heads/gh-pages/states/CO/shape.geojson"
return gpd.read_file(aoi_url)


@typing.no_type_check
def test_no_dataset_specified():
with pytest.raises(
Expand Down
70 changes: 70 additions & 0 deletions tests/test_xarray.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
from __future__ import annotations

import pytest
import xarray as xr
from matplotlib.collections import QuadMesh

import coincident
from coincident.io.xarray import plot_esa_worldcover, to_dataset

# Decorate tests requiring internet (slow & flaky)
network = pytest.mark.network


@network
def test_to_dataset_with_cop30(aoi):
    """`to_dataset` over a COP30 search result yields an xarray Dataset with a 'data' variable."""
    results = coincident.search.search(dataset="cop30", intersects=aoi)
    ds = to_dataset(results, aoi=aoi, resolution=0.1).compute()  # ~1km pixels
    assert isinstance(ds, xr.Dataset), "Expected output to be an xarray Dataset."
    assert "data" in ds.data_vars, "Expected 'data' variable in the Dataset."


@network
def test_to_dataset_with_worldcover(aoi):
    """`to_dataset` over a 2020 WorldCover search result yields a Dataset with a 'map' variable."""
    results = coincident.search.search(
        dataset="worldcover", intersects=aoi, datetime=["2020"]
    )
    ds = to_dataset(results, bands=["map"], aoi=aoi, resolution=0.1).compute()  # ~1km pixels
    assert isinstance(ds, xr.Dataset), "Expected output to be an xarray Dataset."
    assert "map" in ds.data_vars, "Expected 'map' variable in the Dataset."


@network
def test_plot_esa_worldcover_valid(aoi):
    """`plot_esa_worldcover` renders a pcolormesh for a valid 2021 WorldCover dataset."""
    results = coincident.search.search(
        dataset="worldcover", intersects=aoi, datetime=["2021"]
    )
    landcover = (
        to_dataset(results, bands=["map"], aoi=aoi, resolution=0.1)  # ~1km pixels
        .compute()
        .rename(map="landcover")
    )
    ax = plot_esa_worldcover(landcover)
    assert ax is not None, "Expected a valid Matplotlib Axes object."
    # pcolormesh artists are QuadMesh children of the Axes:
    # https://matplotlib.org/stable/users/prev_whats_new/whats_new_3.4.0.html
    # https://github.com/matplotlib/matplotlib/blob/main/lib/matplotlib/tests/test_contour.py#L146
    has_mesh = any(isinstance(child, QuadMesh) for child in ax.get_children())
    assert has_mesh, "Expected at least one pcolormesh object in the plot."

0 comments on commit f8b0973

Please sign in to comment.