Skip to content

Commit

Permalink
Test open_mfdataset_manyfiles with engine zarr
Browse files Browse the repository at this point in the history
Zarr objects are folders which seem to cause issues with closing, so added a try-except to api.py to catch failures in f.close(). Some tests are failing when chunks=None because a numpy array is returned instead of a dask array.
  • Loading branch information
weiji14 committed Jul 1, 2020
1 parent 5571fff commit b9a239e
Show file tree
Hide file tree
Showing 2 changed files with 9 additions and 3 deletions.
5 changes: 4 additions & 1 deletion xarray/backends/api.py
Original file line number Diff line number Diff line change
Expand Up @@ -774,7 +774,10 @@ def __init__(self, file_objs):

def close(self):
    """Close every wrapped file object in this multi-file container.

    Zarr stores are directories rather than open file handles, so an
    entry may be ``None`` (or otherwise lack a usable ``close``); such
    entries are skipped instead of raising.
    """
    for f in self.file_objs:
        try:
            f.close()
        except AttributeError:  # 'NoneType' object has no attribute 'close'
            pass


def open_mfdataset(
Expand Down
7 changes: 5 additions & 2 deletions xarray/tests/test_backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -2507,7 +2507,7 @@ def test_write_inconsistent_chunks(self):
assert actual["y"].encoding["chunksizes"] == (100, 50)


@pytest.fixture(params=["scipy", "netcdf4", "h5netcdf", "pynio", "zarr"])
def readengine(request):
    """Parametrized fixture yielding each supported read-backend engine name.

    ``zarr`` is included so ``open_mfdataset`` is exercised against zarr
    stores as well as the netCDF-style backends.
    """
    return request.param

Expand Down Expand Up @@ -2567,7 +2567,10 @@ def test_open_mfdataset_manyfiles(
# split into multiple sets of temp files
for ii in original.x.values:
subds = original.isel(x=slice(ii, ii + 1))
subds.to_netcdf(tmpfiles[ii], engine=writeengine)
if writeengine != "zarr":
subds.to_netcdf(tmpfiles[ii], engine=writeengine)
else: # if writeengine == "zarr":
subds.to_zarr(store=tmpfiles[ii])

# check that calculation on opened datasets works properly
with open_mfdataset(
Expand Down

0 comments on commit b9a239e

Please sign in to comment.