CLN: Standardize imports to only use 'as' when naming conflicts arise (
snowman2 authored Jul 27, 2023
1 parent 998ee3b commit d954dfe
Showing 9 changed files with 63 additions and 56 deletions.
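The convention being applied: import a module under its own name and only alias it with 'as' when the plain name would collide with something else in scope. A minimal sketch of the before/after pattern, using the geopandas calls that appear throughout this diff (the file path is shortened for illustration):

    # Before this commit: aliased import even though nothing conflicts
    import geopandas as gpd
    soil = gpd.read_file("soil_data_flat.geojson")

    # After this commit: plain import, full module name at the call site
    import geopandas
    soil = geopandas.read_file("soil_data_flat.geojson")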
10 changes: 5 additions & 5 deletions docs/examples/categorical.ipynb

Large diffs are not rendered by default.

19 changes: 12 additions & 7 deletions docs/examples/grid_to_vector_map.ipynb
@@ -19,7 +19,7 @@
"source": [
"import json\n",
"\n",
"import geopandas as gpd\n",
"import geopandas\n",
"\n",
"from geocube.api.core import make_geocube\n",
"\n",
@@ -32,7 +32,7 @@
"metadata": {},
"outputs": [],
"source": [
"ssurgo_data = gpd.read_file(\"../../test/test_data/input/soil_data_group.geojson\")"
"ssurgo_data = geopandas.read_file(\"../../test/test_data/input/soil_data_group.geojson\")"
]
},
{
@@ -192,9 +192,9 @@
"cell_type": "markdown",
"metadata": {},
"source": [
"## Convert data to grid\n",
"\n",
"See docs for [make_geocube](../geocube.rst#make-geocube)"
"## Convert data to grid\n",
"\n",
"See docs for [make_geocube](../geocube.rst#make-geocube)"
]
},
{
@@ -532,7 +532,7 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"display_name": "Python 3.10.6 64-bit",
"language": "python",
"name": "python3"
},
@@ -546,7 +546,12 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.6.7"
"version": "3.10.6"
},
"vscode": {
"interpreter": {
"hash": "e7370f93d1d0cde622a1f8e1c04877d8463912d04d973331ad4851f04de6915a"
}
}
},
"nbformat": 4,
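The "Convert data to grid" cell referenced above feeds the reloaded soil polygons into make_geocube. A rough sketch of that call follows; the measurement name and resolution are illustrative choices, not copied from the notebook:

    import geopandas
    from geocube.api.core import make_geocube

    ssurgo_data = geopandas.read_file("../../test/test_data/input/soil_data_group.geojson")
    out_grid = make_geocube(
        vector_data=ssurgo_data,       # GeoDataFrame loaded with the plain geopandas import
        measurements=["hzdept_r"],     # assumed numeric soil attribute; any column to rasterize works
        resolution=(-0.0001, 0.0001),  # illustrative cell size in the data's CRS
    )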
4 changes: 2 additions & 2 deletions docs/examples/rasterize_function.ipynb
@@ -13,7 +13,7 @@
"metadata": {},
"outputs": [],
"source": [
"import geopandas as gpd\n",
"import geopandas\n",
"\n",
"from functools import partial\n",
"from geocube.rasterize import rasterize_image\n",
@@ -29,7 +29,7 @@
"metadata": {},
"outputs": [],
"source": [
"gdf = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres'))"
"gdf = geopandas.read_file(geopandas.datasets.get_path('naturalearth_lowres'))"
]
},
{
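The imports changed above (geopandas plus functools.partial and geocube.rasterize.rasterize_image) are typically combined to customize how geometries are burned into the grid. A hedged sketch of that pattern; the all_touched override and the coarse resolution are illustrative, not taken from the notebook:

    from functools import partial

    import geopandas
    from geocube.api.core import make_geocube
    from geocube.rasterize import rasterize_image

    gdf = geopandas.read_file(geopandas.datasets.get_path("naturalearth_lowres"))
    out_grid = make_geocube(
        vector_data=gdf,
        measurements=["pop_est"],  # numeric column shipped with the naturalearth_lowres dataset
        resolution=(-1, 1),        # coarse, illustrative grid in degrees
        # pass a pre-configured rasterizer; this variant also fills cells merely touched by a polygon
        rasterize_function=partial(rasterize_image, all_touched=True),
    )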
12 changes: 6 additions & 6 deletions docs/examples/rasterize_point_data.ipynb

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions docs/examples/timestamp_missing_data.ipynb

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions docs/examples/zonal_statistics.ipynb
@@ -14,7 +14,7 @@
"metadata": {},
"outputs": [],
"source": [
"import geopandas as gpd\n",
"import geopandas\n",
"import numpy\n",
"import rioxarray\n",
"import xarray\n",
@@ -43,7 +43,7 @@
"# https://github.com/corteva/geocube/\n",
"# You could also use the full path: \n",
"# https://raw.githubusercontent.com/corteva/geocube/master/test/test_data/input/soil_data_group.geojson\n",
"ssurgo_data = gpd.read_file(\"../../test/test_data/input/soil_data_group.geojson\")\n",
"ssurgo_data = geopandas.read_file(\"../../test/test_data/input/soil_data_group.geojson\")\n",
"ssurgo_data = ssurgo_data.loc[ssurgo_data.hzdept_r==0]\n",
"# convert the key to group to the vector data to an integer as that is one of the\n",
"# best data types for this type of mapping. If your data is not integer,\n",
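The truncated comment above ("convert the key to group ... to an integer") refers to casting the join key before rasterizing so each zone gets a clean integer band value. A short sketch of that step, assuming mukey is the key column the notebook groups on:

    import geopandas
    import numpy

    ssurgo_data = geopandas.read_file("../../test/test_data/input/soil_data_group.geojson")
    ssurgo_data = ssurgo_data.loc[ssurgo_data.hzdept_r == 0]
    # integer keys map cleanly onto a raster band; floats or strings do not
    ssurgo_data["mukey"] = ssurgo_data["mukey"].astype(numpy.int32)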
4 changes: 2 additions & 2 deletions geocube/xarray_extensions/vectorxarray.py
@@ -1,7 +1,7 @@
"""
This module is an extension for xarray to provide support for vector datasets.
"""
- import geopandas as gpd
+ import geopandas
import numpy
import rioxarray # noqa: F401 pylint: disable=unused-import
import xarray
@@ -68,7 +68,7 @@ def to_geodataframe(self):
extra_coords = list(set(list(out_obj.coords)) - {"geometry"})
if extra_coords:
out_obj = out_obj.copy().reset_coords(extra_coords)
- geodf = gpd.GeoDataFrame(out_obj.to_dataframe().reset_index())
+ geodf = geopandas.GeoDataFrame(out_obj.to_dataframe().reset_index())
geodf.crs = self._obj.rio.crs
return geodf

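For reference, the accessor method edited here is exercised by the vectorxarray tests further down; a minimal round-trip sketch of that usage, with the GeoJSON path assumed to be run from the repository root:

    import geopandas
    from geocube.xarray_extensions import vectorxarray

    gdf = geopandas.read_file("test/test_data/input/soil_data_flat.geojson")
    vxd = vectorxarray.from_geodataframe(gdf)  # xarray Dataset indexed by geometry
    gdf2 = vxd.vector.to_geodataframe()        # method changed above; returns a GeoDataFrame with the CRS restored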
46 changes: 24 additions & 22 deletions test/integration/api/test_core_integration.py
@@ -2,7 +2,7 @@
import json
from functools import partial

- import geopandas as gpd
+ import geopandas
import pandas
import pytest
import xarray
@@ -41,8 +41,10 @@
"input_geodata",
[
str(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
pandas.DataFrame(gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")),
geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
pandas.DataFrame(
geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
),
],
)
def test_make_geocube(input_geodata, tmpdir):
@@ -79,7 +81,7 @@ def test_make_geocube(input_geodata, tmpdir):

@pytest.mark.parametrize(
"input_geodata",
- [gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")],
+ [geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")],
)
def test_make_geocube__categorical(input_geodata, tmpdir):
input_geodata["soil_type"] = [
@@ -119,7 +121,7 @@ def test_make_geocube__categorical(input_geodata, tmpdir):
"input_geodata",
[
TEST_INPUT_DATA_DIR / "soil_data_flat.geojson",
- gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
],
)
def test_make_geocube__interpolate_na(input_geodata, tmpdir):
@@ -162,7 +164,7 @@ def test_make_geocube__interpolate_na(input_geodata, tmpdir):
"input_geodata",
[
TEST_INPUT_DATA_DIR / "soil_data_flat.geojson",
- gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
],
)
def test_make_geocube__like(input_geodata, tmpdir):
@@ -198,7 +200,7 @@ def test_make_geocube__like(input_geodata, tmpdir):
"input_geodata",
[
TEST_INPUT_DATA_DIR / "soil_data_flat.geojson",
- gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
],
)
def test_make_geocube__only_resolution(input_geodata, tmpdir):
@@ -238,9 +240,9 @@ def test_make_geocube__only_resolution(input_geodata, tmpdir):
"input_geodata",
[
TEST_INPUT_DATA_DIR / "naive_time_vector_data.geojson",
gpd.read_file(TEST_INPUT_DATA_DIR / "naive_time_vector_data.geojson"),
geopandas.read_file(TEST_INPUT_DATA_DIR / "naive_time_vector_data.geojson"),
TEST_INPUT_DATA_DIR / "time_vector_data.geojson",
gpd.read_file(TEST_INPUT_DATA_DIR / "time_vector_data.geojson"),
geopandas.read_file(TEST_INPUT_DATA_DIR / "time_vector_data.geojson"),
],
)
def test_make_geocube__convert_time(input_geodata, tmpdir):
@@ -307,7 +309,7 @@ def test_make_geocube__like_error_invalid_args(load_extra_kwargs):
"input_geodata",
[
TEST_INPUT_DATA_DIR / "soil_data_flat.geojson",
- gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson"),
],
)
def test_make_geocube__no_measurements(input_geodata, tmpdir):
@@ -358,9 +360,9 @@ def test_make_geocube__no_geom(tmpdir):
@pytest.mark.parametrize(
"input_geodata",
[
- gpd.GeoDataFrame(columns=["test_col", "geometry"]),
- gpd.GeoDataFrame(),
- gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson").drop(
+ geopandas.GeoDataFrame(columns=["test_col", "geometry"]),
+ geopandas.GeoDataFrame(),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson").drop(
columns="geometry"
),
],
@@ -385,7 +387,7 @@ def test_make_geocube__no_resolution_error():
"input_geodata",
[
TEST_INPUT_DATA_DIR / "soil_data_group.geojson",
- gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_group.geojson"),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_group.geojson"),
],
)
def test_make_geocube__group_by(input_geodata, tmpdir):
@@ -425,7 +427,7 @@ def test_make_geocube__group_by(input_geodata, tmpdir):

@pytest.mark.parametrize(
"input_geodata",
- [gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_group.geojson")],
+ [geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_group.geojson")],
)
def test_make_geocube__group_by__categorical(input_geodata, tmpdir):
input_geodata["soil_type"] = [
@@ -470,7 +472,7 @@ def test_make_geocube__group_by__categorical(input_geodata, tmpdir):
"input_geodata",
[
TEST_INPUT_DATA_DIR / "soil_data_group.geojson",
- gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_group.geojson"),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_group.geojson"),
],
)
def test_make_geocube__group_by_like(input_geodata, tmpdir):
@@ -510,7 +512,7 @@ def test_make_geocube__group_by_like(input_geodata, tmpdir):
"input_geodata",
[
TEST_INPUT_DATA_DIR / "soil_data_group.geojson",
- gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_group.geojson"),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_group.geojson"),
],
)
def test_make_geocube__group_by_only_resolution(input_geodata, tmpdir):
@@ -544,7 +546,7 @@ def test_make_geocube__group_by_only_resolution(input_geodata, tmpdir):
"input_geodata",
[
TEST_INPUT_DATA_DIR / "time_vector_data.geojson",
- gpd.read_file(TEST_INPUT_DATA_DIR / "time_vector_data.geojson"),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "time_vector_data.geojson"),
],
)
def test_make_geocube__group_by_time(input_geodata, tmpdir):
@@ -573,7 +575,7 @@ def test_make_geocube__group_by_time(input_geodata, tmpdir):
"input_geodata",
[
TEST_INPUT_DATA_DIR / "time_vector_data.geojson",
- gpd.read_file(TEST_INPUT_DATA_DIR / "time_vector_data.geojson"),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "time_vector_data.geojson"),
],
)
def test_make_geocube__group_by_convert_with_time(input_geodata, tmpdir):
@@ -643,7 +645,7 @@ def test_make_geocube__group_by_like_error_invalid_args(load_extra_kwargs):
"input_geodata",
[
TEST_INPUT_DATA_DIR / "soil_data_group.geojson",
- gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_group.geojson"),
+ geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_group.geojson"),
],
)
def test_make_geocube__group_by_no_measurements(input_geodata, tmpdir):
@@ -819,7 +821,7 @@ def test_make_geocube__custom_rasterize_function__filter_null(
],
)
def test_make_geocube__minimize_dtype(dtype, fill, expected_type, tmpdir):
- gdf = gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
+ gdf = geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
gdf["mask"] = 1
gdf["mask"] = gdf["mask"].astype(dtype)
out_grid = make_geocube(
@@ -849,7 +851,7 @@ def test_rasterize__like_1d():
)

geom_array = make_geocube(
gpd.GeoDataFrame({"in_geom": [1]}, geometry=[geom], crs="epsg:4326"),
geopandas.GeoDataFrame({"in_geom": [1]}, geometry=[geom], crs="epsg:4326"),
like=like,
)
assert geom_array.rio.transform() == like.rio.transform()
@@ -1,4 +1,4 @@
- import geopandas as gpd
+ import geopandas
import pytest
import xarray
from dateutil.parser import parse
@@ -16,7 +16,7 @@ def assert_test_dataframes_equal(gdf, gdf2):


def test_from_geodataframe():
- gdf = gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
+ gdf = geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
vxd = vectorxarray.from_geodataframe(gdf)
assert all(gdf.geometry == vxd.geometry.values)
assert sorted(gdf.columns.tolist() + ["spatial_ref"]) == sorted(vxd.variables)
@@ -26,13 +26,13 @@ def test_from_geodataframe():


def test_to_geodataframe():
- gdf = gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
+ gdf = geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
gdf2 = vectorxarray.from_geodataframe(gdf).vector.to_geodataframe()
assert_test_dataframes_equal(gdf, gdf2)


def test_to_netcdf(tmpdir):
- gdf = gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
+ gdf = geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
vxd = vectorxarray.from_geodataframe(gdf)
output_file = tmpdir / "test_vector.nc"
vxd.vector.to_netcdf(output_file)
@@ -41,7 +41,7 @@ def test_multidimensional_error():


def test_multidimensional_error():
- gdf = gpd.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
+ gdf = geopandas.read_file(TEST_INPUT_DATA_DIR / "soil_data_flat.geojson")
vxd = vectorxarray.from_geodataframe(gdf)
vxd2 = vxd.copy()
vxd.coords["time"] = parse("20170516T000000")
