[pre-commit.ci] pre-commit autoupdate #179

Merged: 3 commits, Oct 8, 2024
Changes from 2 commits
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -1,7 +1,7 @@
 files: "core\/|notebooks\/"
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: "v0.5.6"
+    rev: "v0.6.9"
     hooks:
       - id: ruff
       - id: ruff-format
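Note: the jump from v0.5.6 to v0.6.9 crosses ruff 0.6.0, where ruff began linting and formatting Jupyter notebooks by default; that is presumably why a routine version bump also reformats every .ipynb file below. Running pre-commit autoupdate followed by pre-commit run --all-files locally should reproduce the same set of changes.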
8 changes: 4 additions & 4 deletions notebooks/_addartifacts_8989.ipynb
@@ -37,10 +37,10 @@
 "# find predicted artifacts\n",
 "option = \"circular_compactness_index\"\n",
 "threshold = results[city][option][\"threshold\"]\n",
-"polygons[\"is_artifact\"] = False # set default to False\n",
-"polygons.loc[\n",
-"    polygons[\"circular_compactness_index\"] <= threshold, \"is_artifact\"\n",
-"] = True # set to True for polygons where index is below the threshold"
+"polygons[\"is_artifact\"] = False  # set default to False\n",
+"polygons.loc[polygons[\"circular_compactness_index\"] <= threshold, \"is_artifact\"] = (\n",
+"    True  # set to True for polygons where index is below the threshold\n",
+")"
 ]
 },
 {
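For context, the cell above flags face polygons whose circular compactness falls at or below a per-city threshold; only its formatting changes in this PR. A minimal, self-contained sketch of the same flagging logic, with invented data and threshold (the notebook reads both from results):

    import pandas as pd

    # synthetic stand-in for the notebook's polygons table; values are invented
    polygons = pd.DataFrame({"circular_compactness_index": [0.05, 0.42, 0.11, 0.73]})
    threshold = 0.2  # hypothetical; the notebook derives this per city

    polygons["is_artifact"] = False  # set default to False
    # flag polygons whose compactness index is at or below the threshold
    polygons.loc[polygons["circular_compactness_index"] <= threshold, "is_artifact"] = True
    print(polygons)  # rows 0 and 2 are flagged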
4 changes: 1 addition & 3 deletions notebooks/_clip_networks.ipynb
@@ -331,9 +331,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"slc = gpd.read_parquet(\"../data/809/original/809.parquet\").query(\n",
-"    \"highway != 'service'\"\n",
-")"
+"slc = gpd.read_parquet(\"../data/809/original/809.parquet\").query(\"highway != 'service'\")"
 ]
 },
 {
16 changes: 4 additions & 12 deletions notebooks/check_corrected_input_data.ipynb
@@ -65,23 +65,15 @@
 "outputs": [],
 "source": [
 "for fua, city in utils.fua_city.items():\n",
-"    print(\n",
-"        \"================================\\n\"\n",
-"        f\"* {city} *\"\n",
-"        \"\\n--------------\"\n",
-"    )\n",
+"    print(\"================================\\n\" f\"* {city} *\" \"\\n--------------\")\n",
 "\n",
 "    o = utils.read_original(city)\n",
 "    m = utils.read_manual(city, o.crs)\n",
 "\n",
-"    print(\n",
-"        \"Records\\n\"\n",
-"        f\" Orig: {o.shape[0]:,}\\n\"\n",
-"        f\" Manu: {m.shape[0]:,}\\n\"\n",
-"    )\n",
+"    print(\"Records\\n\" f\" Orig: {o.shape[0]:,}\\n\" f\" Manu: {m.shape[0]:,}\\n\")\n",
 "\n",
-"    b = o.plot(figsize=(20,20), ec=\"k\", lw=.5, alpha=.5, zorder=1)\n",
-"    m.plot(ax=b, ec=\"b\", lw=.2, alpha=1, zorder=2)\n",
+"    b = o.plot(figsize=(20, 20), ec=\"k\", lw=0.5, alpha=0.5, zorder=1)\n",
+"    m.plot(ax=b, ec=\"b\", lw=0.2, alpha=1, zorder=2)\n",
 "\n",
 "    # large plot\n",
 "    matplotlib.pyplot.savefig(f\"{city}_orig_vs_manu.png\", dpi=400)"
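The rewrites in this and the following notebooks are mechanical formatter output: float literals gain a leading zero (.5 becomes 0.5), implicit string concatenations and long calls are collapsed or wrapped at the default 88-character limit, and inline comments get two leading spaces. A sketch of reproducing one such rewrite, assuming ruff >= 0.6 is on PATH and that `ruff format -` reads source from stdin:

    import subprocess

    # one of the pre-format lines from the hunk above
    src = 'b = o.plot(figsize=(20,20), ec="k", lw=.5, alpha=.5, zorder=1)\n'
    result = subprocess.run(
        ["ruff", "format", "-"],  # "-" formats stdin and writes to stdout
        input=src, capture_output=True, text=True, check=True,
    )
    print(result.stdout)
    # expected: b = o.plot(figsize=(20, 20), ec="k", lw=0.5, alpha=0.5, zorder=1)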
12 changes: 6 additions & 6 deletions notebooks/cityseer_overview_gaboardi.ipynb
@@ -2273,8 +2273,8 @@
 "outputs": [],
 "source": [
 "mm_nodes_raw, mm_edges_raw = momepy.nx_to_gdf(G_nx_mm)\n",
-"#(mm_nodes_raw.to_crs(4326).to_file(fpath / fbase.format(\"mm\", \"nodes\", \"raw\")))\n",
-"#(mm_edges_raw.to_crs(4326).to_file(fpath / fbase.format(\"mm\", \"edges\", \"raw\")))"
+"# (mm_nodes_raw.to_crs(4326).to_file(fpath / fbase.format(\"mm\", \"nodes\", \"raw\")))\n",
+"# (mm_edges_raw.to_crs(4326).to_file(fpath / fbase.format(\"mm\", \"edges\", \"raw\")))"
 ]
 },
 {
@@ -2321,8 +2321,8 @@
 ")\n",
 "G_cs_simp = momepy.gdf_to_nx(cs_road_simp, integer_labels=True)\n",
 "cs_nodes_simp, cs_edges_simp = momepy.nx_to_gdf(G_cs_simp)\n",
-"#(cs_nodes_simp.to_crs(4326).to_file(fpath / fbase.format(\"cs\", \"nodes\", \"simplified\")))\n",
-"#(cs_edges_simp.to_crs(4326).to_file(fpath / fbase.format(\"cs\", \"edges\", \"simplified\")))"
+"# (cs_nodes_simp.to_crs(4326).to_file(fpath / fbase.format(\"cs\", \"nodes\", \"simplified\")))\n",
+"# (cs_edges_simp.to_crs(4326).to_file(fpath / fbase.format(\"cs\", \"edges\", \"simplified\")))"
 ]
 },
 {
@@ -2341,8 +2341,8 @@
 "outputs": [],
 "source": [
 "cs_nodes_simp, cs_edges_simp = momepy.nx_to_gdf(G_cs_simp)\n",
-"#cs_nodes_simp.to_file(fpath / fbase.format(\"cs\", \"nodes\", \"simplified\"))\n",
-"#cs_edges_simp.to_file(fpath / fbase.format(\"cs\", \"edges\", \"simplified\"))"
+"# cs_nodes_simp.to_file(fpath / fbase.format(\"cs\", \"nodes\", \"simplified\"))\n",
+"# cs_edges_simp.to_file(fpath / fbase.format(\"cs\", \"edges\", \"simplified\"))"
 ]
 },
 {
12 changes: 3 additions & 9 deletions notebooks/parenx.ipynb
@@ -71,24 +71,18 @@
 "outputs": [],
 "source": [
 "for subfolder in glob.glob(\"../temp-parenx/*\"):\n",
-"\n",
-"    fua = int(re.findall(r'\\d+', subfolder)[0])\n",
+"    fua = int(re.findall(r\"\\d+\", subfolder)[0])\n",
 "\n",
 "    os.makedirs(f\"../data/{fua}/parenx/\", exist_ok=True)\n",
 "\n",
 "    ske = gpd.read_file(\n",
-"        filename = subfolder + \"/skeletonize.gpkg\",\n",
-"        driver = \"fiona\",\n",
-"        layer = \"line\"\n",
+"        filename=subfolder + \"/skeletonize.gpkg\", driver=\"fiona\", layer=\"line\"\n",
 "    )\n",
 "\n",
-"\n",
 "    ske.to_parquet(f\"../data/{fua}/parenx/skeletonize.parquet\")\n",
 "\n",
 "    vor = gpd.read_file(\n",
-"        filename = subfolder + \"/voronoi.gpkg\",\n",
-"        driver = \"fiona\",\n",
-"        layer = \"line\"\n",
+"        filename=subfolder + \"/voronoi.gpkg\", driver=\"fiona\", layer=\"line\"\n",
 "    )\n",
 "\n",
 "    vor.to_parquet(f\"../data/{fua}/parenx/voronoi.parquet\")"
8 changes: 5 additions & 3 deletions notebooks/simplification_api.ipynb
@@ -21,7 +21,7 @@
 "import folium\n",
 "import sgeop\n",
 "\n",
-"from core import utils\n"
+"from core import utils"
 ]
 },
 {
@@ -134,8 +134,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"m = roads.explore(max_zoom=52, tiles=\"cartodb positron\", color=\"black\", prefer_canvas=True)\n",
-"m = new_roads.explore(\"_status\", m=m, highlight_kwds=dict(color='red'))\n",
+"m = roads.explore(\n",
+"    max_zoom=52, tiles=\"cartodb positron\", color=\"black\", prefer_canvas=True\n",
+")\n",
+"m = new_roads.explore(\"_status\", m=m, highlight_kwds=dict(color=\"red\"))\n",
 "folium.LayerControl().add_to(m)\n",
 "m"
 ]
2 changes: 1 addition & 1 deletion notebooks/simplification_api_bug.ipynb
@@ -22,7 +22,7 @@
 "\n",
 "from core import algorithms, utils\n",
 "\n",
-"import geopandas as gpd\n"
+"import geopandas as gpd"
 ]
 },
 {
2 changes: 1 addition & 1 deletion notebooks/simplification_pipeline.ipynb
@@ -26,7 +26,7 @@
 "from libpysal import graph\n",
 "import sgeop\n",
 "\n",
-"from core import utils\n"
+"from core import utils"
 ]
 },
 {
14 changes: 11 additions & 3 deletions notebooks/simplification_prg.ipynb
@@ -73,7 +73,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"buildings = geopandas.read_parquet(\"/Users/martin/Downloads/buildings_chars_69300.parquet\", columns=[\"geometry\"])"
+"buildings = geopandas.read_parquet(\n",
+"    \"/Users/martin/Downloads/buildings_chars_69300.parquet\", columns=[\"geometry\"]\n",
+")"
 ]
 },
 {
@@ -139,8 +141,14 @@
 "metadata": {},
 "outputs": [],
 "source": [
-"m = roads.explore(max_zoom=52, tiles=\"cartodb positron\", color=\"black\", prefer_canvas=True, highlight_kwds=dict(color='red'))\n",
-"m = new_roads.explore(\"_status\", m=m, highlight_kwds=dict(color='red'))\n",
+"m = roads.explore(\n",
+"    max_zoom=52,\n",
+"    tiles=\"cartodb positron\",\n",
+"    color=\"black\",\n",
+"    prefer_canvas=True,\n",
+"    highlight_kwds=dict(color=\"red\"),\n",
+")\n",
+"m = new_roads.explore(\"_status\", m=m, highlight_kwds=dict(color=\"red\"))\n",
 "folium.LayerControl().add_to(m)\n",
 "m"
 ]
92 changes: 37 additions & 55 deletions notebooks/typology-multiples.ipynb
@@ -608,21 +608,25 @@
 "outputs": [],
 "source": [
 "def nx_gx_cluster(edges, cluster_geom, nodes, to_drop, to_add, eps=0.01):\n",
-"    '''treat an n-artifact cluster: merge all artifact polygons; drop\n",
+"    \"\"\"treat an n-artifact cluster: merge all artifact polygons; drop\n",
 "    all lines fully within the merged polygon; skeletonize and keep only\n",
-"    skeletonized edges and connecting nodes'''\n",
+"    skeletonized edges and connecting nodes\"\"\"\n",
 "\n",
 "    # get edges on boundary\n",
-"    edges_on_boundary = edges.intersection(cluster_geom.boundary.buffer(eps)).explode(ignore_index=True)\n",
+"    edges_on_boundary = edges.intersection(cluster_geom.boundary.buffer(eps)).explode(\n",
+"        ignore_index=True\n",
+"    )\n",
 "    edges_on_boundary = edges_on_boundary[\n",
-"        (~edges_on_boundary.is_empty) &\n",
-"        (edges_on_boundary.geom_type.str.contains(\"Line\")) &\n",
-"        (edges_on_boundary.length > 10*eps)\n",
-"    ] # keeping only (multi)linestrings of length>>eps\n",
+"        (~edges_on_boundary.is_empty)\n",
+"        & (edges_on_boundary.geom_type.str.contains(\"Line\"))\n",
+"        & (edges_on_boundary.length > 10 * eps)\n",
+"    ]  # keeping only (multi)linestrings of length>>eps\n",
 "    edges_on_boundary = edges_on_boundary.to_frame(\"geometry\")\n",
 "\n",
 "    # find road segments DELINEATING cluster polygon (to be partially merged, and kept)\n",
-"    edges_within = edges.iloc[edges.sindex.query(cluster_geom, predicate=\"contains\")].copy()\n",
+"    edges_within = edges.iloc[\n",
+"        edges.sindex.query(cluster_geom, predicate=\"contains\")\n",
+"    ].copy()\n",
 "\n",
 "    # find nodes ON the cluster polygon boundary (to be partially kept)\n",
 "    nodes_on_boundary = nodes.iloc[\n",
@@ -631,27 +635,25 @@
 "\n",
 "    # find edges that cross but do not lie within\n",
 "    edges_crossing = edges.iloc[\n",
-"        edges.sindex.query(\n",
-"            cluster_geom.buffer(eps),\n",
-"            predicate = \"crosses\"\n",
-"        )\n",
+"        edges.sindex.query(cluster_geom.buffer(eps), predicate=\"crosses\")\n",
 "    ]\n",
 "\n",
 "    # the nodes to keep are those that intersect with these crossing edges\n",
 "    nodes_to_keep = nodes_on_boundary.iloc[\n",
 "        nodes_on_boundary.sindex.query(\n",
-"            edges_crossing.union_all(),\n",
-"            predicate = \"intersects\"\n",
-"        )].copy()\n",
-"\n",
+"            edges_crossing.union_all(), predicate=\"intersects\"\n",
+"        )\n",
+"    ].copy()\n",
+"\n",
 "    # merging lines between nodes to keep:\n",
 "    buffered_nodes_to_keep = nodes_to_keep.buffer(eps).union_all()\n",
 "\n",
 "    # make queen contiguity graph on MINUSBUFFERED outline road segments,\n",
 "    # and copy component labels into edges_on_boundary gdf\n",
 "    edges_on_boundary = edges_on_boundary.explode(ignore_index=True)\n",
-"    queen = graph.Graph.build_fuzzy_contiguity(edges_on_boundary.difference(buffered_nodes_to_keep))\n",
+"    queen = graph.Graph.build_fuzzy_contiguity(\n",
+"        edges_on_boundary.difference(buffered_nodes_to_keep)\n",
+"    )\n",
 "    edges_on_boundary[\"comp\"] = queen.component_labels\n",
 "\n",
 "    # skeletonize\n",
@@ -661,7 +663,9 @@
 "        snap_to=False,\n",
 "    )\n",
 "\n",
-"    lines_to_drop = edges.iloc[edges.sindex.query(cluster_geom.buffer(eps), predicate=\"contains\")].index.to_list()\n",
+"    lines_to_drop = edges.iloc[\n",
+"        edges.sindex.query(cluster_geom.buffer(eps), predicate=\"contains\")\n",
+"    ].index.to_list()\n",
 "    lines_to_add = list(skel)\n",
 "\n",
 "    to_add.extend(lines_to_add)\n",
Expand All @@ -670,35 +674,27 @@
" ### RECONNECTING NON-PLANAR INTRUDING EDGES TO SKELETON\n",
"\n",
" # considering only edges that are kept\n",
" edges_kept = edges.copy().drop(lines_to_drop, axis = 0)\n",
" edges_kept = edges.copy().drop(lines_to_drop, axis=0)\n",
"\n",
" to_reconnect = []\n",
"\n",
" skel_merged = shapely.line_merge(skel)\n",
" skel_merged = gpd.GeoSeries(skel_merged, crs=edges.crs)\n",
"\n",
" skel_nodes = list(\n",
" shapely.get_point(skel_merged, 0))\n",
" skel_nodes.extend(list(\n",
" shapely.get_point(skel_merged, -1))\n",
" )\n",
" skel_nodes = list(shapely.get_point(skel_merged, 0))\n",
" skel_nodes.extend(list(shapely.get_point(skel_merged, -1)))\n",
" skel_nodes = gpd.GeoSeries(skel_nodes, crs=edges.crs).union_all()\n",
"\n",
" # loop through endpoints of kept edges...\n",
" for i in [0, -1]:\n",
"\n",
" # do the same for \"end\" points\n",
" endpoints = gpd.GeoSeries(\n",
" shapely.get_point(edges_kept.geometry, i),\n",
" crs = edges.crs\n",
" shapely.get_point(edges_kept.geometry, i), crs=edges.crs\n",
" )\n",
"\n",
" # which are contained by artifact...\n",
" endpoints = endpoints.iloc[\n",
" endpoints.sindex.query(\n",
" cluster_geom,\n",
" predicate=\"contains\"\n",
" )\n",
" endpoints.sindex.query(cluster_geom, predicate=\"contains\")\n",
" ]\n",
"\n",
" # ...but NOT on skeleton\n",
@@ -708,10 +704,7 @@
 "\n",
 "    # to_reconnect now contains a list of points which need to be connected to the nearest skel node:\n",
 "    # from those nodes, we need to add shapely shortest lines between those edges_kept.endpoints and\n",
-"    non_planar_connections = shapely.shortest_line(\n",
-"        skel_nodes,\n",
-"        to_reconnect\n",
-"    )\n",
+"    non_planar_connections = shapely.shortest_line(skel_nodes, to_reconnect)\n",
 "\n",
 "    ### extend our list \"to_add\" with this artifact clusters' contribution:\n",
 "    to_add.extend(non_planar_connections)"
@@ -755,8 +748,7 @@
 "# planar = artifacts[~artifacts.non_planar]\n",
 "\n",
 "# iterate through CLUSTERS of artifacts\n",
-"for _, artifact in artifacts_small.groupby(\"comp\"): # TODO: over entire table\n",
-"\n",
+"for _, artifact in artifacts_small.groupby(\"comp\"):  # TODO: over entire table\n",
 "    # get artifact cluster polygon\n",
 "    cluster_geom = artifact.union_all(method=\"coverage\")\n",
 "    # get edges relevant for an artifact\n",
@@ -768,8 +760,8 @@
 "        nodes=nodes,\n",
 "        to_drop=to_drop,\n",
 "        to_add=to_add,\n",
-"        eps=my_eps\n",
-"    )\n"
+"        eps=my_eps,\n",
+"    )"
 ]
 },
 {
@@ -779,7 +771,6 @@
 "outputs": [],
 "source": [
 "def rebuild_network(roads, to_drop, to_add, distance=2):\n",
-"\n",
 "    cleaned_roads = roads.geometry.drop(to_drop)\n",
 "\n",
 "    # # split lines on new nodes\n",
@@ -1569,22 +1560,13 @@
 "source": [
 "m = roads.iloc[roads.sindex.query(cluster_geom, predicate=\"intersects\")].explore(\n",
 "    tiles=\"CartoDB.Positron\",\n",
-"    name = \"original roads\",\n",
-"    color = \"orange\",\n",
-"    opacity = .4,\n",
-"    prefer_canvas=True\n",
-")\n",
-"new_roads.explore(\n",
-"    m=m,\n",
-"    name = \"new roads\",\n",
-"    color = \"blue\"\n",
-")\n",
-"artifacts.explore(\n",
-"    m=m,\n",
-"    color = \"yellow\",\n",
-"    opacity = .1,\n",
-"    name = \"artifact\"\n",
-")\n",
+"    name=\"original roads\",\n",
+"    color=\"orange\",\n",
+"    opacity=0.4,\n",
+"    prefer_canvas=True,\n",
+")\n",
+"new_roads.explore(m=m, name=\"new roads\", color=\"blue\")\n",
+"artifacts.explore(m=m, color=\"yellow\", opacity=0.1, name=\"artifact\")\n",
 "folium.LayerControl().add_to(m)\n",
 "m"
 ]
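One step of nx_gx_cluster worth isolating is the non-planar reconnection: dangling edge endpoints left inside the cluster are tied back to the skeleton with shapely.shortest_line, which connects to the nearest skeleton node. A self-contained toy example (all coordinates invented; shapely >= 2.0):

    import shapely
    from shapely.geometry import MultiPoint, Point

    skel_nodes = MultiPoint([(0, 0), (10, 0)])  # stand-in skeleton endpoints
    dangling = Point(4, 3)  # an edge endpoint left inside the cluster

    # build the connector to the nearest skeleton node, as nx_gx_cluster does
    connector = shapely.shortest_line(skel_nodes, dangling)
    print(connector)  # LINESTRING (0 0, 4 3) -- (0, 0) is the nearer node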