Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Stroke classification & typology example demo #157

Open
wants to merge 8 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
43 changes: 39 additions & 4 deletions neatnet/continuity.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,8 @@
def get_stroke_info(
    artifacts: geopandas.GeoSeries | geopandas.GeoDataFrame,
    roads: geopandas.GeoSeries | geopandas.GeoDataFrame,
    typify: bool = False,
) -> tuple[list[int], list[int], list[int], list[int]] | geopandas.GeoDataFrame:
    """Generate information about strokes within ``artifacts`` and the
    resulting lists can be assigned as columns to ``artifacts``. Classifies
    the strokes within the CES typology.

    * 'continuing' strokes - continues before and after artifact.
    * 'ending' strokes - continues only at one end.
    * 'single' strokes - does not continue.

    Parameters
    ----------
    artifacts : geopandas.GeoSeries | geopandas.GeoDataFrame
        Polygons representing the artifacts.
    roads : geopandas.GeoSeries | geopandas.GeoDataFrame
        LineStrings representing the road network.
    typify : bool = False
        If ``True``, label each road edge covered by an artifact with its
        CES class in a new ``'typology'`` column ('C', 'E', or 'S'),
        propagate that label to every edge of the same COINS stroke group,
        and return ``roads`` instead of the per-artifact count lists.
        NOTE: ``roads`` is mutated in place in this mode.

    Returns
    -------
    strokes : list[int]
        Counts of strokes per artifact.
    c_ : list[int]
        Counts for 'continuing' strokes - continues before and after artifact.
    e_ : list[int]
        Counts for 'ending' strokes - continues only at one end.
    s_ : list[int]
        Counts for 'single' strokes - does not continue.
    roads : geopandas.GeoDataFrame
        Only returned when ``typify=True``: the input roads with a
        ``'typology'`` column mapped across COINS stroke groups.
    """
    strokes = []
    c_ = []
    e_ = []
    s_ = []

    if typify:
        # 'x' is a sentinel for "not classified by any artifact"
        roads["typology"] = "x"

    for geom in artifacts.geometry:
        singles = 0
        ends = 0
        # edges of the network covered by this artifact polygon
        edges = roads.iloc[roads.sindex.query(geom, predicate="covers")]
        ecg = edges.coins_group
        mains_index = []
        if typify:
            ends_index = []
            singles_index = []
        if ecg.nunique() == 1 and edges.shape[0] == edges.coins_count.iloc[0]:
            # roundabout special case -- the entire (single) stroke group
            # lies within the artifact, so it counts as one 'single' stroke
            singles = 1
            mains = 0
            if typify:
                # record the roundabout edges so they are labeled 'S';
                # without this they would keep the 'x' sentinel and map
                # to NaN in the group_mapper step below
                singles_index += edges.index.to_list()
        else:
            all_ends = edges[edges.coins_end]
            ae_cg = all_ends.coins_group

            # 'continuing' strokes: groups with no end inside the artifact
            mains_slice = edges[~ecg.isin(ae_cg)]
            mains_index += mains_slice.index.to_list()
            mains = mains_slice.coins_group.nunique()

            visited = []
            for ix, coins_count, group in zip(
                all_ends.index, all_ends.coins_count, ae_cg, strict=True
            ):
                if group not in visited:
                    if coins_count == (ecg == group).sum():
                        # whole stroke group is inside -> 'single'
                        singles += 1
                        visited.append(group)
                        if typify:
                            singles_index += [ix]
                    else:
                        # do not add to visited -- may be disjoint within the artifact
                        ends += 1
                        if typify:
                            ends_index += [ix]
        strokes.append(ecg.nunique())
        c_.append(mains)
        e_.append(ends)
        s_.append(singles)
        if typify:
            roads.loc[mains_index, "typology"] = "C"
            roads.loc[ends_index, "typology"] = "E"
            roads.loc[singles_index, "typology"] = "S"

    if typify:
        # propagate each classified edge's label to its whole COINS group
        group_mapper = dict(
            roads[roads["typology"] != "x"][["coins_group", "typology"]].values
        )
        roads["typology"] = roads["coins_group"].map(group_mapper)
        return roads
    else:
        return strokes, c_, e_, s_
54 changes: 34 additions & 20 deletions neatnet/simplify.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,36 @@ def _identify_non_planar(
return artifacts


def classification_sequence(
    roads: gpd.GeoDataFrame,
    artifacts: gpd.GeoDataFrame,
    compute_coins: bool,
    eps: float = 1e-4,
    step: str = "singletons",
):
    """Classify artifacts into the ``{CES}`` typology.

    Links network nodes to artifacts, (optionally) computes COINS stroke
    continuity, classifies strokes per artifact, filters non-planar
    artifacts, and derives the per-artifact ``ces_type`` label.

    Parameters
    ----------
    roads : geopandas.GeoDataFrame
        LineStrings representing the road network.
    artifacts : geopandas.GeoDataFrame
        Polygons representing face artifacts.
    compute_coins : bool
        If ``True``, (re)compute stroke continuity attributes on ``roads``
        via ``continuity`` before classifying strokes.
    eps : float = 1e-4
        Tolerance passed to ``_link_nodes_artifacts``.
    step : str = 'singletons'
        Simplification-step label passed to ``_link_nodes_artifacts``.

    Returns
    -------
    nodes
        Network nodes related to ``artifacts`` (as produced by
        ``_link_nodes_artifacts``).
    artifacts : geopandas.GeoDataFrame
        Artifacts with ``C``/``E``/``S`` counts, ``interstitial_nodes``,
        and ``ces_type`` columns.
    roads : geopandas.GeoDataFrame
        The road network, with continuity attributes when ``compute_coins``.
    """

    # Extract network nodes and relate to artifacts
    nodes, artifacts = _link_nodes_artifacts(step, roads, artifacts, eps)

    # Compute number of stroke groups per artifact
    if compute_coins:
        roads, _ = continuity(roads)

    artifacts = _classify_strokes(artifacts, roads)

    # Filter artifacts caused by non-planar intersections
    artifacts = _identify_non_planar(artifacts, roads)

    # Count interstitial nodes (primes)
    _prime_count = artifacts["node_count"] - artifacts[["C", "E", "S"]].sum(axis=1)
    artifacts["interstitial_nodes"] = _prime_count

    # Define the type label, e.g. '4CCE' -- node count followed by one
    # letter per continuing/ending/single stroke
    artifacts["ces_type"] = [
        f"{x.node_count}{'C' * x.C}{'E' * x.E}{'S' * x.S}"
        for x in artifacts[["node_count", "C", "E", "S"]].itertuples()
    ]

    return nodes, artifacts, roads


def simplify_singletons(
artifacts: gpd.GeoDataFrame,
roads: gpd.GeoDataFrame,
Expand Down Expand Up @@ -159,26 +189,10 @@ def simplify_singletons(
The road network line data following the singleton procedure.
"""

# Extract network nodes and relate to artifacts
nodes, artifacts = _link_nodes_artifacts("singletons", roads, artifacts, eps)

# Compute number of stroke groups per artifact
if compute_coins:
roads, _ = continuity(roads)
artifacts = _classify_strokes(artifacts, roads)

# Filter artifacts caused by non-planar intersections
artifacts = _identify_non_planar(artifacts, roads)

# Count intersititial nodes (primes)
_prime_count = artifacts["node_count"] - artifacts[["C", "E", "S"]].sum(axis=1)
artifacts["interstitial_nodes"] = _prime_count

# Define the type label
ces_type = []
for x in artifacts[["node_count", "C", "E", "S"]].itertuples():
ces_type.append(f"{x.node_count}{'C' * x.C}{'E' * x.E}{'S' * x.S}")
artifacts["ces_type"] = ces_type
# Classify artifact typology
nodes, artifacts, roads = classification_sequence(
roads, artifacts, compute_coins, eps=eps
)

# Collect changes
to_drop: list[int] = []
Expand Down
Loading