
Commit

moved conncomp from laplace_beltrami.py to gen_isosurface.py
Jordan DeKraker committed Jan 27, 2025
1 parent 272a46d commit abe1acb
Showing 2 changed files with 48 additions and 46 deletions.
47 changes: 47 additions & 0 deletions hippunfold/workflow/scripts/gen_isosurface.py
@@ -2,6 +2,7 @@
import nibabel as nib
import numpy as np
from copy import deepcopy
import networkx as nx


def write_surface_to_gifti(points, faces, out_surf_gii):
@@ -21,6 +22,48 @@ def write_surface_to_gifti(points, faces, out_surf_gii):
    gifti.to_filename(out_surf_gii)


def largest_connected_component(vertices: np.ndarray, faces: np.ndarray):
    """
    Returns the vertices, faces, and original indices of the largest connected component of a 3D surface mesh.

    Parameters:
        vertices (np.ndarray): Array of vertex coordinates (N x 3).
        faces (np.ndarray): Array of face indices (M x 3).

    Returns:
        tuple: (new_vertices, new_faces, largest_component), where new_vertices are the vertex coordinates of the largest component,
        new_faces are the face indices adjusted to the new vertex order, and largest_component contains the indices of the original vertices.
    """
    # Build adjacency graph from face connectivity
    G = nx.Graph()
    for face in faces:
        G.add_edges_from(
            [(face[i], face[j]) for i in range(3) for j in range(i + 1, 3)]
        )

    # Find connected components
    components = list(nx.connected_components(G))

    # Select the largest connected component
    largest_component = max(components, key=len)
    largest_component = np.array(list(largest_component))

    # Create a mapping from old vertex indices to new ones
    index_map = {old_idx: new_idx for new_idx, old_idx in enumerate(largest_component)}

    # Filter the vertices and faces
    new_vertices = vertices[largest_component]
    new_faces = np.array(
        [
            [index_map[v] for v in face]
            for face in faces
            if all(v in index_map for v in face)
        ]
    )

    return new_vertices, new_faces, largest_component


def remove_nan_vertices(vertices, faces):
"""
Removes vertices containing NaNs and updates faces accordingly.
@@ -186,5 +229,9 @@ def compute_geodesic_distances(vertices, faces, source_indices):
points, faces = remove_nan_vertices(points, faces)


# apply largest connected component
points, faces, i_concomp = largest_connected_component(points, faces)


# write to gifti
write_surface_to_gifti(points, faces, snakemake.output.surf_gii)
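
Below is a minimal usage sketch (not part of the commit) of the relocated helper, assuming largest_connected_component as defined above is in scope; the toy mesh, variable names, and printed values are illustrative assumptions only.

import numpy as np

# Toy mesh with two disconnected pieces: vertices 0-3 form two triangles
# sharing an edge (the larger component); vertices 4-6 form an isolated triangle.
vertices = np.array(
    [
        [0.0, 0.0, 0.0],
        [1.0, 0.0, 0.0],
        [0.0, 1.0, 0.0],
        [1.0, 1.0, 0.0],
        [5.0, 5.0, 5.0],
        [6.0, 5.0, 5.0],
        [5.0, 6.0, 5.0],
    ]
)
faces = np.array([[0, 1, 2], [1, 3, 2], [4, 5, 6]])

new_vertices, new_faces, kept = largest_connected_component(vertices, faces)
print(new_vertices.shape)  # (4, 3): only the larger component survives
print(new_faces)           # faces re-indexed against the kept vertices
print(kept)                # original indices of the retained vertices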
47 changes: 1 addition & 46 deletions hippunfold/workflow/scripts/laplace_beltrami.py
@@ -3,49 +3,6 @@
import nibabel as nib
from scipy.interpolate import NearestNDInterpolator
from collections import defaultdict
import networkx as nx


def largest_connected_component(vertices: np.ndarray, faces: np.ndarray):
    """
    Returns the vertices, faces, and original indices of the largest connected component of a 3D surface mesh.

    Parameters:
        vertices (np.ndarray): Array of vertex coordinates (N x 3).
        faces (np.ndarray): Array of face indices (M x 3).

    Returns:
        tuple: (new_vertices, new_faces, largest_component), where new_vertices are the vertex coordinates of the largest component,
        new_faces are the face indices adjusted to the new vertex order, and largest_component contains the indices of the original vertices.
    """
    # Build adjacency graph from face connectivity
    G = nx.Graph()
    for face in faces:
        G.add_edges_from(
            [(face[i], face[j]) for i in range(3) for j in range(i + 1, 3)]
        )

    # Find connected components
    components = list(nx.connected_components(G))

    # Select the largest connected component
    largest_component = max(components, key=len)
    largest_component = np.array(list(largest_component))

    # Create a mapping from old vertex indices to new ones
    index_map = {old_idx: new_idx for new_idx, old_idx in enumerate(largest_component)}

    # Filter the vertices and faces
    new_vertices = vertices[largest_component]
    new_faces = np.array(
        [
            [index_map[v] for v in face]
            for face in faces
            if all(v in index_map for v in face)
        ]
    )

    return new_vertices, new_faces, largest_component


def find_boundary_vertices(vertices, faces):
@@ -154,11 +111,9 @@ def solve_laplace_beltrami_open_mesh(vertices, faces, boundary_conditions=None):


surf = nib.load(snakemake.input.surf_gii)
vertices_orig = surf.agg_data("NIFTI_INTENT_POINTSET")
vertices = surf.agg_data("NIFTI_INTENT_POINTSET")
faces = surf.agg_data("NIFTI_INTENT_TRIANGLE")

vertices, faces, i_concomp = largest_connected_component(vertices_orig, faces)


# get source/sink vertices by nearest neighbour (only of edge vertices)
boundary_vertices = np.array(find_boundary_vertices(vertices, faces))
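
The comment above refers to nearest-neighbour assignment of source/sink labels; a minimal sketch of that idea with scipy's NearestNDInterpolator, where the seed coordinates, labels, and variable names are hypothetical and not taken from laplace_beltrami.py:

import numpy as np
from scipy.interpolate import NearestNDInterpolator

# Hypothetical seed points carrying integer source/sink labels
seed_coords = np.array([[0.0, 0.0, 0.0], [10.0, 0.0, 0.0]])
seed_labels = np.array([1, 2])  # 1 = source, 2 = sink (illustrative)

interp = NearestNDInterpolator(seed_coords, seed_labels)

# Boundary (edge) vertices of the mesh; each takes the label of its nearest seed
boundary_coords = np.array([[0.5, 0.1, 0.0], [9.5, 0.2, 0.1]])
boundary_labels = interp(boundary_coords)
print(boundary_labels)  # [1 2]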
