Commit: proof of concept subfield label from unfold to native working
Showing 5 changed files with 1,063 additions and 228 deletions.
@@ -0,0 +1,192 @@
surf_thresholds = {"inner": 0.05, "outer": 0.95, "midthickness": 0.5}

print(bids(
    root=root,
    datatype="surf_",  # temporarily, to keep things separate for development
    suffix="{surfname}.surf.gii",
    space="unfold",
    desc="{desc}nostruct",
    hemi="{hemi}",
    label="{label}",
    **inputs.subj_wildcards
))

print(bids(
    root=root,
    datatype="surf_",
    fromdensity="{density}",
    suffix="subfields.label.gii",
    desc="{desc}",
    space="corobl",
    hemi="{hemi}",
    label="hipp",
    atlas="{atlas}",
    **inputs.subj_wildcards
))


rule gen_native_hipp_mesh:
    input:
        nii=bids(
            root=work,
            datatype="coords",
            dir="IO",
            label="hipp",
            suffix="coords.nii.gz",
            desc="{desc}",
            space="corobl",
            hemi="{hemi}",
            **inputs.subj_wildcards
        ),
    params:
        threshold=lambda wildcards: surf_thresholds[wildcards.surfname],
        decimate_percent=0,
    output:
        surf_gii=bids(
            root=root,
            datatype="surf_",  # temporarily, to keep things separate for development
            suffix="{surfname}.surf.gii",
            space="corobl",
            desc="{desc}nostruct",
            hemi="{hemi}",
            label="hipp",
            **inputs.subj_wildcards
        ),
    group:
        "subj"
    # container: will need a pyvista dependency
    script:
        "../scripts/gen_isosurface.py"


rule update_native_hipp_mesh_structure:
    input:
        surf_gii=bids(
            root=root,
            datatype="surf_",  # temporarily, to keep things separate for development
            suffix="{surfname}.surf.gii",
            space="corobl",
            desc="{desc}nostruct",
            hemi="{hemi}",
            label="{label}",
            **inputs.subj_wildcards
        ),
    params:
        structure_type=lambda wildcards: hemi_to_structure[wildcards.hemi],
        secondary_type=lambda wildcards: surf_to_secondary_type[wildcards.surfname],
        surface_type="ANATOMICAL",
    output:
        surf_gii=bids(
            root=root,
            datatype="surf_",  # temporarily, to keep things separate for development
            suffix="{surfname}.surf.gii",
            space="corobl",
            desc="{desc}",
            hemi="{hemi}",
            label="{label}",
            **inputs.subj_wildcards
        ),
    container:
        config["singularity"]["autotop"]
    group:
        "subj"
    shell:
        "cp {input} {output} && wb_command -set-structure {output.surf_gii} {params.structure_type} -surface-type {params.surface_type}"
        " -surface-secondary-type {params.secondary_type}"


rule warp_native_mesh_to_unfold:
    input:
        surf_gii=bids(
            root=root,
            datatype="surf_",  # temporarily, to keep things separate for development
            suffix="{surfname}.surf.gii",
            space="corobl",
            desc="{desc}",
            hemi="{hemi}",
            label="{label}",
            **inputs.subj_wildcards
        ),
        warp_native2unfold=bids(
            root=work,
            datatype="warps",
            **inputs.subj_wildcards,
            label="{label}",
            suffix="xfm.nii.gz",
            hemi="{hemi}",
            from_="corobl",
            to="unfold",
            mode="surface"
        ),
    params:
        structure_type=lambda wildcards: hemi_to_structure[wildcards.hemi],
        secondary_type=lambda wildcards: surf_to_secondary_type[wildcards.surfname],
        surface_type="FLAT",
    output:
        surf_gii=bids(
            root=root,
            datatype="surf_",  # temporarily, to keep things separate for development
            suffix="{surfname}.surf.gii",
            space="unfold",
            desc="{desc}",
            hemi="{hemi}",
            label="{label}",
            **inputs.subj_wildcards
        ),
    container:
        config["singularity"]["autotop"]
    group:
        "subj"
    shell:
        "wb_command -surface-apply-warpfield {input.surf_gii} {input.warp_native2unfold} {output.surf_gii} && "
        "wb_command -set-structure {output.surf_gii} {params.structure_type} -surface-type {params.surface_type}"
        " -surface-secondary-type {params.secondary_type}"


rule resample_subfields_to_native_surf:
    input:
        label_gii=bids(
            root=root,
            datatype="surf",
            den="{density}",
            suffix="subfields.label.gii",
            space="unfold",
            hemi="{hemi}",
            label="hipp",
            atlas="{atlas}",
            **inputs.subj_wildcards
        ),
        ref_unfold=os.path.join(
            workflow.basedir,
            "..",
            "resources",
            "unfold_template_hipp",
            "tpl-avg_space-unfold_den-{density}_midthickness.surf.gii",
        ),
        native_unfold=bids(
            root=root,
            datatype="surf_",  # temporarily, to keep things separate for development
            suffix="midthickness.surf.gii",
            space="unfold",
            desc="{desc}",
            hemi="{hemi}",
            label="hipp",
            **inputs.subj_wildcards
        ),
    output:
        label_gii=bids(
            root=root,
            datatype="surf_",
            fromdensity="{density}",
            desc="{desc}",
            suffix="subfields.label.gii",
            space="corobl",
            hemi="{hemi}",
            label="hipp",
            atlas="{atlas}",
            **inputs.subj_wildcards
        ),
    container:
        None  # config["singularity"]["autotop"]
    group:
        "subj"
    shell:
        "wb_command -label-resample {input.label_gii} {input.ref_unfold} {input.native_unfold} BARYCENTRIC {output.label_gii} -bypass-sphere-check"
@@ -0,0 +1,90 @@
import pyvista as pv
import nibabel as nib
import numpy as np


def remove_nan_points_faces(vertices, faces):
    # Step 1: Identify valid vertices (no NaN values)
    nan_mask = np.isnan(vertices).any(axis=1)
    valid_vertices = ~nan_mask  # True for valid rows
    valid_indices = np.where(valid_vertices)[0]

    # Step 2: Create a mapping from old to new indices
    # (default to -1 for invalid vertices)
    new_indices_map = -np.ones(vertices.shape[0], dtype=int)
    new_indices_map[valid_indices] = np.arange(len(valid_indices))

    # Step 3: Update the faces array to remove references to invalid vertices:
    # replace old indices with new ones, and drop faces that touch invalid vertices
    new_faces = []
    for face in faces:
        mapped_face = new_indices_map[face]  # map old indices to new ones
        if np.all(mapped_face >= 0):  # keep only faces with all-valid vertices
            new_faces.append(mapped_face)

    new_faces = np.array(new_faces)

    # Step 4: Remove invalid vertices from the array
    new_vertices = vertices[valid_vertices]

    return (new_vertices, new_faces)

# Load the NIfTI file
nifti_img = nib.load(snakemake.input.nii)
data = nifti_img.get_fdata()
affine = nifti_img.affine

# Get voxel spacing from the header
voxel_spacing = nifti_img.header.get_zooms()[:3]

# Create a PyVista grid
grid = pv.ImageData()
grid.dimensions = np.array(data.shape) + 1  # +1 point dim so that each voxel becomes one cell
grid.spacing = (1, 1, 1)  # use unit spacing and zero origin since we will apply the affine
grid.origin = (0, 0, 0)

# Add the scalar field, with zeros (background) set to NaN
grid.cell_data["values"] = np.where(data == 0, np.nan, data).flatten(order="F")
grid = grid.cells_to_points("values")
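# (cells_to_points re-meshes the image so that voxel centers become points and
# the voxel values become point data, which contour() interpolates below)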

# apply the affine, mapping the grid from voxel indices into world coordinates
tfm_grid = grid.transform(affine, inplace=False)

# the contour function produces the isosurface at the requested threshold
surface = tfm_grid.contour([snakemake.params.threshold], method="contour")

surface = surface.decimate(float(snakemake.params.decimate_percent) / 100.0)

# faces from a pyvista surface are formatted with the number of vertices at the
# start of each row; reshape and drop the first column to get an Nx3 array
faces = surface.faces
faces = faces.reshape((int(faces.shape[0] / 4), 4))[:, 1:4]
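# e.g. for two triangles pyvista stores [3, a, b, c, 3, d, e, f];
# the reshape above yields [[a, b, c], [d, e, f]]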

points = surface.points

# with NaNs in the background we end up with NaN vertices; remove these
# to end up with an open contour
new_points, new_faces = remove_nan_points_faces(points, faces)

points_darray = nib.gifti.GiftiDataArray(
    data=new_points, intent="NIFTI_INTENT_POINTSET", datatype="NIFTI_TYPE_FLOAT32"
)

tri_darray = nib.gifti.GiftiDataArray(
    data=new_faces, intent="NIFTI_INTENT_TRIANGLE", datatype="NIFTI_TYPE_INT32"
)

gifti = nib.GiftiImage()
gifti.add_gifti_data_array(points_darray)
gifti.add_gifti_data_array(tri_darray)

gifti.to_filename(snakemake.output.surf_gii)
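
For illustration, a minimal standalone sketch of the reindexing that remove_nan_points_faces performs, applied to a made-up two-triangle mesh with one NaN vertex (the arrays below are illustrative only, not taken from the pipeline):

import numpy as np

# toy mesh: vertex 3 is all-NaN, so the triangle that uses it must be dropped
vertices = np.array(
    [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [np.nan, np.nan, np.nan]]
)
faces = np.array([[0, 1, 2], [1, 2, 3]])

# same steps as remove_nan_points_faces: mask NaN rows, build an old->new
# index map, remap the faces, and keep only fully valid triangles
valid = ~np.isnan(vertices).any(axis=1)
index_map = -np.ones(len(vertices), dtype=int)
index_map[valid] = np.arange(valid.sum())
remapped = index_map[faces]
new_faces = remapped[(remapped >= 0).all(axis=1)]
new_vertices = vertices[valid]

print(new_vertices)  # the 3 valid vertices remain
print(new_faces)     # [[0 1 2]] -- the NaN-touching triangle is gone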