Merge branch 'master' of github.com:scilus/scilpy
arnaudbore committed Jan 29, 2025
2 parents 3ee923d + 0145bac commit e613c3e
Showing 119 changed files with 1,512 additions and 935 deletions.
4 changes: 0 additions & 4 deletions docs/source/documentation/fibertube_tracking.rst
@@ -147,10 +147,6 @@ Fibertube metrics
Before we get into tracking, here is an overview of the metrics that we
saved in ``metrics.json`` (values expressed in mm):

- ``fibertube_density``:
Estimate of the following ratio: volume of fibertubes / total volume
where the total volume is the combined volume of all voxels containing
at least one fibertube.
- ``min_external_distance``: Smallest distance separating two
fibertubes, outside their diameter.
- ``max_voxel_anisotropic``: Diagonal vector of the largest possible
61 changes: 61 additions & 0 deletions scilpy/gradients/gen_gradient_sampling.py
@@ -298,3 +298,64 @@ def _grad_electrostatic_repulsion_energy(bvecs, weight_matrix, alpha=1.0):
                    (bvecs[i] + bvecs[indices]).T / sums).sum(1)
    grad = grad.reshape(nb_bvecs * 3)
    return grad


def energy_comparison(bvecs1, bvecs2, nb_shells, nb_points_per_shell,
                      alpha=1.0):
    """
    Compute the electrostatic-repulsion energy of two sets of b-vectors with
    the same number of directions per shell.

    Parameters
    ----------
    bvecs1 : array-like of shape (N, 3)
        First set of b-vectors.
    bvecs2 : array-like of shape (N, 3)
        Second set of b-vectors.
    nb_shells : int
        Number of shells.
    nb_points_per_shell : list of ints
        Number of points per shell.
    alpha : float
        Controls the power of the repulsion. Default is 1.0.

    Returns
    -------
    energy1 : float
        Electrostatic-repulsion energy of set bvecs1.
    energy2 : float
        Electrostatic-repulsion energy of set bvecs2.
    """
    nb_dir = bvecs1.shape[0]
    shell_groups = ()
    for i in range(nb_shells):
        shell_groups += ([i],)

    shell_groups += (list(range(nb_shells)),)
    alphas = list(len(shell_groups) * (1.0,))
    weights = _compute_weights(nb_shells, nb_points_per_shell,
                               shell_groups, alphas)
    nb_points_total = np.sum(nb_points_per_shell)
    indices = np.cumsum(nb_points_per_shell).tolist()
    indices.insert(0, 0)
    weight_matrix = np.zeros((nb_points_total, nb_points_total))
    for s1 in range(nb_shells):
        for s2 in range(nb_shells):
            weight_matrix[indices[s1]:indices[s1 + 1],
                          indices[s2]:indices[s2 + 1]] = weights[s1, s2]

    epsilon = 1e-9
    energy1 = 0.0
    energy2 = 0.0
    for i in range(nb_dir):
        indices = (np.arange(nb_dir) > i)
        diffs = ((bvecs1[indices] - bvecs1[i]) ** 2).sum(1) ** alpha
        sums = ((bvecs1[indices] + bvecs1[i]) ** 2).sum(1) ** alpha
        energy1 += (weight_matrix[i, indices] *
                    (1.0 / (diffs + epsilon) + 1.0 / (sums + epsilon))).sum()
        diffs = ((bvecs2[indices] - bvecs2[i]) ** 2).sum(1) ** alpha
        sums = ((bvecs2[indices] + bvecs2[i]) ** 2).sum(1) ** alpha
        energy2 += (weight_matrix[i, indices] *
                    (1.0 / (diffs + epsilon) + 1.0 /
                     (sums + epsilon))).sum()
    return energy1, energy2
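
For context, here is a minimal usage sketch of the new helper, assuming it is
imported from scilpy.gradients.gen_gradient_sampling. The two b-vector sets
are hypothetical random unit vectors generated purely for illustration; a
lower energy indicates a more uniform angular distribution.

import numpy as np
from scilpy.gradients.gen_gradient_sampling import energy_comparison

rng = np.random.default_rng(0)

# Two hypothetical single-shell schemes of 30 directions each,
# normalized to unit length.
bvecs1 = rng.normal(size=(30, 3))
bvecs1 /= np.linalg.norm(bvecs1, axis=1, keepdims=True)
bvecs2 = rng.normal(size=(30, 3))
bvecs2 /= np.linalg.norm(bvecs2, axis=1, keepdims=True)

energy1, energy2 = energy_comparison(bvecs1, bvecs2, nb_shells=1,
                                     nb_points_per_shell=[30])
print(energy1, energy2)
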
169 changes: 125 additions & 44 deletions scilpy/tractanalysis/fibertube_scoring.py
@@ -1,4 +1,5 @@
import numpy as np
import nibabel as nib
from numba import objmode

from math import sqrt, acos
@@ -11,62 +12,141 @@
dist_segment_segment,
dist_point_segment)
from scilpy.tracking.utils import tqdm_if_verbose
from scilpy.tractanalysis.todi import TrackOrientationDensityImaging
from scilpy.tractograms.uncompress import uncompress


def mean_fibertube_density(sft):
def fibertube_density(sft, samples_per_voxel_axis, verbose=False):
"""
Estimates the average per-voxel spatial density of a set of fibertubes.
This is obtained by dividing the volume of fibertube segments present in
each voxel by the total volume of the voxel.
Estimates the per-voxel volumetric density of a set of fibertubes. In other
words, how much space is occupied by fibertubes and how much is empty.
1. Segments voxels that contain at least one fibertube.
2. Valid voxels are finely sampled and we count the number of samples that
landed within a fibertube. For each voxel, this number is then divided by
the total number of samples in the voxel.
3. By doing the same steps for samples that landed within 2 or more
fibertubes, we can create a density map of the fibertube collisions.
Parameters
----------
sft: StatefulTractogram
Stateful Tractogram object containing the fibertubes.
samples_per_voxel_axis: int
Number of samples to be created along a single axis of a voxel. The
total number of samples in the voxel will be this number cubed.
verbose: bool
Whether the function and sub-functions should be verbose.
Returns
-------
mean_density: float
Per-voxel spatial density, averaged for the whole tractogram.
density_grid: ndarray
Per-voxel volumetric density of fibertubes as a 3D image.
density_flat: list
Per-voxel volumetric density of fibertubes as a list, containing only
the voxels that were valid in the binary mask (i.e. that contained
fibertubes). This is useful for calculating measurements on the
various density values, like mean, median, etc.
collision_grid: ndarray
Per-voxel density of fibertube collisions.
collision_flat: list
Per-voxel density of fibertube collisions as a list, containing only
the voxels that were valid in the binary mask (i.e. that contained
fibertubes). This is useful for calculating measurements on the
various density values, like mean, median, etc.
"""
if "diameters" not in sft.data_per_streamline:
raise ValueError('No diameter found as data_per_streamline '
'in the provided tractogram')
diameters = np.reshape(sft.data_per_streamline['diameters'],
len(sft.streamlines))
mean_diameter = np.mean(diameters)

mean_segment_lengths = []
for streamline in sft.streamlines:
mean_segment_lengths.append(
np.mean(np.linalg.norm(streamline[1:] - streamline[:-1], axis=-1)))
mean_segment_length = np.mean(mean_segment_lengths)
# Computing mean tube density per voxel.
max_diameter = np.max(diameters)

# Everything will be in vox and corner for uncompress.
sft.to_vox()
# Because compute_todi expects streamline points (in voxel coordinates)
# to be in the range [0, size] rather than [-0.5, size - 0.5], we shift
# the voxel origin to corner.
sft.to_corner()

# Computing TDI
_, data_shape, _, _ = sft.space_attributes
todi_obj = TrackOrientationDensityImaging(tuple(data_shape))
todi_obj.compute_todi(sft.streamlines)
img = todi_obj.get_tdi()
img = todi_obj.reshape_to_3d(img)

nb_voxels_nonzero = np.count_nonzero(img)
sum = np.sum(img, axis=-1)
sum = np.sum(sum, axis=-1)
sum = np.sum(sum, axis=-1)

mean_seg_volume = np.pi * ((mean_diameter/2) ** 2) * mean_segment_length

mean_seg_count = sum / nb_voxels_nonzero
mean_volume = mean_seg_count * mean_seg_volume
mean_density = mean_volume / (sft.voxel_sizes[0] *
sft.voxel_sizes[1] *
sft.voxel_sizes[2])

return mean_density
vox_idx_for_streamline = uncompress(sft.streamlines)
mask_idx = np.concatenate(vox_idx_for_streamline)
mask = np.zeros((sft.dimensions), dtype=np.uint8)
# Numpy array indexing in 3D works like this
mask[mask_idx[:, 0], mask_idx[:, 1], mask_idx[:, 2]] = 1

sampling_density = np.array([samples_per_voxel_axis,
samples_per_voxel_axis,
samples_per_voxel_axis])

# Source: dipy.tracking.seeds_from_mask
# Grid of points between -.5 and .5, centered at 0, with given density
grid = np.mgrid[0: sampling_density[0], 0: sampling_density[1],
0: sampling_density[2]]
grid = grid.T.reshape((-1, 3))
grid = grid / sampling_density
grid += 0.5 / sampling_density - 0.5
grid = grid.reshape(*sampling_density, 3)

# Back to corner origin
grid += 0.5

# Add samples to each voxel in mask
samples = np.empty(mask.shape, dtype=object)
for i, j, k in np.ndindex(mask.shape):
if mask[i][j][k]:
samples[i][j][k] = [i, j, k] + grid

# Building KDTree from fibertube segments
centers, indices, max_seg_length = streamlines_to_segments(
sft.streamlines, verbose)
tree = KDTree(centers)

density_grid = np.zeros(mask.shape)
density_flat = []
collision_grid = np.zeros(mask.shape)
collision_flat = []
# For each voxel, get density
for i, j, k in tqdm_if_verbose(np.ndindex(mask.shape), verbose,
total=len(np.ravel(mask))):
if not mask[i][j][k]:
continue

voxel_samples = np.reshape(samples[i][j][k], (-1, 3))

# Returns a list of lists of neighbor indices for each sample
# Ex: [[265, 45, 0, 1231], [12, 67]]
all_sample_neighbors = tree.query_ball_point(
voxel_samples, max_seg_length/2+max_diameter/2, workers=-1)

nb_samples_in_fibertubes = 0
# Set containing frozensets of fibertube indices.
# This way, each pair of colliding fibertubes is only entered once,
# unless there is a trio, in which case the same colliding fibertubes
# may be entered more than once.
collisions = set()
for index, sample in enumerate(voxel_samples):
neighbors = all_sample_neighbors[index]
fibertubes_touching_sample = set()
for segi in neighbors:
fi = indices[segi][0]
pi = indices[segi][1]
centerline = sft.streamlines[fi]
radius = diameters[fi] / 2

dist, _, _ = dist_point_segment(centerline[pi],
centerline[pi+1],
np.float32(sample))

if dist < radius:
fibertubes_touching_sample.add(fi)

if len(fibertubes_touching_sample) > 0:
nb_samples_in_fibertubes += 1
if len(fibertubes_touching_sample) > 1:
collisions.add(frozenset(fibertubes_touching_sample))

density_grid[i][j][k] = nb_samples_in_fibertubes / len(voxel_samples)
density_flat.append(density_grid[i][j][k])
collision_grid[i][j][k] = len(collisions) / len(voxel_samples)
collision_flat.append(collision_grid[i][j][k])

return density_grid, density_flat, collision_grid, collision_flat
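
To illustrate how the new outputs fit together, here is a hedged usage
sketch. The tractogram filename and the sampling resolution are placeholders,
and loading is shown with dipy's load_tractogram for self-containment; only
fibertube_density itself comes from this diff.

import numpy as np
from dipy.io.streamline import load_tractogram
from scilpy.tractanalysis.fibertube_scoring import fibertube_density

# Hypothetical fibertube tractogram; it must carry a 'diameters' entry
# in data_per_streamline or the function raises ValueError.
sft = load_tractogram('fibertubes.trk', 'same', bbox_valid_check=False)

# 5 samples per voxel axis -> 125 samples per voxel.
density_grid, density_flat, collision_grid, collision_flat = \
    fibertube_density(sft, 5, verbose=True)

print('Mean per-voxel fibertube density:', np.mean(density_flat))
print('Max per-voxel collision density:', np.max(collision_grid))
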


def min_external_distance(sft, verbose):
@@ -250,8 +330,9 @@ def get_external_vector_from_centerline_vector(vector, r1, r2):
@njit
def resolve_origin_seeding(seeds, centerlines, diameters):
"""
Associates given seeds to segment 0 of the fibertube in which they have
been generated. This pairing only works with fiber origin seeding.
Given seeds generated in the first segment of fibertubes (origin seeding)
and a set of fibertubes, associates each seed with its corresponding
fibertube.
Parameters
----------
@@ -264,8 +345,8 @@ def resolve_origin_seeding(seeds, centerlines, diameters):
Return
------
seeds_fiber: ndarray
Array containing the fiber index of each seed. If the seed is not in a
fiber, its value will be -1.
Array containing the fibertube index of each seed. If the seed is
not in a fibertube, its value in the array will be -1.
"""
seeds_fiber = [-1] * len(seeds)

42 changes: 11 additions & 31 deletions scilpy/tractograms/uncompress.pyx
@@ -236,36 +236,16 @@ cdef cnp.npy_intp _uncompress(

direction_norm = norm(direction[0], direction[1], direction[2])

# Make sure that the next point is not exactly on a voxel
# intersection or on the face of the voxel, since the behavior
# is not easy to define in this case.
if fabs(next_pt[0] - floor(next_pt[0])) < 1e-8 or\
fabs(next_pt[1] - floor(next_pt[1])) < 1e-8 or\
fabs(next_pt[2] - floor(next_pt[2])) < 1e-8:
# TODO in future, jitter edge or add thickness to deal with
# case where on an edge / face and the corresponding
# component of the direction is 0
next_pt[0] = next_pt[0] - 0.000001 * direction[0]
next_pt[1] = next_pt[1] - 0.000001 * direction[1]
next_pt[2] = next_pt[2] - 0.000001 * direction[2]

# Make sure we don't "underflow" the grid
if next_pt[0] < 0.0 or next_pt[1] < 0.0 or next_pt[2] < 0.0:
next_pt[0] = next_pt[0] + 0.000002 * direction[0]
next_pt[1] = next_pt[1] + 0.000002 * direction[1]
next_pt[2] = next_pt[2] + 0.000002 * direction[2]

# Keep it in mind to correctly set when going back in the loop
jittered_point[0] = next_pt[0]
jittered_point[1] = next_pt[1]
jittered_point[2] = next_pt[2]

# Update those
direction[0] = next_pt[0] - current_pt[0]
direction[1] = next_pt[1] - current_pt[1]
direction[2] = next_pt[2] - current_pt[2]

direction_norm = norm(direction[0], direction[1], direction[2])
# Shift coordinates if they are too close to the voxel boundaries
for i in range(3):
if fabs(next_pt[i] - floor(next_pt[i])) < 1e-8:
if next_pt[i] > 0.000001:
next_pt[i] = next_pt[i] - 0.000001
else:
next_pt[i] = next_pt[i] + 0.000001
jittered_point[i] = next_pt[i]
direction[i] = next_pt[i] - current_pt[i]
direction_norm = norm(direction[0], direction[1], direction[2])

# Set the "remaining_distance" var to compute remaining length of
# vector to process
@@ -292,7 +272,7 @@ cdef cnp.npy_intp _uncompress(
# Check if last point is already on an edge
remaining_distance -= length_ratio * direction_norm

if remaining_distance < 0.:
if remaining_distance <= 0.:
pointers.points_to_index_out[0] = nb_points_out - 1
pointers.points_to_index_out += 1
nb_points_out_pti += 1
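
To make the intent of the rewritten jitter concrete, here is a plain-Python
illustration of the per-axis shift (not the Cython implementation itself);
the 1e-8 tolerance and 1e-6 shift are the values used in the diff above.

import math

def jitter_off_voxel_boundary(point, tol=1e-8, shift=1e-6):
    # Nudge coordinates that sit (numerically) on a voxel face so the
    # traversal never has to decide between two adjacent voxels.
    jittered = list(point)
    for i in range(3):
        if abs(jittered[i] - math.floor(jittered[i])) < tol:
            if jittered[i] > shift:
                # Move slightly inward, off the grid plane.
                jittered[i] -= shift
            else:
                # Near the lower border of the grid, shift in the positive
                # direction instead so we never leave the grid.
                jittered[i] += shift
    return jittered

jitter_off_voxel_boundary([2.0, 0.0, 1.3])  # -> [1.999999, 1e-06, 1.3]
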
1 change: 1 addition & 0 deletions scilpy/version.py
@@ -17,6 +17,7 @@
_ver.append(_version_extra)

__version__ = '.'.join(map(str, _ver))
version_string = "\nScilpy version: " + __version__

CLASSIFIERS = ["Development Status :: 3 - Alpha",
"Environment :: Console",