Commit 0d63835

Merge pull request #92 from CoBrALab/updates
Updates
Gab-D-G authored Sep 30, 2020
2 parents 18d46cd + d30d5f3 commit 0d63835
Showing 10 changed files with 833 additions and 420 deletions.
449 changes: 322 additions & 127 deletions README.md

Large diffs are not rendered by default.

66 changes: 66 additions & 0 deletions rabies/analysis_pkg/analysis_functions.py
@@ -2,6 +2,72 @@
 import nibabel as nb
 
 
+def seed_based_FC(bold_file, brain_mask, seed_list):
+    import os
+    import nibabel as nb
+    import numpy as np
+    from rabies.analysis_pkg.analysis_functions import seed_corr
+
+    if len(seed_list)>0:
+        mask_array = np.asarray(nb.load(brain_mask).dataobj)
+        mask_vector = mask_array.reshape(-1)
+        mask_indices = (mask_vector==True)
+        mask_vector = mask_vector.astype(float)
+
+        corr_maps = np.zeros(list(mask_array.shape)+[len(seed_list)])
+        i = 0
+        for seed in seed_list:
+            mask_vector[mask_indices] = seed_corr(bold_file, brain_mask, seed)
+            corr_maps[:,:,:,i] = mask_vector.reshape(mask_array.shape)
+            i+=1
+
+        corr_map_file = os.path.abspath(os.path.basename(
+            seed).split('.nii')[0]+'_corr_map.nii.gz')
+        nb.Nifti1Image(corr_maps, nb.load(brain_mask).affine, nb.load(
+            brain_mask).header).to_filename(corr_map_file)
+        return corr_map_file
+    else:
+        return None
+
+
+def seed_corr(bold_file, brain_mask, seed):
+    import os
+    from nilearn.input_data import NiftiMasker
+
+    resampled = os.path.abspath('resampled.nii.gz')
+    os.system('antsApplyTransforms -i %s -r %s -o %s -n GenericLabel' %
+              (seed, brain_mask, resampled))
+
+    masker = NiftiMasker(mask_img=nb.load(resampled), standardize=False, verbose=0)
+    # extract the voxel timeseries within the mask
+    voxel_seed_timeseries = masker.fit_transform(bold_file)
+    # take the mean ROI timeseries
+    seed_timeseries = np.mean(voxel_seed_timeseries, axis=1)
+
+    mask_array = np.asarray(nb.load(brain_mask).dataobj)
+    mask_vector = mask_array.reshape(-1)
+    mask_indices = (mask_vector==True)
+
+    timeseries_array = np.asarray(nb.load(bold_file).dataobj)
+    sub_timeseries = np.zeros([mask_indices.sum(), timeseries_array.shape[3]])
+    for t in range(timeseries_array.shape[3]):
+        sub_timeseries[:, t] = (
+            timeseries_array[:, :, :, t].reshape(-1))[mask_indices]
+
+    corrs = vcorrcoef(sub_timeseries, seed_timeseries)
+    corrs[np.isnan(corrs)] = 0
+    return corrs
+
+
+def vcorrcoef(X, y):  # return a correlation between each row of X with y
+    Xm = np.reshape(np.mean(X, axis=1), (X.shape[0], 1))
+    ym = np.mean(y)
+    r_num = np.sum((X-Xm)*(y-ym), axis=1)
+    r_den = np.sqrt(np.sum((X-Xm)**2, axis=1)*np.sum((y-ym)**2))
+    r = r_num/r_den
+    return r
+
+
 def get_CAPs(data, volumes, n_clusters):
     from sklearn.cluster import KMeans
     kmeans = KMeans(n_clusters=n_clusters, n_init=10, max_iter=300)
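
Note: the new vcorrcoef helper is a vectorized Pearson correlation of each row of X (voxels x timepoints) against the seed timeseries y, i.e. r = sum((X-Xm)*(y-ym)) / sqrt(sum((X-Xm)^2)*sum((y-ym)^2)) computed row-wise. A minimal self-contained check of the helper as committed, using synthetic data (the array shapes and values below are illustrative, not from the commit):

import numpy as np

def vcorrcoef(X, y):
    # correlate each row of X (n_voxels x n_timepoints) with the 1D vector y
    Xm = np.reshape(np.mean(X, axis=1), (X.shape[0], 1))
    ym = np.mean(y)
    r_num = np.sum((X-Xm)*(y-ym), axis=1)
    r_den = np.sqrt(np.sum((X-Xm)**2, axis=1)*np.sum((y-ym)**2))
    return r_num/r_den

rng = np.random.default_rng(0)
seed_timeseries = rng.standard_normal(100)       # synthetic seed signal, 100 volumes
sub_timeseries = rng.standard_normal((50, 100))  # 50 synthetic voxel timeseries
sub_timeseries[0] = seed_timeseries              # first voxel tracks the seed exactly
r = vcorrcoef(sub_timeseries, seed_timeseries)
assert np.isclose(r[0], 1.0)                     # perfect correlation with itself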
26 changes: 23 additions & 3 deletions rabies/analysis_pkg/analysis_wf.py
@@ -3,18 +3,18 @@
 from nipype.interfaces import utility as niu
 from nipype import Function
 
-from .analysis_functions import run_group_ICA, run_DR_ICA, run_FC_matrix
+from .analysis_functions import run_group_ICA, run_DR_ICA, run_FC_matrix, seed_based_FC
 
 
-def init_analysis_wf(opts, commonspace_cr=False, name="analysis_wf"):
+def init_analysis_wf(opts, commonspace_cr=False, seed_list=[], name="analysis_wf"):
 
     workflow = pe.Workflow(name=name)
     subject_inputnode = pe.Node(niu.IdentityInterface(
         fields=['bold_file', 'mask_file', 'atlas_file', 'token']), name='subject_inputnode')
     group_inputnode = pe.Node(niu.IdentityInterface(
         fields=['bold_file_list', 'commonspace_mask', 'token']), name='group_inputnode')
     outputnode = pe.Node(niu.IdentityInterface(fields=['group_ICA_dir', 'IC_file', 'DR_data_file',
-                                                       'DR_nii_file', 'matrix_data_file', 'matrix_fig', 'sub_token', 'group_token']), name='outputnode')
+                                                       'DR_nii_file', 'matrix_data_file', 'matrix_fig', 'corr_map_file', 'sub_token', 'group_token']), name='outputnode')
 
     # connect the nodes so that they exist even without running analysis
     workflow.connect([
@@ -26,6 +26,26 @@ def init_analysis_wf(opts, commonspace_cr=False, name="analysis_wf"):
         ]),
     ])
 
+    if len(seed_list) > 0:
+        if not commonspace_cr:
+            raise ValueError(
+                'Outputs from confound regression must be in commonspace to run seed-based analysis. Try running confound regression again with --commonspace_bold.')
+        seed_based_FC_node = pe.Node(Function(input_names=['bold_file', 'brain_mask', 'seed_list'],
+                                              output_names=['corr_map_file'],
+                                              function=seed_based_FC),
+                                     name='seed_based_FC', mem_gb=1)
+        seed_based_FC_node.inputs.seed_list = seed_list
+
+        workflow.connect([
+            (subject_inputnode, seed_based_FC_node, [
+                ("bold_file", "bold_file"),
+                ("mask_file", "brain_mask"),
+            ]),
+            (seed_based_FC_node, outputnode, [
+                ("corr_map_file", "corr_map_file"),
+            ]),
+        ])
+
     include_group_ICA = opts.group_ICA
 
     if opts.DR_ICA:
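
Note: a hedged sketch of exercising the new seed_based_FC node on its own, mirroring the node definition added above. It assumes RABIES is installed and antsApplyTransforms is on the PATH; every file path below is a hypothetical placeholder:

from nipype import Function
from nipype.pipeline import engine as pe
from rabies.analysis_pkg.analysis_functions import seed_based_FC

seed_based_FC_node = pe.Node(Function(input_names=['bold_file', 'brain_mask', 'seed_list'],
                                      output_names=['corr_map_file'],
                                      function=seed_based_FC),
                             name='seed_based_FC', mem_gb=1)
seed_based_FC_node.inputs.bold_file = 'sub-01_cleaned_bold.nii.gz'  # hypothetical cleaned commonspace BOLD
seed_based_FC_node.inputs.brain_mask = 'commonspace_mask.nii.gz'    # hypothetical commonspace brain mask
seed_based_FC_node.inputs.seed_list = ['S1_seed.nii.gz']            # hypothetical seed mask(s)
res = seed_based_FC_node.run()
print(res.outputs.corr_map_file)  # path to the 4D correlation map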
69 changes: 34 additions & 35 deletions rabies/conf_reg_pkg/confound_regression.py
@@ -2,81 +2,80 @@
 from nipype.pipeline import engine as pe
 from nipype.interfaces import utility as niu
 from nipype import Function
-from .utils import regress, data_diagnosis, select_timeseries, seed_based_FC
+from .utils import regress, data_diagnosis, select_timeseries, exec_ICA_AROMA
 
 
 def init_confound_regression_wf(lowpass=None, highpass=None, smoothing_filter=0.3, run_aroma=False, aroma_dim=0, conf_list=[],
-                                TR='1.0s', apply_scrubbing=False, scrubbing_threshold=0.1, timeseries_interval='all', diagnosis_output=False, seed_list=[], name="confound_regression_wf"):
+                                TR='1.0s', apply_scrubbing=False, scrubbing_threshold=0.1, timeseries_interval='all', diagnosis_output=False, name="confound_regression_wf"):
 
     workflow = pe.Workflow(name=name)
     inputnode = pe.Node(niu.IdentityInterface(fields=[
         'bold_file', 'brain_mask', 'csf_mask', 'confounds_file', 'FD_file']), name='inputnode')
     outputnode = pe.Node(niu.IdentityInterface(fields=[
-        'cleaned_path', 'aroma_out', 'mel_out', 'tSNR_file', 'corr_map_list']), name='outputnode')
+        'cleaned_path', 'VE_file', 'aroma_out', 'mel_out', 'tSNR_file']), name='outputnode')
 
     regress_node = pe.Node(Function(input_names=['bold_file', 'brain_mask_file', 'confounds_file', 'csf_mask', 'FD_file', 'conf_list',
-                                                 'TR', 'lowpass', 'highpass', 'smoothing_filter', 'run_aroma', 'aroma_dim', 'apply_scrubbing', 'scrubbing_threshold', 'timeseries_interval'],
-                                    output_names=['cleaned_path',
-                                                  'bold_file', 'aroma_out'],
+                                                 'TR', 'lowpass', 'highpass', 'smoothing_filter', 'apply_scrubbing', 'scrubbing_threshold', 'timeseries_interval'],
+                                    output_names=['cleaned_path', 'bold_file', 'VE_file'],
                                     function=regress),
                            name='regress', mem_gb=1)
     regress_node.inputs.conf_list = conf_list
     regress_node.inputs.TR = float(TR.split('s')[0])
     regress_node.inputs.lowpass = lowpass
     regress_node.inputs.highpass = highpass
     regress_node.inputs.smoothing_filter = smoothing_filter
-    regress_node.inputs.run_aroma = run_aroma
-    regress_node.inputs.aroma_dim = aroma_dim
     regress_node.inputs.apply_scrubbing = apply_scrubbing
     regress_node.inputs.scrubbing_threshold = scrubbing_threshold
     regress_node.inputs.timeseries_interval = timeseries_interval
 
-    seed_based_FC_node = pe.Node(Function(input_names=['bold_file', 'brain_mask', 'seed'],
-                                          output_names=['corr_map_file'],
-                                          function=seed_based_FC),
-                                 name='seed_based_FC', mem_gb=1)
-    seed_based_FC_node.iterables = [('seed', seed_list)]
+    select_timeseries_node = pe.Node(Function(input_names=['bold_file', 'timeseries_interval'],
+                                              output_names=['bold_file'],
+                                              function=select_timeseries),
+                                     name='select_timeseries', mem_gb=1)
+    select_timeseries_node.inputs.timeseries_interval = timeseries_interval
 
     workflow.connect([
+        (inputnode, select_timeseries_node, [
+            ("bold_file", "bold_file"),
+        ]),
         (inputnode, regress_node, [
            ("brain_mask", "brain_mask_file"),
            ("confounds_file", "confounds_file"),
            ("csf_mask", "csf_mask"),
            ("FD_file", "FD_file"),
         ]),
         (regress_node, outputnode, [
            ("cleaned_path", "cleaned_path"),
-           ("aroma_out", "aroma_out"),
-        ]),
-        (inputnode, seed_based_FC_node, [
-           ("brain_mask", "brain_mask"),
-        ]),
-        (regress_node, seed_based_FC_node, [
-           ("cleaned_path", "bold_file"),
-        ]),
-        (seed_based_FC_node, outputnode, [
-           ("corr_map_file", "corr_map_file"),
+           ("VE_file", "VE_file"),
         ]),
     ])
 
-    if not timeseries_interval == 'all':
-        select_timeseries_node = pe.Node(Function(input_names=['bold_file', 'timeseries_interval'],
-                                                  output_names=['bold_file'],
-                                                  function=select_timeseries),
-                                         name='select_timeseries', mem_gb=1)
-        select_timeseries_node.inputs.timeseries_interval = timeseries_interval
+    if run_aroma:
+        ica_aroma_node = pe.Node(Function(input_names=['inFile', 'mc_file', 'brain_mask', 'csf_mask', 'tr', 'aroma_dim'],
+                                          output_names=['cleaned_file', 'aroma_out'],
+                                          function=exec_ICA_AROMA),
+                                 name='ica_aroma', mem_gb=1)
+        ica_aroma_node.inputs.tr = float(TR.split('s')[0])
+        ica_aroma_node.inputs.aroma_dim = aroma_dim
 
         workflow.connect([
-            (inputnode, select_timeseries_node, [
-                ("bold_file", "bold_file"),
+            (inputnode, ica_aroma_node, [
+                ("brain_mask", "brain_mask"),
+                ("confounds_file", "mc_file"),
+                ("csf_mask", "csf_mask"),
             ]),
-            (select_timeseries_node, regress_node, [
-                ("bold_file", "bold_file"),
+            (select_timeseries_node, ica_aroma_node, [
+                ("bold_file", "inFile"),
+            ]),
+            (ica_aroma_node, regress_node, [
+                ("cleaned_file", "bold_file"),
+            ]),
+            (ica_aroma_node, outputnode, [
+                ("aroma_out", "aroma_out"),
             ]),
         ])
     else:
         workflow.connect([
-            (inputnode, regress_node, [
+            (select_timeseries_node, regress_node, [
                 ("bold_file", "bold_file"),
             ]),
         ])
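
Note: to inspect the reworked topology (select_timeseries now always runs first, feeding either ica_aroma or regress directly), one option is to instantiate the workflow and render its graph. A sketch assuming default arguments, a graphviz install, and a hypothetical scratch directory:

from rabies.conf_reg_pkg.confound_regression import init_confound_regression_wf

wf = init_confound_regression_wf(run_aroma=True, TR='1.0s')
wf.base_dir = '/tmp/conf_reg_graph'   # hypothetical scratch directory
wf.write_graph(graph2use='flat')      # writes a graph image under base_dir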
4 changes: 2 additions & 2 deletions rabies/conf_reg_pkg/mod_ICA_AROMA/ICA_AROMA_functions.py
@@ -17,8 +17,8 @@ def run_ICA_AROMA(outDir,inFile,mc,TR,mask="",mask_csf="",denType="nonaggr",melD
     import os
     import subprocess
     import shutil
-    import rabies.conf_reg.mod_ICA_AROMA.classification_plots as classification_plots
-    import rabies.conf_reg.mod_ICA_AROMA.ICA_AROMA_functions as aromafunc
+    import rabies.conf_reg_pkg.mod_ICA_AROMA.classification_plots as classification_plots
+    import rabies.conf_reg_pkg.mod_ICA_AROMA.ICA_AROMA_functions as aromafunc
 
     # Change to script directory
     cwd = os.path.realpath(os.path.curdir)
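
Note: the two-line change above fixes import paths left stale by the conf_reg -> conf_reg_pkg package rename (the file itself lives under rabies/conf_reg_pkg/). A quick sanity check that the corrected modules resolve, assuming RABIES is installed:

import importlib

for name in ('rabies.conf_reg_pkg.mod_ICA_AROMA.classification_plots',
             'rabies.conf_reg_pkg.mod_ICA_AROMA.ICA_AROMA_functions'):
    importlib.import_module(name)  # raises ModuleNotFoundError if the rename regressed
print('imports OK')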
(diffs for the remaining changed files are not rendered)
