From e1a8d2f9ce4b17bf666921217df6a848fb859ed1 Mon Sep 17 00:00:00 2001
From: Sebastien Tourbier
Date: Mon, 3 Jan 2022 15:57:45 +0100
Subject: [PATCH 01/15] REF: Split classes of cmp.bidsappmanager.gui into different files [skip ci]

---
 cmp/bidsappmanager/gui.py                  | 2853 -----------------
 cmp/bidsappmanager/gui/__init__.py         |    7 +
 cmp/bidsappmanager/gui/bidsapp.py          | 1286 ++++++++
 cmp/bidsappmanager/gui/config.py           |  273 ++
 cmp/bidsappmanager/gui/globals.py          |  123 +
 cmp/bidsappmanager/gui/handlers.py         | 2247 +++++++++++++
 cmp/bidsappmanager/gui/principal.py        |  293 ++
 cmp/bidsappmanager/gui/qc.py               |  470 +++
 cmp/bidsappmanager/gui/traits.py           |   33 +
 .../pipelines/anatomical/anatomical.py     |    2 +-
 .../pipelines/diffusion/diffusion.py       |    2 +-
 .../pipelines/functional/fMRI.py           |    2 +-
 cmp/bidsappmanager/project.py              | 2704 +++-------------
 cmp/cli/cmpbidsappmanager.py               |   17 +-
 cmp/cli/connectomemapper3.py               |    2 +-
 cmp/project.py                             |   16 +-
 run.py                                     |    6 +-
 setup.py                                   |    3 +-
 setup_pypi.py                              |    3 +-
 19 files changed, 5248 insertions(+), 5094 deletions(-)
 delete mode 100644 cmp/bidsappmanager/gui.py
 create mode 100644 cmp/bidsappmanager/gui/__init__.py
 create mode 100644 cmp/bidsappmanager/gui/bidsapp.py
 create mode 100644 cmp/bidsappmanager/gui/config.py
 create mode 100644 cmp/bidsappmanager/gui/globals.py
 create mode 100644 cmp/bidsappmanager/gui/handlers.py
 create mode 100644 cmp/bidsappmanager/gui/principal.py
 create mode 100644 cmp/bidsappmanager/gui/qc.py
 create mode 100644 cmp/bidsappmanager/gui/traits.py

diff --git a/cmp/bidsappmanager/gui.py b/cmp/bidsappmanager/gui.py
deleted file mode 100644
index 580920bef..000000000
--- a/cmp/bidsappmanager/gui.py
+++ /dev/null
@@ -1,2853 +0,0 @@
-# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and
-# Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors
-# All rights reserved.
-#
-# This software is distributed under the open-source license Modified BSD.
-
-"""Connectome Mapper GUI."""
-
-# General imports
-import os
-import sys
-
-import pkg_resources
-from subprocess import Popen
-import subprocess
-import multiprocessing
-import shutil
-import time
-import glob
-
-from pyface.api import ImageResource
-from traitsui.qt4.extra.qt_view import QtView
-from traitsui.api import *
-from traits.api import *
-
-from bids import BIDSLayout
-
-import warnings
-
-# Own imports
-import cmp.bidsappmanager.project as project
-from cmp.project import CMP_Project_Info, __cmp_directory__, __freesurfer_directory__
-from cmp.info import __version__
-from cmtklib.util import (
-    return_button_style_sheet,
-    BColors,
-    print_blue,
-    print_warning,
-    print_error,
-)
-
-# Remove warnings visible whenever you import scipy (or another package)
-# that was compiled against an older numpy than is installed.
-warnings.filterwarnings("ignore", message="numpy.dtype size changed") -warnings.filterwarnings("ignore", message="numpy.ufunc size changed") - -# global modal_width -modal_width = 400 - -# global style_sheet -style_sheet = """ - QLabel { - font: 12pt "Verdana"; - margin-left: 5px; - background-color: transparent; - } - QPushButton { - border: 0px solid lightgray; - border-radius: 4px; - color: transparent; - background-color: transparent; - min-width: 222px; - icon-size: 222px; - font: 12pt "Verdana"; - margin: 0px 0px 0px 0px; - padding:0px 0px; - } - QPushButton:pressed { - background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, - stop: 0 #dadbde, stop: 1 #f6f7fa); - } - QMenuBar { - background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, - stop: 0 #dadbde, stop: 1 #f6f7fa) - font: 14pt "Verdana"; - } - QMenuBar::item { - spacing: 5px; /* spacing between menu bar items */ - padding: 5px 5px; - background: transparent; - border-radius: 4px; - } - QMenuBar::item:selected { /* when selected using mouse or keyboard */ - background: #a8a8a8; - } - QMenuBar::item:pressed { - background: #888888; - } - QMainWindow { - background-color: yellow; - image: url("images/cmp.png"); - } - QMainWindow::separator { - background: yellow; - width: 1px; /* when vertical */ - height: 1px; /* when horizontal */ - } - QMainWindow::separator:hover { - background: red; - } - - QListView::item:selected { - border: 1px solid #6a6ea9; - } - - QListView::item:selected:!active { - background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, - stop: 0 #ABAFE5, stop: 1 #8588B2); - } - - QListView::item:selected:active { - background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, - stop: 0 #6a6ea9, stop: 1 #888dd9); - } - - QListView::item:hover { - background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, - stop: 0 #FAFBFE, stop: 1 #DCDEF1); - } - QProgressBar { - border: 2px solid grey; - border-radius: 5px; - } - - QProgressBar::chunk { - background-color: #05B8CC; - width: 20px; - } - """ - - -def get_icon(path): - """Return an instance of `ImageResource` or None is there is not graphical backend. - - Parameters - ---------- - path : string - Path to an image file - - Returns - ------- - icon : ImageResource - Return an instance of `ImageResource` or None is there is not graphical backend. - """ - on_rtd = os.environ.get("READTHEDOCS") == "True" - if on_rtd: - print("READTHEDOCS: Return None for icon") - icon = None - else: - icon = ImageResource(path) - return icon - - -class CMP_Project_InfoUI(CMP_Project_Info): - """Class that extends the :class:`CMP_Project_Info` with graphical components. - - It supports graphically the setting of all processing properties / attributes - of an :class:`CMP_Project_Info` instance. - - Attributes - ----------- - creation_mode : traits.Enum - Mode for loading the dataset. Valid values are - 'Load BIDS dataset', 'Install Datalad BIDS dataset' - - install_datalad_dataset_via_ssh : traits.Bool - If set to True install the datalad dataset from a remote server - via ssh.(True by default) - - ssh_user : traits.Str - Remote server username. - (Required if ``install_datalad_dataset_via_ssh`` is True) - - ssh_pwd - Remote server password. - (Required if ``install_datalad_dataset_via_ssh`` is True) - - ssh_remote : traits.Str - Remote server IP or URL. - (Required if ``install_datalad_dataset_via_ssh`` is True) - - datalad_dataset_path : traits.Directory - Path to the datalad dataset on the remote server. 
- (Required if ``install_datalad_dataset_via_ssh`` is True) - - summary_view_button : traits.ui.Button - Button that shows the pipeline processing summary table - - pipeline_processing_summary_view : traits.ui.VGroup - TraitsUI VGroup that contains ``Item('pipeline_processing_summary')`` - - dataset_view : traits.ui.View - TraitsUI View that shows a summary of project settings and - modality available for a given subject - - traits_view : QtView - TraitsUI QtView that includes the View 'dataset_view' - - create_view : traits.ui.View - Dialog view to create a BIDS Dataset - - subject_view : traits.ui.View - Dialog view to select of subject - - subject_session_view : traits.ui.View - Dialog view to select the subject session - - dmri_bids_acq_view : traits.ui.View - Dialog view to select the diffusion acquisition model - - anat_warning_view : traits.ui.View - View that displays a warning message regarding - the anatomical T1w data - - anat_config_error_view : traits.ui.View - Error view that displays an error message regarding - the configuration of the anatomical pipeline - - dmri_warning_view : traits.ui.View - View that displays a warning message regarding - the diffusion MRI data - - dmri_config_error_view : traits.ui.View - View that displays an error message regarding - the configuration of the diffusion pipeline - - fmri_warning_view : traits.ui.View - View that displays a warning message regarding - the functional MRI data - - fmri_config_error_view : traits.ui.View - View that displays an error message regarding - the configuration of the fMRI pipeline - - open_view : traits.ui.View - Dialog view to load a BIDS Dataset - - anat_select_config_to_load : traits.ui.View - Dialog view to load the configuration file of the anatomical pipeline - - diffusion_imaging_model_select_view : traits.ui.View - Dialog view to select the diffusion acquisition model - - dmri_select_config_to_load : traits.ui.View - Dialog view to load the configuration file of the diffusion MRI pipeline - - fmri_select_config_to_load : traits.ui.View - Dialog view to load the configuration file of the fMRI pipeline - """ - - creation_mode = Enum("Load BIDS dataset", "Install Datalad BIDS dataset") - install_datalad_dataset_via_ssh = Bool(True) - ssh_user = String("remote_username") - ssh_pwd = Password("") - ssh_remote = String("IP address/ Machine name") - datalad_dataset_path = Directory("/shared/path/to/existing/datalad/dataset") - - anat_runs = List() - anat_run = Enum(values="anat_runs") - - dmri_runs = List() - dmri_run = Enum(values="dmri_runs") - - fmri_runs = List() - fmri_run = Enum(values="fmri_runs") - - summary_view_button = Button("Pipeline processing summary") - - pipeline_processing_summary_view = VGroup(Item("pipeline_processing_summary")) - - dataset_view = VGroup( - VGroup( - HGroup( - Item( - "base_directory", - width=-0.3, - style="readonly", - label="", - resizable=True, - ), - Item( - "number_of_subjects", - width=-0.3, - style="readonly", - label="Number of participants", - resizable=True, - ), - "summary_view_button", - ), - label="BIDS Dataset", - ), - spring, - HGroup( - Group(Item("subject", style="simple", show_label=True, resizable=True)), - Group( - Item( - "subject_session", style="simple", label="Session", resizable=True - ), - visible_when='subject_session!=""', - ), - springy=True, - ), - spring, - Group( - Item("t1_available", style="readonly", label="T1", resizable=True), - HGroup( - Item( - "dmri_available", - style="readonly", - label="Diffusion", - resizable=True, - ), - Item( - 
"diffusion_imaging_model", - label="Model", - resizable=True, - enabled_when="dmri_available", - ), - ), - Item("fmri_available", style="readonly", label="BOLD", resizable=True), - label="Modalities", - ), - spring, - Group( - Item( - "anat_last_date_processed", - label="Anatomical pipeline", - style="readonly", - resizable=True, - enabled_when="t1_available", - ), - Item( - "dmri_last_date_processed", - label="Diffusion pipeline", - style="readonly", - resizable=True, - enabled_when="dmri_available", - ), - Item( - "fmri_last_date_processed", - label="fMRI pipeline", - style="readonly", - resizable=True, - enabled_when="fmri_available", - ), - label="Last date processed", - ), - spring, - Group( - Item("number_of_cores", resizable=True), label="Processing configuration" - ), - "550", - spring, - springy=True, - ) - - traits_view = QtView(Include("dataset_view")) - - create_view = View( - Item("creation_mode", style="custom"), - Group( - Group( - Item("base_directory", label="BIDS Dataset"), - visible_when='creation_mode=="Load BIDS dataset"', - ), - Group( - Item("install_datalad_dataset_via_ssh"), - visible_when='creation_mode=="Install Datalad/BIDS dataset"', - ), - Group( - Item( - "ssh_remote", - label="Remote ssh server", - visible_when="install_datalad_dataset_via_ssh", - ), - Item( - "ssh_user", - label="Remote username", - visible_when="install_datalad_dataset_via_ssh", - ), - Item( - "ssh_pwd", - label="Remote password", - visible_when="install_datalad_dataset_via_ssh", - ), - Item( - "datalad_dataset_path", - label="Datalad/BIDS Dataset Path/URL to be installed", - ), - Item("base_directory", label="Installation directory"), - visible_when='creation_mode=="Install Datalad/BIDS dataset"', - ), - ), - kind="livemodal", - title="Data creation: BIDS dataset selection", - # style_sheet=style_sheet, - width=modal_width, - buttons=["OK", "Cancel"], - ) - - subject_view = View( - Group(Item("subject", label="Selected Subject")), - kind="modal", - title="Subject and session selection", - # style_sheet=style_sheet, - width=modal_width, - buttons=["OK", "Cancel"], - ) - - subject_session_view = View( - Group( - Item("subject", style="readonly", label="Selected Subject"), - Item("subject_session", label="Selected Session"), - ), - kind="modal", - title="Session selection", - # style_sheet=style_sheet, - width=modal_width, - buttons=["OK", "Cancel"], - ) - - dmri_bids_acq_view = View( - Group( - Item("dmri_bids_acq", label="Selected model"), - ), - kind="modal", - title="Selection of diffusion acquisition model", - # style_sheet=style_sheet, - width=modal_width, - buttons=["OK", "Cancel"], - ) - - anat_warning_view = View( - Group( - Item("anat_warning_msg", style="readonly", show_label=False), - ), - title="Warning : Anatomical T1w data", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - anat_config_error_view = View( - Group( - Item("anat_config_error_msg", style="readonly", show_label=False), - ), - title="Error", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - dmri_warning_view = View( - Group( - Item("dmri_warning_msg", style="readonly", show_label=False), - ), - title="Warning : Diffusion MRI data", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - dmri_config_error_view = View( - Group( - Item("dmri_config_error_msg", style="readonly", show_label=False), - ), - title="Error", - kind="modal", - width=modal_width, - # 
style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - fmri_warning_view = View( - Group( - Item("fmri_warning_msg", style="readonly", show_label=False), - ), - title="Warning : fMRI data", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - fmri_config_error_view = View( - Group( - Item("fmri_config_error_msg", style="readonly", show_label=False), - ), - title="Error", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - open_view = View( - Item("creation_mode", label="Mode"), - Group( - Item("install_datalad_dataset_via_ssh"), - Item( - "ssh_remote", - label="Remote ssh server", - visible_when="install_datalad_dataset_via_ssh", - ), - Item( - "ssh_user", - label="Remote username", - visible_when="install_datalad_dataset_via_ssh", - ), - Item( - "ssh_pwd", - label="Remote password", - visible_when="install_datalad_dataset_via_ssh", - ), - Item( - "datalad_dataset_path", - label="Datalad/BIDS Dataset Path/URL to be installed", - ), - Item("base_directory", label="Installation directory"), - visible_when='creation_mode=="Install Datalad BIDS dataset"', - ), - Group( - Item("base_directory", label="BIDS Dataset"), - visible_when='creation_mode=="Load BIDS dataset"', - ), - kind="livemodal", - title="BIDS Dataset Creation/Loading", - # style_sheet=style_sheet, - width=600, - height=250, - buttons=["OK", "Cancel"], - ) - - anat_select_config_to_load = View( - Group( - Item("anat_config_to_load_msg", style="readonly", show_label=False), - Item( - "anat_config_to_load", - style="custom", - editor=EnumEditor(name="anat_available_config"), - show_label=False, - ), - ), - title="Select configuration for anatomical pipeline", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - anat_custom_map_view = View( - Group( - Item( - "anat_custom_last_stage", - editor=EnumEditor(name="anat_stage_names"), - style="custom", - show_label=False, - ), - ), - title="Select until which stage to process the anatomical pipeline.", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - diffusion_imaging_model_select_view = View( - Group( - Item("diffusion_imaging_model", label="Diffusion MRI modality"), - ), - title="Please select diffusion MRI modality", - kind="modal", - width=modal_width, - buttons=["OK", "Cancel"], - ) - - dmri_select_config_to_load = View( - Group( - Item("dmri_config_to_load_msg", style="readonly", show_label=False), - ), - Item( - "dmri_config_to_load", - style="custom", - editor=EnumEditor(name="dmri_available_config"), - show_label=False, - ), - title="Select configuration for diffusion pipeline", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - dmri_custom_map_view = View( - Group( - Item( - "dmri_custom_last_stage", - editor=EnumEditor(name="dmri_stage_names"), - style="custom", - show_label=False, - ), - ), - title="Select until which stage to process the diffusion pipeline.", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - fmri_select_config_to_load = View( - Group( - Item("fmri_config_to_load_msg", style="readonly", show_label=False), - ), - Item( - "fmri_config_to_load", - style="custom", - editor=EnumEditor(name="fmri_available_config"), - show_label=False, - ), - title="Select configuration for fMRI pipeline", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - 
buttons=["OK", "Cancel"], - ) - - fmri_custom_map_view = View( - Group( - Item( - "fmri_custom_last_stage", - editor=EnumEditor(name="fmri_stage_names"), - style="custom", - show_label=False, - ), - ), - title="Select until which stage to process the fMRI pipeline.", - kind="modal", - width=modal_width, - # style_sheet=style_sheet, - buttons=["OK", "Cancel"], - ) - - def _summary_view_button_fired(self): - self.configure_traits(view="pipeline_processing_summary_view") - - -class MultiSelectAdapter(TabularAdapter): - """This adapter is used by left and right tables for selection of subject to be processed.""" - - # Titles and column names for each column of a table. - # In this example, each table has only one column. - columns = [("", "myvalue")] - width = 100 - - # Magically named trait which gives the display text of the column named - # 'myvalue'. This is done using a Traits Property and its getter: - myvalue_text = Property - - def _get_myvalue_text(self): - """The getter for Property 'myvalue_text'. - - It simply takes the value of the corresponding item in the list - being displayed in this table. A more complicated example could - format the item before displaying it. - """ - return "sub-%s" % self.item - - -class CMP_BIDSAppWindow(HasTraits): - """Class that defines the Window of the BIDS App Interface. - - Attributes - ---------- - project_info : CMP_Project_Info - Instance of :class:`CMP_Project_Info` that represents the processing project - - bids_root : traits.Directory - BIDS root dataset directory - - output_dir : traits.Directory - Output directory - - subjects : traits.List - List of subjects (in the form ``sub-XX``) present in the dataset - - number_of_participants_processed_in_parallel : traits.Range - Number of participants / subjects to be processed in parallel that - takes values in the [1, # of CPUs - 1] range - - number_threads_max : traits.Int - Maximal number of threads to be used by OpenMP programs - (4 by default) - - number_of_threads : traits.Range - Number of threads to be used by OpenMP programs that takes values - in the [1, ``number_threads_max``] range - - fs_file : traits.File - Path to Freesurfer license file - - list_of_subjects_to_be_processed : List(Str) - Selection of subjects to be processed from the ``subjects`` list - - dmri_inputs_checked : traits.Bool - True if dMRI data is available in the dataset - - fmri_inputs_checked : traits.Bool - rue if fMRI data is available in the dataset - - anat_config : traits.File - Configuration file for the anatomical MRI pipeline - - dmri_config : traits.File - Configuration file for the diffusion MRI pipeline - - fmri_config : traits.File - Configuration file for the functional MRI pipeline - - run_anat_pipeline : traits.Bool - If True, run the anatomical pipeline - - run_dmri_pipeline : traits.Bool - If True, run the diffusion pipeline - - run_fmri_pipeline : traits.Bool - If True, run the functional pipeline - - bidsapp_tag : traits.Enum - Selection of BIDS App version to use - - data_provenance_tracking : traits.Bool - If set and if ``datalad_is_available`` is True run the BIDS App - using datalad (False by default) - - datalad_update_environment : traits.Bool - If True and ``data_provenance_tracking`` is True, tell to datalad - to update the BIDS App container image if there was a previous - execution (True by default) - - datalad_is_available : traits.Bool - Boolean used to store if datalad is available in the computing - environment (False by default) - - check : traits.ui.Button - Button to check if all 
parameters are properly set for execution - of the BIDS App - - start_bidsapp : traits.ui.Button - Button to run the BIDS App - - traits_view : QtView - TraitsUI QtView that describes the content of the window - """ - - project_info = Instance(CMP_Project_Info) - - bids_root = Directory() - output_dir = Directory() - subjects = List(Str) - - # multiproc_number_of_cores = Int(1) - number_of_participants_processed_in_parallel = Range( - low=1, - high=multiprocessing.cpu_count() - 1, - desc="Number of participants to be processed in parallel", - ) - - number_of_threads_max = Int(multiprocessing.cpu_count() - 1) - - number_of_threads = Range( - low=1, - high="number_of_threads_max", - mode="spinner", - desc="Number of OpenMP threads used by Dipy, FSL, MRtrix, " - "and Freesurfer recon-all", - ) - - fix_ants_random_seed = Bool( - False, desc="Fix MRtrix3 random generator seed for tractography" - ) - ants_random_seed = Int(1234, desc="MRtrix random generator seed value") - - fix_mrtrix_random_seed = Bool( - False, desc="Fix ANTs random generator seed for registration" - ) - mrtrix_random_seed = Int(1234, desc="ANTs random generator seed value") - - fix_ants_number_of_threads = Bool( - False, desc="Fix independently number of threads used by ANTs registration" - ) - ants_number_of_threads = Range( - low=1, - high="number_of_threads_max", - mode="spinner", - desc="Number of ITK threads used by ANTs registration", - ) - - fs_license = File(desc="Path to your FREESURFER license.txt") - # fs_average = Directory(os.path.join(os.environ['FREESURFER_HOME'],'subjects','fsaverage')) - - list_of_subjects_to_be_processed = List(Str) - - list_of_processing_logfiles = List(File) - - anat_config = File(desc="Path to the configuration file of the anatomical pipeline") - dmri_config = File(desc="Path to the configuration file of the diffusion pipeline") - fmri_config = File(desc="Path to the configuration file of the fMRI pipeline") - - run_anat_pipeline = Bool(True, desc="Run the anatomical pipeline") - run_dmri_pipeline = Bool(False, desc="Run the diffusion pipeline") - run_fmri_pipeline = Bool(False, desc="Run the fMRI pipeline") - - dmri_inputs_checked = Bool(False) - fmri_inputs_checked = Bool(False) - - settings_checked = Bool(False) - docker_running = Bool(False) - - bidsapp_tag = Enum("{}".format(__version__), ["latest", "{}".format(__version__)]) - - data_provenance_tracking = Bool( - False, desc="Use datalad to execute CMP3 and record dataset changes" - ) - - datalad_update_environment = Bool( - True, - desc="Update the container if datalad run-container has been run already once", - ) - - datalad_is_available = Bool(False, desc="True if datalad is available") - - # check = Action(name='Check settings!', - # action='check_settings', - # image=get_icon( - # pkg_resources.resource_filename('resources', - # os.path.join('buttons', 'bidsapp-check-settings.png')))) - # start_bidsapp = Action(name='Start BIDS App!', - # action='start_bids_app', - # enabled_when='settings_checked==True and docker_running==False', - # image=get_icon( - # pkg_resources.resource_filename('resources', - # os.path.join('buttons', 'bidsapp-run.png')))) - - update_selection = Button() - check = Button() - start_bidsapp = Button() - - # stop_bidsapp = Action(name='Stop BIDS App!',action='stop_bids_app',enabled_when='handler.settings_checked and handler.docker_running') - - traits_view = QtView( - Group( - VGroup( - VGroup( - Item("bidsapp_tag", style="readonly", label="Tag"), - label="BIDS App Version", - ), - VGroup( - 
Item("bids_root", style="readonly", label="Input directory"), - Item( - "output_dir", - style="simple", - label="Output directory", - enabled_when="not(data_provenance_tracking)", - ), - label="BIDS dataset", - ), - VGroup( - HGroup( - UItem( - "subjects", - editor=TabularEditor( - show_titles=True, - selected="list_of_subjects_to_be_processed", - editable=False, - multi_select=True, - adapter=MultiSelectAdapter( - columns=[("Available labels", "myvalue")] - ), - ), - ), - UItem( - "list_of_subjects_to_be_processed", - editor=TabularEditor( - show_titles=True, - editable=False, - adapter=MultiSelectAdapter( - columns=[("Labels to be processed", "myvalue")] - ), - ), - ), - ), - label="Participant labels to be processed", - ), - HGroup( - Item( - "number_of_participants_processed_in_parallel", - label="Number of participants processed in parallel", - ), - label="Parallel processing", - ), - VGroup( - HGroup( - VGroup( - Item("number_of_threads", label="Number of OpenMP threads"), - Item( - "fix_ants_number_of_threads", - label="Set number of threads used by ANTs", - ), - Item( - "ants_number_of_threads", - label="Number of ITK threads used by ANTs registration", - enabled_when="fix_ants_number_of_threads", - ), - label="Multithreading", - ), - VGroup( - Item( - "fix_ants_random_seed", - label="Set seed of ANTS random number generator", - ), - Item( - "ants_random_seed", - label="Seed", - enabled_when="fix_ants_random_seed", - ), - Item( - "fix_mrtrix_random_seed", - label="Set seed of MRtrix random number generator", - ), - Item( - "mrtrix_random_seed", - label="Seed", - enabled_when="fix_mrtrix_random_seed", - ), - label="Random number generators", - ), - ), - label="Advanced execution settings for each participant process", - ), - VGroup( - Group( - Item( - "anat_config", - editor=FileEditor(dialog_style="open"), - label="Configuration file", - visible_when="run_anat_pipeline", - ), - label="Anatomical pipeline", - ), - Group( - Item("run_dmri_pipeline", label="Run processing stages"), - Item( - "dmri_config", - editor=FileEditor(dialog_style="open"), - label="Configuration file", - visible_when="run_dmri_pipeline", - ), - label="Diffusion pipeline", - visible_when="dmri_inputs_checked==True", - ), - Group( - Item("run_fmri_pipeline", label="Run processing stages"), - Item( - "fmri_config", - editor=FileEditor(dialog_style="open"), - label="Configuration file", - visible_when="run_fmri_pipeline", - ), - label="fMRI pipeline", - visible_when="fmri_inputs_checked==True", - ), - label="Configuration of processing pipelines", - ), - VGroup( - Item( - "fs_license", - editor=FileEditor(dialog_style="open"), - label="LICENSE", - ), - # Item('fs_average', label='FSaverage directory'), - label="Freesurfer configuration", - ), - VGroup( - Item("data_provenance_tracking", label="Use Datalad"), - Item( - "datalad_update_environment", - visible_when="data_provenance_tracking", - label="Update the computing environment (if existing)", - ), - label="Data Provenance Tracking / Data Lineage", - enabled_when="datalad_is_available", - ), - orientation="vertical", - springy=True, - ), - spring, - HGroup( - spring, - Item( - "check", - style="custom", - width=152, - height=35, - resizable=False, - label="", - show_label=False, - style_sheet=return_button_style_sheet( - ImageResource( - pkg_resources.resource_filename( - "resources", - os.path.join("buttons", "bidsapp-check-settings.png"), - ) - ).absolute_path - ), - ), - spring, - Item( - "start_bidsapp", - style="custom", - width=152, - height=35, - 
resizable=False, - label="", - show_label=False, - style_sheet=return_button_style_sheet( - ImageResource( - pkg_resources.resource_filename( - "resources", os.path.join("buttons", "bidsapp-run.png") - ) - ).absolute_path, - ImageResource( - pkg_resources.resource_filename( - "resources", - os.path.join("buttons", "bidsapp-run-disabled.png"), - ) - ).absolute_path, - ), - enabled_when="settings_checked==True and docker_running==False", - ), - spring, - show_labels=False, - label="", - ), - orientation="vertical", - springy=True, - ), - title="Connectome Mapper 3 BIDS App GUI", - # kind='modal', - handler=project.CMP_BIDSAppWindowHandler(), - # style_sheet=style_sheet, - buttons=[], - # buttons = [check,start_bidsapp], - # buttons = [process_anatomical,map_dmri_connectome,map_fmri_connectome], - # buttons = [preprocessing, map_connectome, map_custom], - width=0.6, - height=0.8, - scrollable=True, # , resizable=True - icon=get_icon("bidsapp.png"), - ) - - log_view = QtView( - Group( - Item("list_of_processing_logfiles"), orientation="vertical", springy=True - ), - title="Connectome Mapper 3 BIDS App Progress", - # kind='modal', - # handler=project.CMP_BIDSAppWindowHandler(), - # style_sheet=style_sheet, - buttons=[], - # buttons = [check,start_bidsapp], - # buttons = [process_anatomical,map_dmri_connectome,map_fmri_connectome], - # buttons = [preprocessing, map_connectome, map_custom], - width=0.5, - height=0.8, - resizable=True, # , scrollable=True, resizable=True - icon=get_icon("bidsapp.png"), - ) - - def __init__( - self, - project_info=None, - bids_root="", - subjects=None, - list_of_subjects_to_be_processed=None, - anat_config="", - dmri_config="", - fmri_config="", - ): - """Constructor of an :class:``CMP_BIDSAppWindow`` instance. - - Parameters - ---------- - project_info : cmp.project.CMP_Project_Info - :class:`CMP_Project_Info` object (Default: None) - - bids_root : traits.Directory - BIDS dataset root directory (Default: \'\') - - subjects : List of string - List of subjects in the dataset (Default: None) - - list_of_subjects_to_be_processed : List of string - List of subjects to be processed (Default: None) - - anat_config : string - Path to anatomical pipeline configuration file (Default: \'\') - - dmri_config : string - Path to diffusion pipeline configuration file (Default: \'\') - - fmri_config : string - Path to functional pipeline configuration file (Default: \'\') - """ - print("> Initialize window...") - if multiprocessing.cpu_count() < 4: - self.number_of_threads_max = multiprocessing.cpu_count() - - self.project_info = project_info - self.bids_root = bids_root - - # Create a BIDSLayout for checking availability of dMRI and fMRI data - try: - bids_layout = BIDSLayout(self.bids_root) - except Exception: - print_error(" .. Exception : Raised at BIDSLayout") - sys.exit(1) - - # Check if sMRI data is available in the dataset - smri_files = bids_layout.get( - datatype="anat", suffix="T1w", extensions="nii.gz", return_type="file" - ) - - if not smri_files: - anat_inputs_checked = False - else: - anat_inputs_checked = True - - print(f" .. T1w available: {anat_inputs_checked}") - - # Check if dMRI data is available in the dataset - dmri_files = bids_layout.get( - datatype="dwi", suffix="dwi", extensions="nii.gz", return_type="file" - ) - - if not dmri_files: - self.dmri_inputs_checked = False - self.run_dmri_pipeline = False - else: - self.dmri_inputs_checked = True - self.run_dmri_pipeline = True - - print(f" .. 
DWI available: {self.dmri_inputs_checked}") - - # Check if fMRI data is available in the dataset - fmri_files = bids_layout.get( - task="rest", - datatype="func", - suffix="bold", - extensions="nii.gz", - return_type="file", - ) - if not fmri_files: - self.fmri_inputs_checked = False - self.run_fmri_pipeline = False - else: - self.fmri_inputs_checked = True - self.run_fmri_pipeline = True - - print(f" .. rsfMRI available: {self.fmri_inputs_checked}") - - # Initialize output directory to be /bids_dir/derivatives - self.output_dir = os.path.join(bids_root, "derivatives") - - self.subjects = subjects - # self.list_of_subjects_to_be_processed = list_of_subjects_to_be_processed - self.anat_config = anat_config - self.dmri_config = dmri_config - self.fmri_config = fmri_config - - if 'FREESURFER_HOME' in os.environ: - self.fs_license = os.path.join( - os.environ['FREESURFER_HOME'], 'license.txt') - elif os.path.isfile(os.path.join(bids_root, 'code', 'license.txt')): - self.fs_license = os.path.join(bids_root, 'code', 'license.txt') - else: - print_error('.. ERROR: Environment variable $FREESURFER_HOME not found and no Freesurfer license file ' - 'found in local code-folder ') - self.fs_license = '' - print_warning('Freesurfer license unset ({})'.format(self.fs_license)) - - self.datalad_is_available = project.is_tool("datalad") - - self.on_trait_change(self.update_run_dmri_pipeline, "run_dmri_pipeline") - self.on_trait_change(self.update_run_fmri_pipeline, "run_fmri_pipeline") - - self.on_trait_change( - self.number_of_parallel_procs_updated, - "number_of_participants_processed_in_parallel", - ) - - self.on_trait_change( - self.update_checksettings, "list_of_subjects_to_be_processed" - ) - self.on_trait_change(self.update_checksettings, "anat_config") - self.on_trait_change(self.update_checksettings, "run_dmri_pipeline") - self.on_trait_change(self.update_checksettings, "dmri_config") - self.on_trait_change(self.update_checksettings, "run_fmri_pipeline") - self.on_trait_change(self.update_checksettings, "fmri_config") - self.on_trait_change(self.update_checksettings, "fs_license") - # self.on_trait_change(self.update_checksettings, 'fs_average') - - def number_of_parallel_procs_updated(self, new): - """Callback function when ``number_of_parallel_procs`` is updated.""" - number_of_threads_max = int((multiprocessing.cpu_count() - 1) / new) - - if number_of_threads_max > 4: - self.number_of_threads_max = 4 - else: - self.number_of_threads_max = number_of_threads_max - - print( - " .. INFO : Update number of threads max to : {}".format( - self.number_of_threads_max - ) - ) - - def update_run_anat_pipeline(self, new): - """Callback function when ``run_anat_pipeline`` is updated.""" - if new is False: - print_warning(" .. 
WARNING: At least anatomical pipeline should be run!") - self.run_anat_pipeline = True - - def update_run_dmri_pipeline(self, new): - """Callback function when ``run_dmri_pipeline`` is updated.""" - self.run_anat_pipeline = True - - def update_run_fmri_pipeline(self, new): - """Callback function when ``run_fmri_pipeline`` is updated.""" - self.run_anat_pipeline = True - - def update_checksettings(self, new): - """Function that reset ``settings_checked`` attribute to False.""" - self.settings_checked = False - - def _data_provenance_tracking_changed(self, new): - """Callback function `data_provenance_tracking` attribute is updated.""" - if new is True: - self.output_dir = os.path.join(self.bids_root, "derivatives") - self.data_provenance_tracking = new - - def _update_selection_fired(self): - """Callback function when the list of selected subjects is updated.""" - self.configure_traits(view="select_subjects_to_be_processed_view") - - def _check_fired(self): - """Callback function when the Check Setting button is clicked.""" - self.check_settings() - - def _start_bidsapp_fired(self): - """Callback function when the Run BIDS App button is clicked.""" - self.start_bids_app() - - def check_settings(self): - """Checks if all the parameters of the BIDS App run are properly set before execution.""" - print_warning("\n-----------------------------------------") - print_warning("BIDS App execution settings check summary") - print_warning("-----------------------------------------") - - self.settings_checked = True - - if os.path.isdir(self.bids_root): - print(f"* BIDS root directory : {self.bids_root}") - else: - print_error("Error: BIDS root invalid!") - self.settings_checked = False - - if os.path.exists(os.path.join(self.output_dir, __cmp_directory__)): - print(f"* Output directory (existing) : {self.output_dir}") - else: - os.makedirs(os.path.join(self.output_dir, __cmp_directory__)) - print_warning(f"Output directory (created) : {self.output_dir}") - - if len(self.list_of_subjects_to_be_processed) > 0: - print( - f"* Participant labels to be processed : {self.list_of_subjects_to_be_processed}" - ) - else: - print_error( - "Error: At least one participant label to be processed should selected!" - ) - self.settings_checked = False - # if not self.list_of_subjects_to_be_processed.empty(): - # print("List of subjects to be processed : {}".format(self.list_of_subjects_to_be_processed)) - # else: - # print("Warning: List of subjects empty!") - - if os.path.isfile(self.anat_config): - print(f"* Anatomical configuration file : {self.anat_config}") - else: - print_error( - "Error: Configuration file for anatomical pipeline not existing!" - ) - self.settings_checked = False - - if os.path.isfile(self.dmri_config): - print(f"* Diffusion configuration file : {self.dmri_config}") - else: - print_warning( - "Warning: Configuration file for diffusion pipeline not existing!" 
- ) - - if os.path.isfile(self.fmri_config): - print(f"* fMRI configuration file : {self.fmri_config}") - else: - print_warning("Warning: Configuration file for fMRI pipeline not existing!") - - if os.path.isfile(self.fs_license): - print(f"* Freesurfer license : {self.fs_license}") - else: - print_error(f"Error: Invalid Freesurfer license ({self.fs_license})!") - self.settings_checked = False - - # if os.path.isdir(self.fs_average): - # print("fsaverage directory : {}".format(self.fs_average)) - # else: - # print("Error: fsaverage directory ({}) not existing!".format(self.fs_average)) - # self.settings_checked = False - - print(f"Valid inputs for BIDS App : {self.settings_checked}") - print(f"BIDS App Version Tag: {self.bidsapp_tag}") - print(f"Data provenance tracking (datalad) : {self.data_provenance_tracking}") - print( - f"Update computing environment (datalad) : {self.datalad_update_environment}" - ) - print( - f"Number of participant processed in parallel : {self.number_of_participants_processed_in_parallel}" - ) - print(f"Number of OpenMP threads / participant : {self.number_of_threads}") - - print(f"Fix number of ITK threads : {self.fix_ants_number_of_threads}") - if self.fix_ants_number_of_threads: - print( - f"Number of ITK threads (ANTs) / participant : {self.ants_number_of_threads}" - ) - - print(f"Fix seed in ANTS random number generator : {self.fix_ants_random_seed}") - if self.fix_ants_random_seed: - print(f"Seed value : {self.ants_random_seed}") - - print( - f"Fix seed in MRtrix random number generator : {self.fix_mrtrix_random_seed}" - ) - if self.fix_ants_random_seed: - print(f"Seed value : {self.mrtrix_random_seed}") - - print("-----------------------------------------\n") - - return True - - def start_bidsapp_participant_level_process(self, bidsapp_tag, participant_labels): - """Create and run the BIDS App command. - - Parameters - ---------- - bidsapp_tag : traits.Str - Version tag of the CMP 3 BIDS App - - participant_labels : traits.List - List of participants labels in the form ["01", "03", "04", ...] 
- """ - - cmd = [ - "docker", - "run", - "-it", - "--rm", - "-v", - "{}:/bids_dir".format(self.bids_root), - "-v", - "{}:/output_dir".format(self.output_dir), - "-v", - "{}:/bids_dir/code/license.txt".format(self.fs_license), - "-v", - "{}:/code/ref_anatomical_config.json".format(self.anat_config), - ] - - if self.run_dmri_pipeline: - cmd.append("-v") - cmd.append("{}:/code/ref_diffusion_config.json".format(self.dmri_config)) - - if self.run_fmri_pipeline: - cmd.append("-v") - cmd.append("{}:/code/ref_fMRI_config.json".format(self.fmri_config)) - - cmd.append("-u") - cmd.append("{}:{}".format(os.geteuid(), os.getegid())) - - cmd.append("sebastientourbier/connectomemapper-bidsapp:{}".format(bidsapp_tag)) - cmd.append("/bids_dir") - cmd.append("/output_dir") - cmd.append("participant") - - cmd.append("--participant_label") - for label in participant_labels: - cmd.append("{}".format(label)) - - cmd.append("--anat_pipeline_config") - cmd.append("/code/ref_anatomical_config.json") - - if self.run_dmri_pipeline: - cmd.append("--dwi_pipeline_config") - cmd.append("/code/ref_diffusion_config.json") - - if self.run_fmri_pipeline: - cmd.append("--func_pipeline_config") - cmd.append("/code/ref_fMRI_config.json") - - cmd.append("--fs_license") - cmd.append("{}".format("/bids_dir/code/license.txt")) - - cmd.append("--number_of_participants_processed_in_parallel") - cmd.append("{}".format(self.number_of_participants_processed_in_parallel)) - - cmd.append("--number_of_threads") - cmd.append("{}".format(self.number_of_threads)) - - if self.fix_ants_number_of_threads: - cmd.append("--ants_number_of_threads") - cmd.append("{}".format(self.ants_number_of_threads)) - - if self.fix_ants_random_seed: - cmd.append("--ants_random_seed") - cmd.append("{}".format(self.ants_random_seed)) - - if self.fix_mrtrix_random_seed: - cmd.append("--mrtrix_random_seed") - cmd.append("{}".format(self.mrtrix_random_seed)) - - print_blue("... BIDS App execution command: {}".format(" ".join(cmd))) - - proc = Popen(cmd) - # proc = Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) - - return proc - - def start_bidsapp_participant_level_process_with_datalad( - self, bidsapp_tag, participant_labels - ): - """Create and run the BIDS App command with Datalad. - - Parameters - ---------- - bidsapp_tag : traits.Str - Version tag of the CMP 3 BIDS App - - participant_labels : traits.List - List of participants labels in the form ["01", "03", "04", ...] 
- """ - cmd = [ - "datalad", - "containers-run", - "--container-name", - "connectomemapper-bidsapp-{}".format("-".join(bidsapp_tag.split("."))), - "-m", - "Processing with connectomemapper-bidsapp {}".format(bidsapp_tag), - "--input", - f"{self.anat_config}", - ] - - # for label in participant_labels: - # cmd.append('--input') - # cmd.append('sub-{}/ses-*/anat/sub-*_T1w.*'.format(label)) - # - # cmd.append('--input') - # cmd.append('derivatives/freesurfer/sub-{}*/*'.format(label)) - # - # if self.run_dmri_pipeline: - # cmd.append('--input') - # cmd.append('sub-{}/ses-*/dwi/sub-*_dwi.*'.format(label)) - # - # if self.run_fmri_pipeline: - # cmd.append('--input') - # cmd.append('sub-{}/ses-*/func/sub-*_bold.*'.format(label)) - - if self.run_dmri_pipeline: - cmd.append("--input") - cmd.append(f"{self.dmri_config}") - - if self.run_fmri_pipeline: - cmd.append("--input") - cmd.append(f"{self.fmri_config}") - - cmd.append("--output") - cmd.append(f"{self.output_dir}") - # for label in participant_labels: - # cmd.append('--input') - # cmd.append('{}'.format(label)) - - cmd.append("/bids_dir") - cmd.append("/output_dir") - cmd.append("participant") - - cmd.append("--participant_label") - for label in participant_labels: - cmd.append("{}".format(label)) - - # Counter to track position of config file as --input - i = 0 - cmd.append("--anat_pipeline_config") - cmd.append("/{{inputs[{}]}}".format(i)) - i += 1 - if self.run_dmri_pipeline: - cmd.append("--dwi_pipeline_config") - cmd.append("/{{inputs[{}]}}".format(i)) - i += 1 - - if self.run_fmri_pipeline: - cmd.append("--func_pipeline_config") - cmd.append("/{{inputs[{}]}}".format(i)) - - print_blue("... Datalad cmd : {}".format(" ".join(cmd))) - - proc = Popen(cmd, cwd=os.path.join(self.bids_root)) - # proc = Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=os.path.join(self.bids_root,'derivatives')) - - return proc - - @classmethod - def manage_bidsapp_procs(self, proclist): - """Manage parallelized process at the participant level - - Parameters - ---------- - proclist : List of subprocess.Popen - List of Popen processes - """ - for proc in proclist: - if proc.poll() is not None: - proclist.remove(proc) - - @classmethod - def run(self, command, env=None, cwd=os.getcwd()): - """Function to run datalad commands. - - It runs the command specified as input via ``subprocess.run()``. - - Parameters - ---------- - command : string - String containing the command to be executed (required) - - env : os.environ - Specify a custom os.environ - - cwd : os.path - Specify a custom current working directory - - Examples - -------- - >>> cmd = 'datalad save -m my dataset change message' - >>> run(cmd) # doctest: +SKIP - """ - merged_env = os.environ - if env is not None: - merged_env.update(env) - process = Popen( - command, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - shell=True, - env=merged_env, - cwd=cwd, - ) - while True: - line = process.stdout.readline() - # Remove the "b'" prefix and the "'" at the end return by datalad - line = str(line)[2:-1] - print(line) - if line == "" and process.poll() is not None: - break - if process.returncode != 0: - raise Exception( - BColors.FAIL - + f"Non zero return code: {process.returncode}" - + BColors.ENDC - ) - - def start_bids_app(self): - """Function executed when the Run BIDS App button is clicked. - - It implements all steps in the creation and execution of the BIDS App - with or without datalad. 
- """ - print_blue("[Run BIDS App]") - - # Copy freesurfer license into dataset/code directory at the location - # the BIDS app expects to find it. - - license_dst = os.path.join(self.bids_root, "code", "license.txt") - - if not os.access(license_dst, os.F_OK): - dst = os.path.join(self.bids_root, "code", "license.txt") - print("> Copy FreeSurfer license (BIDS App Manager) ") - print("... src : {}".format(self.fs_license)) - print("... dst : {}".format(dst)) - shutil.copy2(src=self.fs_license, dst=dst) - else: - print_warning( - "> FreeSurfer license copy skipped as it already exists(BIDS App Manager) " - ) - - print("> Datalad available: {}".format(self.datalad_is_available)) - - # self.datalad_is_available = False - - if self.datalad_is_available and self.data_provenance_tracking: - # Detect structure subject/session - session_structure = False - res = glob.glob(os.path.join(self.bids_root, "sub-*/*/anat")) - # print(res) - if len(res) > 0: - session_structure = True - print(" INFO : Subject/Session structure detected!") - else: - print(" INFO : Subject structure detected!") - - # Equivalent to: - # >> datalad create derivatives - # >> cd derivatives - # >> datalad containers-add connectomemapper-bidsapp-{} --url dhub://sebastientourbier/connectomemapper-bidsapp:{} - if not os.path.isdir(os.path.join(self.bids_root, ".datalad")): - cmd = [ - "datalad", - "create", - "--force", - "-D", - f'"Creation of datalad dataset to be processed by the connectome mapper bidsapp (tag:{self.bidsapp_tag})"', - "-c", - "text2git", - "-d", - f"{self.bids_root}", - ] - cmd = " ".join(cmd) - try: - print_blue(f"... cmd: {cmd}") - self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) - print( - " INFO: A datalad dataset has been created with success at the root directory!" - ) - msg = ( - "Add all files to datalad. " - "Dataset ready to be linked with the BIDS App." - ) - - except Exception: - msg = "Save state after error at datalad dataset creation" - print_error( - " DATALAD ERROR: Failed to create the datalad dataset" - ) - else: - msg = "Datalad dataset up-to-date and ready to be linked with the BIDS App." - print(" INFO: A datalad dataset already exists!") - - # log_filename = os.path.join(self.bids_root,'derivatives','cmp','main-datalad_log-cmpbidsapp.txt') - # - # if not os.path.exists(os.path.join(self.bids_root,'derivatives','cmp')): - # os.makedirs(os.path.join(self.bids_root,'derivatives','cmp')) - - # create an empty log file to be tracked by datalad - # f = open(log_filename,"w+") - # f.close() - - cmd = f'datalad save -d . -m "{msg}"' - try: - print_blue(f"... cmd: {cmd}") - self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) - except Exception: - print_error(" DATALAD ERROR: Failed to add changes to dataset") - - datalad_container = os.path.join( - self.bids_root, - ".datalad", - "environments", - "connectomemapper-bidsapp-{}".format( - "-".join(self.bidsapp_tag.split(".")) - ), - "image", - ) - add_container = True - update_container = False - if os.path.isdir(datalad_container): - if self.datalad_update_environment: - print( - " INFO: Container already listed in the datalad dataset and will be updated!" - ) - shutil.rmtree(datalad_container) - add_container = True - else: - add_container = False - print( - " INFO: Container already listed in the datalad dataset and will NOT be updated!" - ) - else: - add_container = True - print( - " INFO: Add a new computing environment (container image) to the datalad dataset!" 
- ) - - if add_container: - # Define the docker run command executed by Datalad. - # It makes the assumption that the license.txt and the configuration files - # are located in the code/ directory. - docker_cmd = [ - "docker", - "run", - "--rm", - "-t", - "-v", - '"$(pwd)":/bids_dir', - "-v", - '"$(pwd)"/derivatives:/output_dir', - "-v", - '"$(pwd)"/code/license.txt:/bids_dir/code/license.txt', - "-v", - f'"$(pwd)"/code/{os.path.basename(self.anat_config)}:/code/ref_anatomical_config.json', - ] - - if self.run_dmri_pipeline: - docker_cmd.append("-v") - docker_cmd.append( - f'"$(pwd)"/code/{os.path.basename(self.dmri_config)}:/code/ref_diffusion_config.json' - ) - - if self.run_fmri_pipeline: - docker_cmd.append("-v") - docker_cmd.append( - f'"$(pwd)"/code/{os.path.basename(self.fmri_config)}:/code/ref_fMRI_config.json' - ) - - docker_cmd.append("-u") - docker_cmd.append("{}:{}".format(os.geteuid(), os.getegid())) - - docker_cmd.append( - f"sebastientourbier/connectomemapper-bidsapp:{self.bidsapp_tag}" - ) - docker_cmd.append("{cmd}") - - # Define and run the command to add the container image to datalad - version_tag = "-".join(self.bidsapp_tag.split(".")) - cmd = [ - "datalad", - "containers-add", - f"connectomemapper-bidsapp-{version_tag}", - "--url", - f"dhub://sebastientourbier/connectomemapper-bidsapp:{self.bidsapp_tag}", - "-d", - ".", - "--call-fmt", - ] - - cmd = " ".join(cmd) - docker_cmd = " ".join(docker_cmd) - cmd = f'{cmd} "{docker_cmd}"' - - if self.datalad_update_environment: - cmd = f"{cmd} --update" - try: - print_blue(f"... cmd: {cmd}") - self.run(cmd, env={}, cwd=os.path.join(self.bids_root)) - print( - " INFO: Container image has been linked to dataset with success!" - ) - except Exception: - print_error( - " DATALAD ERROR: Failed to link the container image to the dataset" - ) - - # Create a list of files to be retrieved by datalad get - datalad_get_list = [self.anat_config] - - if self.run_dmri_pipeline: - datalad_get_list.append(self.dmri_config) - - if self.run_dmri_pipeline: - datalad_get_list.append(self.fmri_config) - - if session_structure: - for label in self.list_of_subjects_to_be_processed: - datalad_get_list.append( - "sub-{}/ses-*/anat/sub-{}*_T1w.*".format(label, label) - ) - datalad_get_list.append( - "derivatives/{}/sub-{}*/*".format(__freesurfer_directory__, label) - ) - if self.run_dmri_pipeline: - datalad_get_list.append( - "sub-{}/ses-*/dwi/sub-{}*_dwi.*".format(label, label) - ) - if self.run_fmri_pipeline: - datalad_get_list.append( - "sub-{}/ses-*/func/sub-{}*_bold.*".format(label, label) - ) - else: - for label in self.list_of_subjects_to_be_processed: - datalad_get_list.append( - "sub-{}/anat/sub-{}*_T1w.*".format(label, label) - ) - datalad_get_list.append( - "derivatives/{}/sub-{}/*".format(__freesurfer_directory__, label) - ) - if self.run_dmri_pipeline: - datalad_get_list.append( - "sub-{}/dwi/sub-{}*_dwi.*".format(label, label) - ) - if self.run_fmri_pipeline: - datalad_get_list.append( - "sub-{}/func/sub-{}*_bold.*".format(label, label) - ) - - cmd = ( - 'datalad save -d . -m "Dataset state after adding the container image. ' - 'Datasets ready to get files via datalad run."' - ) - try: - print_blue(f"... cmd: {cmd}") - self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) - except Exception: - print_error( - " DATALAD ERROR: Failed to add existing files to dataset" - ) - - cmd = 'datalad run -d . 
-m "Get files for sub-{}" bash -c "datalad get {}"'.format( - self.list_of_subjects_to_be_processed, " ".join(datalad_get_list) - ) - try: - print_blue(f"... cmd: {cmd}") - self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) - except Exception: - print_error( - " DATALAD ERROR: Failed to get files (cmd: datalad get {})".format( - " ".join(datalad_get_list) - ) - ) - - cmd = ( - 'datalad save -d . -m "Dataset state after getting the files. Dataset ready for connectome mapping." ' - "--version-tag ready4analysis-{}".format(time.strftime("%Y%m%d-%H%M%S")) - ) - try: - print_blue(f"... cmd: {cmd}") - self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) - except Exception: - print_error(" DATALAD ERROR: Failed to commit changes to dataset") - - cmd = "datalad status -d ." - try: - print_blue(f"... cmd: {cmd}") - self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) - except Exception: - print_error(" DATALAD ERROR: Failed to run datalad rev-status") - - # maxprocs = multiprocessing.cpu_count() - processes = [] - - self.docker_running = True - - if self.datalad_is_available and self.data_provenance_tracking: - - proc = self.start_bidsapp_participant_level_process_with_datalad( - self.bidsapp_tag, self.list_of_subjects_to_be_processed - ) - - else: - proc = self.start_bidsapp_participant_level_process( - self.bidsapp_tag, self.list_of_subjects_to_be_processed - ) - - processes.append(proc) - - while len(processes) > 0: - self.manage_bidsapp_procs(processes) - - if self.datalad_is_available and self.data_provenance_tracking: - # Clean remaining cache files generated in tmp/ of the docker image - # project.clean_cache(self.bids_root) - - cmd = 'datalad save -d . -m "Dataset processed by the connectomemapper-bidsapp:{}" --version-tag processed-{}'.format( - self.bidsapp_tag, time.strftime("%Y%m%d-%H%M%S") - ) - try: - print_blue(f"... cmd: {cmd}") - self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) - except Exception: - print_error( - " DATALAD ERROR: Failed to commit derivatives to datalad dataset" - ) - - cmd = "datalad diff -t HEAD~1" - try: - print_blue(f"... cmd: {cmd}") - self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) - except Exception: - print_error(" DATALAD ERROR: Failed to run datalad diff -t HEAD~1") - - print("Processing with BIDS App Finished") - self.docker_running = False - return True - - # def stop_bids_app(self, ui_info): - # print("Stop BIDS App") - # #self.docker_process.kill() - # self.docker_running = False - # return True - - -class CMP_ConfiguratorWindow(HasTraits): - """Class that defines the Configurator Window. 
- - Attributes - ---------- - project_info : CMP_Project_Info - Instance of :class:`CMP_Project_Info` that represents the processing project - - anat_pipeline : Instance(HasTraits) - Instance of anatomical MRI pipeline UI - - dmri_pipeline : Instance(HasTraits) - Instance of diffusion MRI pipeline UI - - fmri_pipeline : Instance(HasTraits) - Instance of functional MRI pipeline UI - - anat_inputs_checked : traits.Bool - Boolean that indicates if anatomical pipeline inputs are available - (Default: False) - - dmri_inputs_checked = : traits.Bool - Boolean that indicates if diffusion pipeline inputs are available - (Default: False) - - fmri_inputs_checked : traits.Bool - Boolean that indicates if functional pipeline inputs are available - (Default: False) - - anat_save_config : traits.ui.Action - TraitsUI Action to save the anatomical pipeline configuration - - dmri_save_config : traits.ui.Action - TraitsUI Action to save the diffusion pipeline configuration - - fmri_save_config : traits.ui.Action - TraitsUI Action to save the functional pipeline configuration - - save_all_config : traits.ui.Button - Button to save all configuration files at once - - traits_view : QtView - TraitsUI QtView that describes the content of the window - """ - - project_info = Instance(CMP_Project_Info) - - anat_pipeline = Instance(HasTraits) - dmri_pipeline = Instance(HasTraits) - fmri_pipeline = Instance(HasTraits) - - anat_inputs_checked = Bool(False) - dmri_inputs_checked = Bool(False) - fmri_inputs_checked = Bool(False) - - anat_save_config = Action( - name="Save anatomical pipeline configuration as...", - action="save_anat_config_file", - ) - dmri_save_config = Action( - name="Save diffusion pipeline configuration as...", - action="save_dmri_config_file", - ) - fmri_save_config = Action( - name="Save fMRI pipeline configuration as...", action="save_fmri_config_file" - ) - - # anat_load_config = Action(name='Load anatomical pipeline configuration...',action='anat_load_config_file') - # dmri_load_config = Action(name='Load diffusion pipeline configuration...',action='load_dmri_config_file') - # fmri_load_config = Action(name='Load fMRI pipeline configuration...',action='load_fmri_config_file') - - save_all_config = Button("") - - traits_view = QtView( - Group( - Group( - Item("anat_pipeline", style="custom", show_label=False), - label="Anatomical pipeline", - dock="tab", - ), - Group( - Item( - "dmri_pipeline", - style="custom", - show_label=False, - enabled_when="dmri_inputs_checked", - visible_when="dmri_inputs_checked", - ), - label="Diffusion pipeline", - dock="tab", - ), - Group( - Item( - "fmri_pipeline", - style="custom", - show_label=False, - enabled_when="fmri_inputs_checked", - visible_when="fmri_inputs_checked", - ), - label="fMRI pipeline", - dock="tab", - ), - orientation="horizontal", - layout="tabbed", - springy=True, - enabled_when="anat_inputs_checked", - ), - spring, - HGroup( - spring, - Item( - "save_all_config", - style="custom", - width=315, - height=35, - resizable=False, - label="", - show_label=False, - style_sheet=return_button_style_sheet( - ImageResource( - pkg_resources.resource_filename( - "resources", - os.path.join("buttons", "configurator-saveall.png"), - ) - ).absolute_path - ), - enabled_when="anat_inputs_checked==True", - ), - spring, - show_labels=False, - label="", - ), - title="Connectome Mapper 3 Configurator", - menubar=MenuBar( - Menu( - ActionGroup(anat_save_config, dmri_save_config, fmri_save_config), - ActionGroup(Action(name="Quit", action="_on_close")), - 
name="File", - ) - ), - handler=project.CMP_ConfigQualityWindowHandler(), - style_sheet=style_sheet, - buttons=[], - width=0.5, - height=0.8, - resizable=True, # scrollable=True, - icon=get_icon("configurator.png"), - ) - - def __init__( - self, - project_info=None, - anat_pipeline=None, - dmri_pipeline=None, - fmri_pipeline=None, - anat_inputs_checked=False, - dmri_inputs_checked=False, - fmri_inputs_checked=False, - ): - """Constructor of an :class:``CMP_ConfiguratorWindow`` instance. - - Parameters - ---------- - project_info : cmp.project.CMP_Project_Info - :class:`CMP_Project_Info` object (Default: None) - - anat_pipeline - Instance of :class:`cmp.bidsappmanager.pipelines.anatomical.AnatomicalPipelineUI` - (Default: None) - - dmri_pipeline - Instance of :class:`cmp.bidsappmanager.pipelines.diffusion.DiffusionPipelineUI` - (Default: None) - - fmri_pipeline - Instance of :class:`cmp.bidsappmanager.pipelines.functional.fMRIPipelineUI` - (Default: None) - - anat_inputs_checked : traits.Bool - Boolean that indicates if anatomical pipeline inputs are available - (Default: False) - - dmri_inputs_checked = : traits.Bool - Boolean that indicates if diffusion pipeline inputs are available - (Default: False) - - fmri_inputs_checked : traits.Bool - Boolean that indicates if functional pipeline inputs are available - (Default: False) - """ - print("> Initialize window...") - self.project_info = project_info - - self.anat_pipeline = anat_pipeline - self.dmri_pipeline = dmri_pipeline - self.fmri_pipeline = fmri_pipeline - - if self.anat_pipeline is not None: - self.anat_pipeline.view_mode = "config_view" - - if self.dmri_pipeline is not None: - self.dmri_pipeline.view_mode = "config_view" - - if self.fmri_pipeline is not None: - self.fmri_pipeline.view_mode = "config_view" - - self.anat_inputs_checked = anat_inputs_checked - self.dmri_inputs_checked = dmri_inputs_checked - self.fmri_inputs_checked = fmri_inputs_checked - - def update_diffusion_imaging_model(self, new): - self.dmri_pipeline.diffusion_imaging_model = new - - def _save_all_config_fired(self): - print_blue("[Save all pipeline configuration files]") - - if self.anat_inputs_checked: - anat_config_file = os.path.join( - self.project_info.base_directory, "code", "ref_anatomical_config.json" - ) - project.anat_save_config(self.anat_pipeline, anat_config_file) - print(" * Anatomical config saved as {}".format(anat_config_file)) - - if self.dmri_inputs_checked: - dmri_config_file = os.path.join( - self.project_info.base_directory, "code", "ref_diffusion_config.json" - ) - project.dmri_save_config(self.dmri_pipeline, dmri_config_file) - print(" * Diffusion config saved as {}".format(dmri_config_file)) - - if self.fmri_inputs_checked: - fmri_config_file = os.path.join( - self.project_info.base_directory, "code", "ref_fMRI_config.json" - ) - project.fmri_save_config(self.fmri_pipeline, fmri_config_file) - print(" * fMRI config saved as {}".format(fmri_config_file)) - - -# Window class of the ConnectomeMapper_Pipeline Quality Inspector -# -class CMP_InspectorWindow(HasTraits): - """Class that defines the Configurator Window. 
- - Attributes - ---------- - project_info : CMP_Project_Info - Instance of :class:`CMP_Project_Info` that represents the processing project - - anat_pipeline : Instance(HasTraits) - Instance of anatomical MRI pipeline - - dmri_pipeline : Instance(HasTraits) - Instance of diffusion MRI pipeline - - fmri_pipeline : Instance(HasTraits) - Instance of functional MRI pipeline - - anat_inputs_checked : traits.Bool - Indicates if inputs of anatomical pipeline are available - (Default: False) - - dmri_inputs_checked : traits.Bool - Indicates if inputs of diffusion pipeline are available - (Default: False) - - fmri_inputs_checked : traits.Bool - Indicates if inputs of functional pipeline are available - (Default: False) - - output_anat_available : traits.Bool - Indicates if outputs of anatomical pipeline are available - (Default: False) - - output_dmri_available : traits.Bool - Indicates if outputs of diffusion pipeline are available - (Default: False) - - output_fmri_available : traits.Bool - Indicates if outputs of functional pipeline are available - (Default: False) - - traits_view : QtView - TraitsUI QtView that describes the content of the window - """ - - project_info = Instance(CMP_Project_Info) - - anat_pipeline = Instance(HasTraits) - dmri_pipeline = Instance(HasTraits) - fmri_pipeline = Instance(HasTraits) - - anat_inputs_checked = Bool(False) - dmri_inputs_checked = Bool(False) - fmri_inputs_checked = Bool(False) - - output_anat_available = Bool(False) - output_dmri_available = Bool(False) - output_fmri_available = Bool(False) - - traits_view = QtView( - Group( - # Group( - # # Include('dataset_view'),label='Data manager',springy=True - # Item('project_info',style='custom',show_label=False),label='Data manager',springy=True, dock='tab' - # ), - Group( - Item("anat_pipeline", style="custom", show_label=False), - visible_when="output_anat_available", - label="Anatomical pipeline", - dock="tab", - ), - Group( - Item( - "dmri_pipeline", - style="custom", - show_label=False, - visible_when="output_dmri_available", - ), - label="Diffusion pipeline", - dock="tab", - ), - Group( - Item( - "fmri_pipeline", - style="custom", - show_label=False, - visible_when="output_fmri_available", - ), - label="fMRI pipeline", - dock="tab", - ), - orientation="horizontal", - layout="tabbed", - springy=True, - enabled_when="output_anat_available", - ), - title="Connectome Mapper 3 Inspector", - menubar=MenuBar( - Menu( - ActionGroup( - Action(name="Quit", action="_on_close"), - ), - name="File", - ), - ), - handler=project.CMP_ConfigQualityWindowHandler(), - style_sheet=style_sheet, - width=0.5, - height=0.8, - resizable=True, # scrollable=True, - icon=get_icon("qualitycontrol.png"), - ) - - error_msg = Str("") - error_view = View( - Group( - Item("error_msg", style="readonly", show_label=False), - ), - title="Error", - kind="modal", - # style_sheet=style_sheet, - buttons=["OK"], - ) - - def __init__( - self, - project_info=None, - anat_inputs_checked=False, - dmri_inputs_checked=False, - fmri_inputs_checked=False, - ): - """Constructor of an :class:``CMP_ConfiguratorWindow`` instance. 
- - Parameters - ---------- - project_info : cmp.project.CMP_Project_Info - :class:`CMP_Project_Info` object (Default: None) - - anat_inputs_checked : traits.Bool - Boolean that indicates if anatomical pipeline inputs are available - (Default: False) - - dmri_inputs_checked = : traits.Bool - Boolean that indicates if diffusion pipeline inputs are available - (Default: False) - - fmri_inputs_checked : traits.Bool - Boolean that indicates if functional pipeline inputs are available - (Default: False) - """ - print("> Initialize window...") - self.project_info = project_info - - self.anat_inputs_checked = anat_inputs_checked - self.dmri_inputs_checked = dmri_inputs_checked - self.fmri_inputs_checked = fmri_inputs_checked - - aborded = self.select_subject() - - if aborded: - raise Exception( - BColors.FAIL - + " .. ABORDED: The quality control window will not be displayed." - + "Selection of subject/session was cancelled at initialization." - + BColors.ENDC - ) - - def select_subject(self): - """Function to select the subject and session for which to inspect outputs.""" - print("> Selection of subject (and session) for which to inspect outputs") - valid_selected_subject = False - select = True - aborded = False - - while not valid_selected_subject and not aborded: - - # Select subject from BIDS dataset - np_res = self.project_info.configure_traits(view="subject_view") - - if not np_res: - aborded = True - break - - print(" .. INFO: Selected subject: {}".format(self.project_info.subject)) - - # Select session if any - bids_layout = BIDSLayout(self.project_info.base_directory) - subject = self.project_info.subject.split("-")[1] - - sessions = bids_layout.get( - target="session", return_type="id", subject=subject - ) - - if len(sessions) > 0: - print(" .. 
INFO: Input dataset has sessions") - print(sessions) - - self.project_info.subject_sessions = [] - - for ses in sessions: - self.project_info.subject_sessions.append("ses-" + str(ses)) - - np_res = self.project_info.configure_traits(view="subject_session_view") - - if not np_res: - aborded = True - break - - self.project_info.anat_config_file = os.path.join( - self.project_info.base_directory, - "derivatives", - __cmp_directory__, - "{}".format(self.project_info.subject), - "{}".format(self.project_info.subject_session), - "{}_{}_anatomical_config.json".format( - self.project_info.subject, self.project_info.subject_session - ), - ) - if os.access(self.project_info.anat_config_file, os.F_OK): - print("> Initialize anatomical pipeline") - self.anat_pipeline = project.init_anat_project( - self.project_info, False - ) - else: - self.anat_pipeline = None - - if self.dmri_inputs_checked: - self.project_info.dmri_config_file = os.path.join( - self.project_info.base_directory, - "derivatives", - __cmp_directory__, - "{}".format(self.project_info.subject), - "{}".format(self.project_info.subject_session), - "{}_{}_diffusion_config.json".format( - self.project_info.subject, self.project_info.subject_session - ), - ) - if os.access(self.project_info.dmri_config_file, os.F_OK): - print("> Initialize diffusion pipeline") - ( - dmri_valid_inputs, - self.dmri_pipeline, - ) = project.init_dmri_project( - self.project_info, bids_layout, False - ) - else: - self.dmri_pipeline = None - - # self.dmri_pipeline.subject = self.project_info.subject - # self.dmri_pipeline.global_conf.subject = self.project_info.subject - - if self.fmri_inputs_checked: - self.project_info.fmri_config_file = os.path.join( - self.project_info.base_directory, - "derivatives", - __cmp_directory__, - "{}".format(self.project_info.subject), - "{}".format(self.project_info.subject_session), - "{}_{}_fMRI_config.json".format( - self.project_info.subject, self.project_info.subject_session - ), - ) - if os.access(self.project_info.fmri_config_file, os.F_OK): - print("> Initialize fMRI pipeline") - ( - fmri_valid_inputs, - self.fmri_pipeline, - ) = project.init_fmri_project( - self.project_info, bids_layout, False - ) - else: - self.fmri_pipeline = None - - # self.fmri_pipeline.subject = self.project_info.subject - # self.fmri_pipeline.global_conf.subject = self.project_info.subject - - # self.anat_pipeline.global_conf.subject_session = self.project_info.subject_session - - # if self.dmri_pipeline is not None: - # self.dmri_pipeline.global_conf.subject_session = self.project_info.subject_session - # - # if self.fmri_pipeline is not None: - # self.fmri_pipeline.global_conf.subject_session = self.project_info.subject_session - - print( - " .. INFO: Selected session %s" % self.project_info.subject_session - ) - if self.anat_pipeline is not None: - self.anat_pipeline.stages[ - "Segmentation" - ].config.freesurfer_subject_id = os.path.join( - self.project_info.base_directory, - "derivatives", - __freesurfer_directory__, - "{}_{}".format( - self.project_info.subject, self.project_info.subject_session - ), - ) - else: - print(" .. 
INFO: No session detected") - self.project_info.anat_config_file = os.path.join( - self.project_info.base_directory, - "derivatives", - __cmp_directory__, - "{}".format(self.project_info.subject), - "{}_anatomical_config.json".format(self.project_info.subject), - ) - if os.access(self.project_info.anat_config_file, os.F_OK): - self.anat_pipeline = project.init_anat_project( - self.project_info, False - ) - else: - self.anat_pipeline = None - - if self.dmri_inputs_checked: - self.project_info.dmri_config_file = os.path.join( - self.project_info.base_directory, - "derivatives", - __cmp_directory__, - "{}".format(self.project_info.subject), - "{}_diffusion_config.json".format(self.project_info.subject), - ) - if os.access(self.project_info.dmri_config_file, os.F_OK): - ( - dmri_valid_inputs, - self.dmri_pipeline, - ) = project.init_dmri_project( - self.project_info, bids_layout, False - ) - else: - self.dmri_pipeline = None - - # self.dmri_pipeline.subject = self.project_info.subject - # self.dmri_pipeline.global_conf.subject = self.project_info.subject - - if self.fmri_inputs_checked: - self.project_info.fmri_config_file = os.path.join( - self.project_info.base_directory, - "derivatives", - __cmp_directory__, - "{}".format(self.project_info.subject), - "{}_fMRI_config.json".format(self.project_info.subject), - ) - if os.access(self.project_info.fmri_config_file, os.F_OK): - ( - fmri_valid_inputs, - self.fmri_pipeline, - ) = project.init_fmri_project( - self.project_info, bids_layout, False - ) - else: - self.fmri_pipeline = None - - # self.fmri_pipeline.subject = self.project_info.subject - # self.fmri_pipeline.global_conf.subject = self.project_info.subject - - # self.anat_pipeline.global_conf.subject_session = '' - if self.anat_pipeline is not None: - self.anat_pipeline.stages[ - "Segmentation" - ].config.freesurfer_subjects_dir = os.path.join( - self.project_info.base_directory, - "derivatives", - __freesurfer_directory__, - "{}".format(self.project_info.subject), - ) - - if self.anat_pipeline is not None: - print("> Anatomical pipeline output inspection") - self.anat_pipeline.view_mode = "inspect_outputs_view" - for stage in list(self.anat_pipeline.stages.values()): - print(" ... Inspect stage {}".format(stage)) - stage.define_inspect_outputs() - # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs)) - if (len(stage.inspect_outputs) > 0) and ( - stage.inspect_outputs[0] != "Outputs not available" - ): - self.output_anat_available = True - - if self.dmri_pipeline is not None: - print("> Diffusion pipeline output inspection") - self.dmri_pipeline.view_mode = "inspect_outputs_view" - for stage in list(self.dmri_pipeline.stages.values()): - print(" ... Inspect stage {}".format(stage)) - stage.define_inspect_outputs() - # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs)) - if (len(stage.inspect_outputs) > 0) and ( - stage.inspect_outputs[0] != "Outputs not available" - ): - self.output_dmri_available = True - - if self.fmri_pipeline is not None: - print("> fMRI pipeline output inspection") - self.fmri_pipeline.view_mode = "inspect_outputs_view" - for stage in list(self.fmri_pipeline.stages.values()): - print(" ... Inspect stage {}".format(stage)) - stage.define_inspect_outputs() - # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs)) - if (len(stage.inspect_outputs) > 0) and ( - stage.inspect_outputs[0] != "Outputs not available" - ): - self.output_fmri_available = True - - print_blue( - " .. 
Anatomical output(s) available : %s" % self.output_anat_available - ) - print_blue( - " .. Diffusion output(s) available : %s" % self.output_dmri_available - ) - print_blue( - " .. fMRI output(s) available : %s" % self.output_fmri_available - ) - - if ( - self.output_anat_available - or self.output_dmri_available - or self.output_fmri_available - ): - valid_selected_subject = True - else: - self.error_msg = ( - " .. ERROR: No output available! " - + "Please select another subject (and session if any)!" - ) - print_error(self.error_msg) - select = error( - message=self.error_msg, title="Error", buttons=["OK", "Cancel"] - ) - aborded = not select - - return aborded - - def update_diffusion_imaging_model(self, new): - """Function called when ``diffusion_imaging_model`` is updated.""" - self.dmri_pipeline.diffusion_imaging_model = new - - -class CMP_MainWindow(HasTraits): - """Class that defines the Main window of the Connectome Mapper 3 GUI. - - Attributes - ---------- - project_info : CMP_Project_InfoUI - Instance of :class:`CMP_Project_InfoUI` that represents the processing project - - anat_pipeline : Instance(HasTraits) - Instance of anatomical MRI pipeline UI - - dmri_pipeline : Instance(HasTraits) - Instance of diffusion MRI pipeline UI - - fmri_pipeline : Instance(HasTraits) - Instance of functional MRI pipeline UI - - bidsapp_ui : CMP_Project_Info - Instance of :class:`CMP_BIDSAppWindow` - - load_dataset : traits.ui.Action - TraitsUI Action to load a BIDS dataset - - bidsapp : traits.ui.Button - Button that displays the BIDS App Interface window - - configurator : traits.ui.Button - Button thats displays the pipeline Configurator window - - quality_control : traits.ui.Button - Button that displays the pipeline Quality Control / Inspector window - - manager_group : traits.ui.View - TraitsUI View that describes the content of the main window - - traits_view : QtView - TraitsUI QtView that includes ``manager_group`` and parameterize - the window with menu - """ - - project_info = Instance(CMP_Project_Info) - - anat_pipeline = Instance(HasTraits) - dmri_pipeline = Instance(HasTraits) - fmri_pipeline = Instance(HasTraits) - - # configurator_ui = Instance(CMP_PipelineConfigurationWindow) - bidsapp_ui = Instance(CMP_BIDSAppWindow) - # quality_control_ui = Instance(CMP_QualityControlWindow) - - load_dataset = Action(name="Load BIDS Dataset...", action="load_dataset") - - project_info.style_sheet = style_sheet - - configurator = Button("") - bidsapp = Button("") - quality_control = Button("") - - view_mode = 1 - - manager_group = VGroup( - spring, - HGroup( - spring, - HGroup( - Item( - "configurator", - style="custom", - width=200, - height=200, - resizable=False, - label="", - show_label=False, - style_sheet=return_button_style_sheet( - ImageResource( - pkg_resources.resource_filename( - "cmp", - os.path.join( - "bidsappmanager/images", "configurator_200x200.png" - ), - ) - ).absolute_path - ), - ), - show_labels=False, - label="", - ), - spring, - HGroup( - Item( - "bidsapp", - style="custom", - width=200, - height=200, - resizable=False, - style_sheet=return_button_style_sheet( - ImageResource( - pkg_resources.resource_filename( - "cmp", - os.path.join( - "bidsappmanager/images", "bidsapp_200x200.png" - ), - ) - ).absolute_path - ), - ), - show_labels=False, - label="", - ), - spring, - HGroup( - Item( - "quality_control", - style="custom", - width=200, - height=200, - resizable=False, - style_sheet=return_button_style_sheet( - ImageResource( - pkg_resources.resource_filename( - "cmp", - 
os.path.join( - "bidsappmanager/images", - "qualitycontrol_200x200.png", - ), - ) - ).absolute_path - ), - ), - show_labels=False, - label="", - ), - spring, - springy=True, - visible_when="handler.project_loaded==True", - ), - spring, - springy=True, - ) - - traits_view = QtView( - HGroup( - Include("manager_group"), - ), - title="Connectome Mapper {} - BIDS App Manager".format(__version__), - menubar=MenuBar( - Menu( - ActionGroup( - load_dataset, - ), - ActionGroup( - Action(name="Quit", action="_on_close"), - ), - name="File", - ), - ), - handler=project.CMP_MainWindowHandler(), - style_sheet=style_sheet, - width=0.5, - height=0.8, - resizable=True, # , scrollable=True , resizable=True - icon=get_icon("cmp.png"), - ) - - def _bidsapp_fired(self): - """ Callback of the "bidsapp" button. This displays the BIDS App Interface window.""" - print_blue("[Open BIDS App Window]") - bids_layout = BIDSLayout(self.project_info.base_directory) - subjects = bids_layout.get_subjects() - - anat_config = os.path.join( - self.project_info.base_directory, "code/", "ref_anatomical_config.json" - ) - dmri_config = os.path.join( - self.project_info.base_directory, "code/", "ref_diffusion_config.json" - ) - fmri_config = os.path.join( - self.project_info.base_directory, "code/", "ref_fMRI_config.json" - ) - - self.bidsapp_ui = CMP_BIDSAppWindow( - project_info=self.project_info, - bids_root=self.project_info.base_directory, - subjects=subjects, - list_of_subjects_to_be_processed=subjects, - # anat_config=self.project_info.anat_config_file, - # dmri_config=self.project_info.dmri_config_file, - # fmri_config=self.project_info.fmri_config_file - anat_config=anat_config, - dmri_config=dmri_config, - fmri_config=fmri_config, - ) - self.bidsapp_ui.configure_traits() - - def _configurator_fired(self): - """Callback of the "configurator" button. This displays the Configurator Window.""" - print_blue("[Open Pipeline Configurator Window]") - if self.project_info.t1_available: - if os.path.isfile(self.project_info.anat_config_file): - print( - " .. Anatomical config file : %s" - % self.project_info.anat_config_file - ) - - if self.project_info.dmri_available: - if os.path.isfile(self.project_info.dmri_config_file): - print( - " .. Diffusion config file : %s" - % self.project_info.dmri_config_file - ) - - if self.project_info.fmri_available: - if os.path.isfile(self.project_info.fmri_config_file): - print(" .. fMRI config file : %s" % self.project_info.fmri_config_file) - - self.configurator_ui = CMP_ConfiguratorWindow( - project_info=self.project_info, - anat_pipeline=self.anat_pipeline, - dmri_pipeline=self.dmri_pipeline, - fmri_pipeline=self.fmri_pipeline, - anat_inputs_checked=self.project_info.t1_available, - dmri_inputs_checked=self.project_info.dmri_available, - fmri_inputs_checked=self.project_info.fmri_available, - ) - - self.configurator_ui.configure_traits() - - def _quality_control_fired(self): - """Callback of the "Inspector" button. This displays the Quality Control (Inspector) Window.""" - print_blue("[Open Quality Inspector Window]") - if self.project_info.t1_available: - if os.path.isfile(self.project_info.anat_config_file): - print( - " .. Anatomical config file : %s" - % self.project_info.anat_config_file - ) - - if self.project_info.dmri_available: - if os.path.isfile(self.project_info.dmri_config_file): - print( - " .. Diffusion config file : %s" - % self.project_info.dmri_config_file - ) - - if self.project_info.fmri_available: - if os.path.isfile(self.project_info.fmri_config_file): - print(" .. 
fMRI config file : %s" % self.project_info.fmri_config_file) - - try: - self.quality_control_ui = CMP_InspectorWindow( - project_info=self.project_info, - anat_inputs_checked=self.project_info.t1_available, - dmri_inputs_checked=self.project_info.dmri_available, - fmri_inputs_checked=self.project_info.fmri_available, - ) - self.quality_control_ui.configure_traits() - except Exception as e: - print(e) diff --git a/cmp/bidsappmanager/gui/__init__.py b/cmp/bidsappmanager/gui/__init__.py new file mode 100644 index 000000000..ff9173558 --- /dev/null +++ b/cmp/bidsappmanager/gui/__init__.py @@ -0,0 +1,7 @@ +# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors +# All rights reserved. +# +# This software is distributed under the open-source license Modified BSD. + +"""Module that provides the definition of all classes, functions, and variables dedicated to the GUI of Connectome Mapper 3.""" diff --git a/cmp/bidsappmanager/gui/bidsapp.py b/cmp/bidsappmanager/gui/bidsapp.py new file mode 100644 index 000000000..03c95f79b --- /dev/null +++ b/cmp/bidsappmanager/gui/bidsapp.py @@ -0,0 +1,1286 @@ +# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors +# All rights reserved. +# +# This software is distributed under the open-source license Modified BSD. + +"""Connectome Mapper BIDS App Interface Window.""" + +# General imports +import os +import sys + +import pkg_resources +from subprocess import Popen +import subprocess +import multiprocessing +import shutil +import time +import glob + +from pyface.api import ImageResource +from traitsui.qt4.extra.qt_view import QtView +from traitsui.api import * +from traits.api import * + +from bids import BIDSLayout + +# Own imports +import cmp.project +from cmp.info import __version__ + +from cmtklib.bids.io import ( + __cmp_directory__, __freesurfer_directory__ +) +from cmtklib.util import ( + return_button_style_sheet, + BColors, + print_blue, + print_warning, + print_error, +) + +import cmp.bidsappmanager.gui.handlers +import cmp.bidsappmanager.project as project +from cmp.bidsappmanager.gui.traits import MultiSelectAdapter +from cmp.bidsappmanager.gui.globals import get_icon + +# Remove warnings visible whenever you import scipy (or another package) +# that was compiled against an older numpy than is installed. +import warnings +warnings.filterwarnings("ignore", message="numpy.dtype size changed") +warnings.filterwarnings("ignore", message="numpy.ufunc size changed") + + +class BIDSAppInterfaceWindow(HasTraits): + """Class that defines the Window of the BIDS App Interface. 
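+
+    The window exposes the participant labels found in the BIDS dataset,
+    lets the user select which ones to process and with which pipeline
+    configuration files, checks the execution settings, and finally launches
+    the Connectome Mapper 3 BIDS App either directly with Docker or through
+    ``datalad containers-run`` when data provenance tracking is enabled.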
+ + Attributes + ---------- + project_info : ProjectInfo + Instance of :class:`CMP_Project_Info` that represents the processing project + + bids_root : traits.Directory + BIDS root dataset directory + + output_dir : traits.Directory + Output directory + + subjects : traits.List + List of subjects (in the form ``sub-XX``) present in the dataset + + number_of_participants_processed_in_parallel : traits.Range + Number of participants / subjects to be processed in parallel that + takes values in the [1, # of CPUs - 1] range + + number_threads_max : traits.Int + Maximal number of threads to be used by OpenMP programs + (4 by default) + + number_of_threads : traits.Range + Number of threads to be used by OpenMP programs that takes values + in the [1, ``number_threads_max``] range + + fs_file : traits.File + Path to Freesurfer license file + + list_of_subjects_to_be_processed : List(Str) + Selection of subjects to be processed from the ``subjects`` list + + dmri_inputs_checked : traits.Bool + True if dMRI data is available in the dataset + + fmri_inputs_checked : traits.Bool + rue if fMRI data is available in the dataset + + anat_config : traits.File + Configuration file for the anatomical MRI pipeline + + dmri_config : traits.File + Configuration file for the diffusion MRI pipeline + + fmri_config : traits.File + Configuration file for the functional MRI pipeline + + run_anat_pipeline : traits.Bool + If True, run the anatomical pipeline + + run_dmri_pipeline : traits.Bool + If True, run the diffusion pipeline + + run_fmri_pipeline : traits.Bool + If True, run the functional pipeline + + bidsapp_tag : traits.Enum + Selection of BIDS App version to use + + data_provenance_tracking : traits.Bool + If set and if ``datalad_is_available`` is True run the BIDS App + using datalad (False by default) + + datalad_update_environment : traits.Bool + If True and ``data_provenance_tracking`` is True, tell to datalad + to update the BIDS App container image if there was a previous + execution (True by default) + + datalad_is_available : traits.Bool + Boolean used to store if datalad is available in the computing + environment (False by default) + + check : traits.ui.Button + Button to check if all parameters are properly set for execution + of the BIDS App + + start_bidsapp : traits.ui.Button + Button to run the BIDS App + + traits_view : QtView + TraitsUI QtView that describes the content of the window + """ + project_info = Instance(cmp.project.ProjectInfo) + + bids_root = Directory() + output_dir = Directory() + subjects = List(Str) + + # multiproc_number_of_cores = Int(1) + number_of_participants_processed_in_parallel = Range( + low=1, + high=multiprocessing.cpu_count() - 1, + desc="Number of participants to be processed in parallel", + ) + + number_of_threads_max = Int(multiprocessing.cpu_count() - 1) + + number_of_threads = Range( + low=1, + high="number_of_threads_max", + mode="spinner", + desc="Number of OpenMP threads used by Dipy, FSL, MRtrix, " + "and Freesurfer recon-all", + ) + + fix_ants_random_seed = Bool( + False, desc="Fix MRtrix3 random generator seed for tractography" + ) + ants_random_seed = Int(1234, desc="MRtrix random generator seed value") + + fix_mrtrix_random_seed = Bool( + False, desc="Fix ANTs random generator seed for registration" + ) + mrtrix_random_seed = Int(1234, desc="ANTs random generator seed value") + + fix_ants_number_of_threads = Bool( + False, desc="Fix independently number of threads used by ANTs registration" + ) + ants_number_of_threads = Range( + low=1, + 
high="number_of_threads_max", + mode="spinner", + desc="Number of ITK threads used by ANTs registration", + ) + + fs_license = File(desc="Path to your FREESURFER license.txt") + # fs_average = Directory(os.path.join(os.environ['FREESURFER_HOME'],'subjects','fsaverage')) + + list_of_subjects_to_be_processed = List(Str) + + list_of_processing_logfiles = List(File) + + anat_config = File(desc="Path to the configuration file of the anatomical pipeline") + dmri_config = File(desc="Path to the configuration file of the diffusion pipeline") + fmri_config = File(desc="Path to the configuration file of the fMRI pipeline") + + run_anat_pipeline = Bool(True, desc="Run the anatomical pipeline") + run_dmri_pipeline = Bool(False, desc="Run the diffusion pipeline") + run_fmri_pipeline = Bool(False, desc="Run the fMRI pipeline") + + dmri_inputs_checked = Bool(False) + fmri_inputs_checked = Bool(False) + + settings_checked = Bool(False) + docker_running = Bool(False) + + bidsapp_tag = Enum("{}".format(__version__), ["latest", "{}".format(__version__)]) + + data_provenance_tracking = Bool( + False, desc="Use datalad to execute CMP3 and record dataset changes" + ) + + datalad_update_environment = Bool( + True, + desc="Update the container if datalad run-container has been run already once", + ) + + datalad_is_available = Bool(False, desc="True if datalad is available") + + update_selection = Button() + check = Button() + start_bidsapp = Button() + + # stop_bidsapp = Action(name='Stop BIDS App!',action='stop_bids_app',enabled_when='handler.settings_checked and handler.docker_running') + + traits_view = QtView( + Group( + VGroup( + VGroup( + Item("bidsapp_tag", style="readonly", label="Tag"), + label="BIDS App Version", + ), + VGroup( + Item("bids_root", style="readonly", label="Input directory"), + Item( + "output_dir", + style="simple", + label="Output directory", + enabled_when="not(data_provenance_tracking)", + ), + label="BIDS dataset", + ), + VGroup( + HGroup( + UItem( + "subjects", + editor=TabularEditor( + show_titles=True, + selected="list_of_subjects_to_be_processed", + editable=False, + multi_select=True, + adapter=MultiSelectAdapter( + columns=[("Available labels", "myvalue")] + ), + ), + ), + UItem( + "list_of_subjects_to_be_processed", + editor=TabularEditor( + show_titles=True, + editable=False, + adapter=MultiSelectAdapter( + columns=[("Labels to be processed", "myvalue")] + ), + ), + ), + ), + label="Participant labels to be processed", + ), + HGroup( + Item( + "number_of_participants_processed_in_parallel", + label="Number of participants processed in parallel", + ), + label="Parallel processing", + ), + VGroup( + HGroup( + VGroup( + Item("number_of_threads", label="Number of OpenMP threads"), + Item( + "fix_ants_number_of_threads", + label="Set number of threads used by ANTs", + ), + Item( + "ants_number_of_threads", + label="Number of ITK threads used by ANTs registration", + enabled_when="fix_ants_number_of_threads", + ), + label="Multithreading", + ), + VGroup( + Item( + "fix_ants_random_seed", + label="Set seed of ANTS random number generator", + ), + Item( + "ants_random_seed", + label="Seed", + enabled_when="fix_ants_random_seed", + ), + Item( + "fix_mrtrix_random_seed", + label="Set seed of MRtrix random number generator", + ), + Item( + "mrtrix_random_seed", + label="Seed", + enabled_when="fix_mrtrix_random_seed", + ), + label="Random number generators", + ), + ), + label="Advanced execution settings for each participant process", + ), + VGroup( + Group( + Item( + "anat_config", + 
editor=FileEditor(dialog_style="open"), + label="Configuration file", + visible_when="run_anat_pipeline", + ), + label="Anatomical pipeline", + ), + Group( + Item("run_dmri_pipeline", label="Run processing stages"), + Item( + "dmri_config", + editor=FileEditor(dialog_style="open"), + label="Configuration file", + visible_when="run_dmri_pipeline", + ), + label="Diffusion pipeline", + visible_when="dmri_inputs_checked==True", + ), + Group( + Item("run_fmri_pipeline", label="Run processing stages"), + Item( + "fmri_config", + editor=FileEditor(dialog_style="open"), + label="Configuration file", + visible_when="run_fmri_pipeline", + ), + label="fMRI pipeline", + visible_when="fmri_inputs_checked==True", + ), + label="Configuration of processing pipelines", + ), + VGroup( + Item( + "fs_license", + editor=FileEditor(dialog_style="open"), + label="LICENSE", + ), + # Item('fs_average', label='FSaverage directory'), + label="Freesurfer configuration", + ), + VGroup( + Item("data_provenance_tracking", label="Use Datalad"), + Item( + "datalad_update_environment", + visible_when="data_provenance_tracking", + label="Update the computing environment (if existing)", + ), + label="Data Provenance Tracking / Data Lineage", + enabled_when="datalad_is_available", + ), + orientation="vertical", + springy=True, + ), + spring, + HGroup( + spring, + Item( + "check", + style="custom", + width=152, + height=35, + resizable=False, + label="", + show_label=False, + style_sheet=return_button_style_sheet( + ImageResource( + pkg_resources.resource_filename( + "resources", + os.path.join("buttons", "bidsapp-check-settings.png"), + ) + ).absolute_path + ), + ), + spring, + Item( + "start_bidsapp", + style="custom", + width=152, + height=35, + resizable=False, + label="", + show_label=False, + style_sheet=return_button_style_sheet( + ImageResource( + pkg_resources.resource_filename( + "resources", os.path.join("buttons", "bidsapp-run.png") + ) + ).absolute_path, + ImageResource( + pkg_resources.resource_filename( + "resources", + os.path.join("buttons", "bidsapp-run-disabled.png"), + ) + ).absolute_path, + ), + enabled_when="settings_checked==True and docker_running==False", + ), + spring, + show_labels=False, + label="", + ), + orientation="vertical", + springy=True, + ), + title="Connectome Mapper 3 BIDS App GUI", + # kind='modal', + handler=cmp.bidsappmanager.gui.handlers.BIDSAppInterfaceWindowHandler(), + # style_sheet=style_sheet, + buttons=[], + # buttons = [check,start_bidsapp], + # buttons = [process_anatomical,map_dmri_connectome,map_fmri_connectome], + # buttons = [preprocessing, map_connectome, map_custom], + width=0.6, + height=0.8, + scrollable=True, # , resizable=True + icon=get_icon("bidsapp.png"), + ) + + log_view = QtView( + Group( + Item("list_of_processing_logfiles"), orientation="vertical", springy=True + ), + title="Connectome Mapper 3 BIDS App Progress", + # kind='modal', + # handler=project.BIDSAppInterfaceWindowHandler(), + # style_sheet=style_sheet, + buttons=[], + # buttons = [check,start_bidsapp], + # buttons = [process_anatomical,map_dmri_connectome,map_fmri_connectome], + # buttons = [preprocessing, map_connectome, map_custom], + width=0.5, + height=0.8, + resizable=True, # , scrollable=True, resizable=True + icon=get_icon("bidsapp.png"), + ) + + def __init__( + self, + project_info=None, + bids_root="", + subjects=None, + list_of_subjects_to_be_processed=None, + anat_config="", + dmri_config="", + fmri_config="", + ): + """Constructor of an :class:``BIDSAppInterfaceWindow`` instance. 
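+
+        Note that the constructor queries the BIDS dataset with ``pybids`` to
+        detect which modalities (T1w, DWI, resting-state BOLD) are available,
+        initializes the output directory to ``<bids_root>/derivatives`` and
+        tries to locate a FreeSurfer ``license.txt`` (first in
+        ``$FREESURFER_HOME``, then in ``<bids_root>/code``).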
+ + Parameters + ---------- + project_info : cmp.project.ProjectInfo + :class:`CMP_Project_Info` object (Default: None) + + bids_root : traits.Directory + BIDS dataset root directory (Default: \'\') + + subjects : List of string + List of subjects in the dataset (Default: None) + + list_of_subjects_to_be_processed : List of string + List of subjects to be processed (Default: None) + + anat_config : string + Path to anatomical pipeline configuration file (Default: \'\') + + dmri_config : string + Path to diffusion pipeline configuration file (Default: \'\') + + fmri_config : string + Path to functional pipeline configuration file (Default: \'\') + """ + print("> Initialize window...") + if multiprocessing.cpu_count() < 4: + self.number_of_threads_max = multiprocessing.cpu_count() + + self.project_info = project_info + self.bids_root = bids_root + + # Create a BIDSLayout for checking availability of dMRI and fMRI data + try: + bids_layout = BIDSLayout(self.bids_root) + except Exception: + print_error(" .. Exception : Raised at BIDSLayout") + sys.exit(1) + + # Check if sMRI data is available in the dataset + smri_files = bids_layout.get( + datatype="anat", suffix="T1w", extensions="nii.gz", return_type="file" + ) + + if not smri_files: + anat_inputs_checked = False + else: + anat_inputs_checked = True + + print(f" .. T1w available: {anat_inputs_checked}") + + # Check if dMRI data is available in the dataset + dmri_files = bids_layout.get( + datatype="dwi", suffix="dwi", extensions="nii.gz", return_type="file" + ) + + if not dmri_files: + self.dmri_inputs_checked = False + self.run_dmri_pipeline = False + else: + self.dmri_inputs_checked = True + self.run_dmri_pipeline = True + + print(f" .. DWI available: {self.dmri_inputs_checked}") + + # Check if fMRI data is available in the dataset + fmri_files = bids_layout.get( + task="rest", + datatype="func", + suffix="bold", + extensions="nii.gz", + return_type="file", + ) + if not fmri_files: + self.fmri_inputs_checked = False + self.run_fmri_pipeline = False + else: + self.fmri_inputs_checked = True + self.run_fmri_pipeline = True + + print(f" .. rsfMRI available: {self.fmri_inputs_checked}") + + # Initialize output directory to be /bids_dir/derivatives + self.output_dir = os.path.join(bids_root, "derivatives") + + self.subjects = subjects + # self.list_of_subjects_to_be_processed = list_of_subjects_to_be_processed + self.anat_config = anat_config + self.dmri_config = dmri_config + self.fmri_config = fmri_config + + if 'FREESURFER_HOME' in os.environ: + self.fs_license = os.path.join( + os.environ['FREESURFER_HOME'], 'license.txt') + elif os.path.isfile(os.path.join(bids_root, 'code', 'license.txt')): + self.fs_license = os.path.join(bids_root, 'code', 'license.txt') + else: + print_error('.. 
ERROR: Environment variable $FREESURFER_HOME not found and no Freesurfer license file ' + 'found in local code-folder ') + self.fs_license = '' + print_warning('Freesurfer license unset ({})'.format(self.fs_license)) + + self.datalad_is_available = project.is_tool("datalad") + + self.on_trait_change(self.update_run_dmri_pipeline, "run_dmri_pipeline") + self.on_trait_change(self.update_run_fmri_pipeline, "run_fmri_pipeline") + + self.on_trait_change( + self.number_of_parallel_procs_updated, + "number_of_participants_processed_in_parallel", + ) + + self.on_trait_change( + self.update_checksettings, "list_of_subjects_to_be_processed" + ) + self.on_trait_change(self.update_checksettings, "anat_config") + self.on_trait_change(self.update_checksettings, "run_dmri_pipeline") + self.on_trait_change(self.update_checksettings, "dmri_config") + self.on_trait_change(self.update_checksettings, "run_fmri_pipeline") + self.on_trait_change(self.update_checksettings, "fmri_config") + self.on_trait_change(self.update_checksettings, "fs_license") + # self.on_trait_change(self.update_checksettings, 'fs_average') + + def number_of_parallel_procs_updated(self, new): + """Callback function when ``number_of_parallel_procs`` is updated.""" + number_of_threads_max = int((multiprocessing.cpu_count() - 1) / new) + + if number_of_threads_max > 4: + self.number_of_threads_max = 4 + else: + self.number_of_threads_max = number_of_threads_max + + print( + " .. INFO : Update number of threads max to : {}".format( + self.number_of_threads_max + ) + ) + + def update_run_anat_pipeline(self, new): + """Callback function when ``run_anat_pipeline`` is updated.""" + if new is False: + print_warning(" .. WARNING: At least anatomical pipeline should be run!") + self.run_anat_pipeline = True + + def update_run_dmri_pipeline(self, new): + """Callback function when ``run_dmri_pipeline`` is updated.""" + self.run_anat_pipeline = True + + def update_run_fmri_pipeline(self, new): + """Callback function when ``run_fmri_pipeline`` is updated.""" + self.run_anat_pipeline = True + + def update_checksettings(self, new): + """Function that reset ``settings_checked`` attribute to False.""" + self.settings_checked = False + + def _data_provenance_tracking_changed(self, new): + """Callback function `data_provenance_tracking` attribute is updated.""" + if new is True: + self.output_dir = os.path.join(self.bids_root, "derivatives") + self.data_provenance_tracking = new + + def _update_selection_fired(self): + """Callback function when the list of selected subjects is updated.""" + self.configure_traits(view="select_subjects_to_be_processed_view") + + def _check_fired(self): + """Callback function when the Check Setting button is clicked.""" + self.check_settings() + + def _start_bidsapp_fired(self): + """Callback function when the Run BIDS App button is clicked.""" + self.start_bids_app() + + def check_settings(self): + """Checks if all the parameters of the BIDS App run are properly set before execution.""" + print_warning("\n-----------------------------------------") + print_warning("BIDS App execution settings check summary") + print_warning("-----------------------------------------") + + self.settings_checked = True + + if os.path.isdir(self.bids_root): + print(f"* BIDS root directory : {self.bids_root}") + else: + print_error("Error: BIDS root invalid!") + self.settings_checked = False + + if os.path.exists(os.path.join(self.output_dir, __cmp_directory__)): + print(f"* Output directory (existing) : {self.output_dir}") + else: + 
os.makedirs(os.path.join(self.output_dir, __cmp_directory__)) + print_warning(f"Output directory (created) : {self.output_dir}") + + if len(self.list_of_subjects_to_be_processed) > 0: + print( + f"* Participant labels to be processed : {self.list_of_subjects_to_be_processed}" + ) + else: + print_error( + "Error: At least one participant label to be processed should selected!" + ) + self.settings_checked = False + # if not self.list_of_subjects_to_be_processed.empty(): + # print("List of subjects to be processed : {}".format(self.list_of_subjects_to_be_processed)) + # else: + # print("Warning: List of subjects empty!") + + if os.path.isfile(self.anat_config): + print(f"* Anatomical configuration file : {self.anat_config}") + else: + print_error( + "Error: Configuration file for anatomical pipeline not existing!" + ) + self.settings_checked = False + + if os.path.isfile(self.dmri_config): + print(f"* Diffusion configuration file : {self.dmri_config}") + else: + print_warning( + "Warning: Configuration file for diffusion pipeline not existing!" + ) + + if os.path.isfile(self.fmri_config): + print(f"* fMRI configuration file : {self.fmri_config}") + else: + print_warning("Warning: Configuration file for fMRI pipeline not existing!") + + if os.path.isfile(self.fs_license): + print(f"* Freesurfer license : {self.fs_license}") + else: + print_error(f"Error: Invalid Freesurfer license ({self.fs_license})!") + self.settings_checked = False + + # if os.path.isdir(self.fs_average): + # print("fsaverage directory : {}".format(self.fs_average)) + # else: + # print("Error: fsaverage directory ({}) not existing!".format(self.fs_average)) + # self.settings_checked = False + + print(f"Valid inputs for BIDS App : {self.settings_checked}") + print(f"BIDS App Version Tag: {self.bidsapp_tag}") + print(f"Data provenance tracking (datalad) : {self.data_provenance_tracking}") + print( + f"Update computing environment (datalad) : {self.datalad_update_environment}" + ) + print( + f"Number of participant processed in parallel : {self.number_of_participants_processed_in_parallel}" + ) + print(f"Number of OpenMP threads / participant : {self.number_of_threads}") + + print(f"Fix number of ITK threads : {self.fix_ants_number_of_threads}") + if self.fix_ants_number_of_threads: + print( + f"Number of ITK threads (ANTs) / participant : {self.ants_number_of_threads}" + ) + + print(f"Fix seed in ANTS random number generator : {self.fix_ants_random_seed}") + if self.fix_ants_random_seed: + print(f"Seed value : {self.ants_random_seed}") + + print( + f"Fix seed in MRtrix random number generator : {self.fix_mrtrix_random_seed}" + ) + if self.fix_ants_random_seed: + print(f"Seed value : {self.mrtrix_random_seed}") + + print("-----------------------------------------\n") + + return True + + def start_bidsapp_participant_level_process(self, bidsapp_tag, participant_labels): + """Create and run the BIDS App command. + + Parameters + ---------- + bidsapp_tag : traits.Str + Version tag of the CMP 3 BIDS App + + participant_labels : traits.List + List of participants labels in the form ["01", "03", "04", ...] 
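+
+        Examples
+        --------
+        A minimal sketch of a direct call; ``window`` is a placeholder for an
+        already configured :class:`BIDSAppInterfaceWindow` instance and is not
+        defined by this module.
+
+        >>> proc = window.start_bidsapp_participant_level_process(
+        ...     bidsapp_tag="latest",
+        ...     participant_labels=["01", "02"]
+        ... )  # doctest: +SKIP
+        >>> proc.wait()  # doctest: +SKIP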
+ """ + + cmd = [ + "docker", + "run", + "-it", + "--rm", + "-v", + "{}:/bids_dir".format(self.bids_root), + "-v", + "{}:/output_dir".format(self.output_dir), + "-v", + "{}:/bids_dir/code/license.txt".format(self.fs_license), + "-v", + "{}:/code/ref_anatomical_config.json".format(self.anat_config), + ] + + if self.run_dmri_pipeline: + cmd.append("-v") + cmd.append("{}:/code/ref_diffusion_config.json".format(self.dmri_config)) + + if self.run_fmri_pipeline: + cmd.append("-v") + cmd.append("{}:/code/ref_fMRI_config.json".format(self.fmri_config)) + + cmd.append("-u") + cmd.append("{}:{}".format(os.geteuid(), os.getegid())) + + cmd.append("sebastientourbier/connectomemapper-bidsapp:{}".format(bidsapp_tag)) + cmd.append("/bids_dir") + cmd.append("/output_dir") + cmd.append("participant") + + cmd.append("--participant_label") + for label in participant_labels: + cmd.append("{}".format(label)) + + cmd.append("--anat_pipeline_config") + cmd.append("/code/ref_anatomical_config.json") + + if self.run_dmri_pipeline: + cmd.append("--dwi_pipeline_config") + cmd.append("/code/ref_diffusion_config.json") + + if self.run_fmri_pipeline: + cmd.append("--func_pipeline_config") + cmd.append("/code/ref_fMRI_config.json") + + cmd.append("--fs_license") + cmd.append("{}".format("/bids_dir/code/license.txt")) + + cmd.append("--number_of_participants_processed_in_parallel") + cmd.append("{}".format(self.number_of_participants_processed_in_parallel)) + + cmd.append("--number_of_threads") + cmd.append("{}".format(self.number_of_threads)) + + if self.fix_ants_number_of_threads: + cmd.append("--ants_number_of_threads") + cmd.append("{}".format(self.ants_number_of_threads)) + + if self.fix_ants_random_seed: + cmd.append("--ants_random_seed") + cmd.append("{}".format(self.ants_random_seed)) + + if self.fix_mrtrix_random_seed: + cmd.append("--mrtrix_random_seed") + cmd.append("{}".format(self.mrtrix_random_seed)) + + print_blue("... BIDS App execution command: {}".format(" ".join(cmd))) + + proc = Popen(cmd) + # proc = Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + + return proc + + def start_bidsapp_participant_level_process_with_datalad( + self, bidsapp_tag, participant_labels + ): + """Create and run the BIDS App command with Datalad. + + Parameters + ---------- + bidsapp_tag : traits.Str + Version tag of the CMP 3 BIDS App + + participant_labels : traits.List + List of participants labels in the form ["01", "03", "04", ...] 
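+
+        Examples
+        --------
+        A minimal sketch of a direct call; ``window`` is a placeholder for an
+        already configured :class:`BIDSAppInterfaceWindow` instance, and
+        DataLad with its containers extension (providing
+        ``datalad containers-run``) is assumed to be installed.
+
+        >>> proc = window.start_bidsapp_participant_level_process_with_datalad(
+        ...     bidsapp_tag="latest",
+        ...     participant_labels=["01"]
+        ... )  # doctest: +SKIP
+        >>> proc.wait()  # doctest: +SKIP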
+ """ + cmd = [ + "datalad", + "containers-run", + "--container-name", + "connectomemapper-bidsapp-{}".format("-".join(bidsapp_tag.split("."))), + "-m", + "Processing with connectomemapper-bidsapp {}".format(bidsapp_tag), + "--input", + f"{self.anat_config}", + ] + + # for label in participant_labels: + # cmd.append('--input') + # cmd.append('sub-{}/ses-*/anat/sub-*_T1w.*'.format(label)) + # + # cmd.append('--input') + # cmd.append('derivatives/freesurfer/sub-{}*/*'.format(label)) + # + # if self.run_dmri_pipeline: + # cmd.append('--input') + # cmd.append('sub-{}/ses-*/dwi/sub-*_dwi.*'.format(label)) + # + # if self.run_fmri_pipeline: + # cmd.append('--input') + # cmd.append('sub-{}/ses-*/func/sub-*_bold.*'.format(label)) + + if self.run_dmri_pipeline: + cmd.append("--input") + cmd.append(f"{self.dmri_config}") + + if self.run_fmri_pipeline: + cmd.append("--input") + cmd.append(f"{self.fmri_config}") + + cmd.append("--output") + cmd.append(f"{self.output_dir}") + # for label in participant_labels: + # cmd.append('--input') + # cmd.append('{}'.format(label)) + + cmd.append("/bids_dir") + cmd.append("/output_dir") + cmd.append("participant") + + cmd.append("--participant_label") + for label in participant_labels: + cmd.append("{}".format(label)) + + # Counter to track position of config file as --input + i = 0 + cmd.append("--anat_pipeline_config") + cmd.append("/{{inputs[{}]}}".format(i)) + i += 1 + if self.run_dmri_pipeline: + cmd.append("--dwi_pipeline_config") + cmd.append("/{{inputs[{}]}}".format(i)) + i += 1 + + if self.run_fmri_pipeline: + cmd.append("--func_pipeline_config") + cmd.append("/{{inputs[{}]}}".format(i)) + + print_blue("... Datalad cmd : {}".format(" ".join(cmd))) + + proc = Popen(cmd, cwd=os.path.join(self.bids_root)) + # proc = Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=os.path.join(self.bids_root,'derivatives')) + + return proc + + @classmethod + def manage_bidsapp_procs(self, proclist): + """Manage parallelized process at the participant level + + Parameters + ---------- + proclist : List of subprocess.Popen + List of Popen processes + """ + for proc in proclist: + if proc.poll() is not None: + proclist.remove(proc) + + @classmethod + def run(self, command, env=None, cwd=os.getcwd()): + """Function to run datalad commands. + + It runs the command specified as input via ``subprocess.run()``. + + Parameters + ---------- + command : string + String containing the command to be executed (required) + + env : os.environ + Specify a custom os.environ + + cwd : os.path + Specify a custom current working directory + + Examples + -------- + >>> cmd = 'datalad save -m my dataset change message' + >>> run(cmd) # doctest: +SKIP + """ + merged_env = os.environ + if env is not None: + merged_env.update(env) + process = Popen( + command, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + shell=True, + env=merged_env, + cwd=cwd, + ) + while True: + line = process.stdout.readline() + # Remove the "b'" prefix and the "'" at the end return by datalad + line = str(line)[2:-1] + print(line) + if line == "" and process.poll() is not None: + break + if process.returncode != 0: + raise Exception( + BColors.FAIL + + f"Non zero return code: {process.returncode}" + + BColors.ENDC + ) + + def start_bids_app(self): + """Function executed when the Run BIDS App button is clicked. + + It implements all steps in the creation and execution of the BIDS App + with or without datalad. 
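+
+        Examples
+        --------
+        Normally triggered by the "Run BIDS App" button, but it can also be
+        called directly on a hypothetical, fully configured ``window``
+        instance.
+
+        >>> window.check_settings()  # doctest: +SKIP
+        True
+        >>> window.start_bids_app()  # doctest: +SKIP
+        True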
+ """ + print_blue("[Run BIDS App]") + + # Copy freesurfer license into dataset/code directory at the location + # the BIDS app expects to find it. + + license_dst = os.path.join(self.bids_root, "code", "license.txt") + + if not os.access(license_dst, os.F_OK): + dst = os.path.join(self.bids_root, "code", "license.txt") + print("> Copy FreeSurfer license (BIDS App Manager) ") + print("... src : {}".format(self.fs_license)) + print("... dst : {}".format(dst)) + shutil.copy2(src=self.fs_license, dst=dst) + else: + print_warning( + "> FreeSurfer license copy skipped as it already exists(BIDS App Manager) " + ) + + print("> Datalad available: {}".format(self.datalad_is_available)) + + # self.datalad_is_available = False + + if self.datalad_is_available and self.data_provenance_tracking: + # Detect structure subject/session + session_structure = False + res = glob.glob(os.path.join(self.bids_root, "sub-*/*/anat")) + # print(res) + if len(res) > 0: + session_structure = True + print(" INFO : Subject/Session structure detected!") + else: + print(" INFO : Subject structure detected!") + + # Equivalent to: + # >> datalad create derivatives + # >> cd derivatives + # >> datalad containers-add connectomemapper-bidsapp-{} --url dhub://sebastientourbier/connectomemapper-bidsapp:{} + if not os.path.isdir(os.path.join(self.bids_root, ".datalad")): + cmd = [ + "datalad", + "create", + "--force", + "-D", + f'"Creation of datalad dataset to be processed by the connectome mapper bidsapp (tag:{self.bidsapp_tag})"', + "-c", + "text2git", + "-d", + f"{self.bids_root}", + ] + cmd = " ".join(cmd) + try: + print_blue(f"... cmd: {cmd}") + self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) + print( + " INFO: A datalad dataset has been created with success at the root directory!" + ) + msg = ( + "Add all files to datalad. " + "Dataset ready to be linked with the BIDS App." + ) + + except Exception: + msg = "Save state after error at datalad dataset creation" + print_error( + " DATALAD ERROR: Failed to create the datalad dataset" + ) + else: + msg = "Datalad dataset up-to-date and ready to be linked with the BIDS App." + print(" INFO: A datalad dataset already exists!") + + # log_filename = os.path.join(self.bids_root,'derivatives','cmp','main-datalad_log-cmpbidsapp.txt') + # + # if not os.path.exists(os.path.join(self.bids_root,'derivatives','cmp')): + # os.makedirs(os.path.join(self.bids_root,'derivatives','cmp')) + + # create an empty log file to be tracked by datalad + # f = open(log_filename,"w+") + # f.close() + + cmd = f'datalad save -d . -m "{msg}"' + try: + print_blue(f"... cmd: {cmd}") + self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) + except Exception: + print_error(" DATALAD ERROR: Failed to add changes to dataset") + + datalad_container = os.path.join( + self.bids_root, + ".datalad", + "environments", + "connectomemapper-bidsapp-{}".format( + "-".join(self.bidsapp_tag.split(".")) + ), + "image", + ) + add_container = True + update_container = False + if os.path.isdir(datalad_container): + if self.datalad_update_environment: + print( + " INFO: Container already listed in the datalad dataset and will be updated!" + ) + shutil.rmtree(datalad_container) + add_container = True + else: + add_container = False + print( + " INFO: Container already listed in the datalad dataset and will NOT be updated!" + ) + else: + add_container = True + print( + " INFO: Add a new computing environment (container image) to the datalad dataset!" 
+ ) + + if add_container: + # Define the docker run command executed by Datalad. + # It makes the assumption that the license.txt and the configuration files + # are located in the code/ directory. + docker_cmd = [ + "docker", + "run", + "--rm", + "-t", + "-v", + '"$(pwd)":/bids_dir', + "-v", + '"$(pwd)"/derivatives:/output_dir', + "-v", + '"$(pwd)"/code/license.txt:/bids_dir/code/license.txt', + "-v", + f'"$(pwd)"/code/{os.path.basename(self.anat_config)}:/code/ref_anatomical_config.json', + ] + + if self.run_dmri_pipeline: + docker_cmd.append("-v") + docker_cmd.append( + f'"$(pwd)"/code/{os.path.basename(self.dmri_config)}:/code/ref_diffusion_config.json' + ) + + if self.run_fmri_pipeline: + docker_cmd.append("-v") + docker_cmd.append( + f'"$(pwd)"/code/{os.path.basename(self.fmri_config)}:/code/ref_fMRI_config.json' + ) + + docker_cmd.append("-u") + docker_cmd.append("{}:{}".format(os.geteuid(), os.getegid())) + + docker_cmd.append( + f"sebastientourbier/connectomemapper-bidsapp:{self.bidsapp_tag}" + ) + docker_cmd.append("{cmd}") + + # Define and run the command to add the container image to datalad + version_tag = "-".join(self.bidsapp_tag.split(".")) + cmd = [ + "datalad", + "containers-add", + f"connectomemapper-bidsapp-{version_tag}", + "--url", + f"dhub://sebastientourbier/connectomemapper-bidsapp:{self.bidsapp_tag}", + "-d", + ".", + "--call-fmt", + ] + + cmd = " ".join(cmd) + docker_cmd = " ".join(docker_cmd) + cmd = f'{cmd} "{docker_cmd}"' + + if self.datalad_update_environment: + cmd = f"{cmd} --update" + try: + print_blue(f"... cmd: {cmd}") + self.run(cmd, env={}, cwd=os.path.join(self.bids_root)) + print( + " INFO: Container image has been linked to dataset with success!" + ) + except Exception: + print_error( + " DATALAD ERROR: Failed to link the container image to the dataset" + ) + + # Create a list of files to be retrieved by datalad get + datalad_get_list = [self.anat_config] + + if self.run_dmri_pipeline: + datalad_get_list.append(self.dmri_config) + + if self.run_dmri_pipeline: + datalad_get_list.append(self.fmri_config) + + if session_structure: + for label in self.list_of_subjects_to_be_processed: + datalad_get_list.append( + "sub-{}/ses-*/anat/sub-{}*_T1w.*".format(label, label) + ) + datalad_get_list.append( + "derivatives/{}/sub-{}*/*".format(__freesurfer_directory__, label) + ) + if self.run_dmri_pipeline: + datalad_get_list.append( + "sub-{}/ses-*/dwi/sub-{}*_dwi.*".format(label, label) + ) + if self.run_fmri_pipeline: + datalad_get_list.append( + "sub-{}/ses-*/func/sub-{}*_bold.*".format(label, label) + ) + else: + for label in self.list_of_subjects_to_be_processed: + datalad_get_list.append( + "sub-{}/anat/sub-{}*_T1w.*".format(label, label) + ) + datalad_get_list.append( + "derivatives/{}/sub-{}/*".format(__freesurfer_directory__, label) + ) + if self.run_dmri_pipeline: + datalad_get_list.append( + "sub-{}/dwi/sub-{}*_dwi.*".format(label, label) + ) + if self.run_fmri_pipeline: + datalad_get_list.append( + "sub-{}/func/sub-{}*_bold.*".format(label, label) + ) + + cmd = ( + 'datalad save -d . -m "Dataset state after adding the container image. ' + 'Datasets ready to get files via datalad run."' + ) + try: + print_blue(f"... cmd: {cmd}") + self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) + except Exception: + print_error( + " DATALAD ERROR: Failed to add existing files to dataset" + ) + + cmd = 'datalad run -d . 
-m "Get files for sub-{}" bash -c "datalad get {}"'.format( + self.list_of_subjects_to_be_processed, " ".join(datalad_get_list) + ) + try: + print_blue(f"... cmd: {cmd}") + self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) + except Exception: + print_error( + " DATALAD ERROR: Failed to get files (cmd: datalad get {})".format( + " ".join(datalad_get_list) + ) + ) + + cmd = ( + 'datalad save -d . -m "Dataset state after getting the files. Dataset ready for connectome mapping." ' + "--version-tag ready4analysis-{}".format(time.strftime("%Y%m%d-%H%M%S")) + ) + try: + print_blue(f"... cmd: {cmd}") + self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) + except Exception: + print_error(" DATALAD ERROR: Failed to commit changes to dataset") + + cmd = "datalad status -d ." + try: + print_blue(f"... cmd: {cmd}") + self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) + except Exception: + print_error(" DATALAD ERROR: Failed to run datalad rev-status") + + # maxprocs = multiprocessing.cpu_count() + processes = [] + + self.docker_running = True + + if self.datalad_is_available and self.data_provenance_tracking: + + proc = self.start_bidsapp_participant_level_process_with_datalad( + self.bidsapp_tag, self.list_of_subjects_to_be_processed + ) + + else: + proc = self.start_bidsapp_participant_level_process( + self.bidsapp_tag, self.list_of_subjects_to_be_processed + ) + + processes.append(proc) + + while len(processes) > 0: + self.manage_bidsapp_procs(processes) + + if self.datalad_is_available and self.data_provenance_tracking: + # Clean remaining cache files generated in tmp/ of the docker image + # project.clean_cache(self.bids_root) + + cmd = 'datalad save -d . -m "Dataset processed by the connectomemapper-bidsapp:{}" --version-tag processed-{}'.format( + self.bidsapp_tag, time.strftime("%Y%m%d-%H%M%S") + ) + try: + print_blue(f"... cmd: {cmd}") + self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) + except Exception: + print_error( + " DATALAD ERROR: Failed to commit derivatives to datalad dataset" + ) + + cmd = "datalad diff -t HEAD~1" + try: + print_blue(f"... cmd: {cmd}") + self.run(cmd, env={}, cwd=os.path.abspath(self.bids_root)) + except Exception: + print_error(" DATALAD ERROR: Failed to run datalad diff -t HEAD~1") + + print("Processing with BIDS App Finished") + self.docker_running = False + return True + + # def stop_bids_app(self, ui_info): + # print("Stop BIDS App") + # #self.docker_process.kill() + # self.docker_running = False + # return True diff --git a/cmp/bidsappmanager/gui/config.py b/cmp/bidsappmanager/gui/config.py new file mode 100644 index 000000000..7e8accbbf --- /dev/null +++ b/cmp/bidsappmanager/gui/config.py @@ -0,0 +1,273 @@ +# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors +# All rights reserved. +# +# This software is distributed under the open-source license Modified BSD. 
+ +"""Connectome Mapper Pipeline Configurator Window.""" + +# General imports +import os + +import pkg_resources + +from pyface.api import ImageResource +from traitsui.qt4.extra.qt_view import QtView +from traitsui.api import * +from traits.api import * + +# Own imports +import cmp.project +from cmtklib.util import ( + return_button_style_sheet, + print_blue +) + +import cmp.bidsappmanager.project as project +import cmp.bidsappmanager.gui.handlers +from cmp.bidsappmanager.gui.globals import ( + style_sheet, get_icon +) + +# Remove warnings visible whenever you import scipy (or another package) +# that was compiled against an older numpy than is installed. +import warnings +warnings.filterwarnings("ignore", message="numpy.dtype size changed") +warnings.filterwarnings("ignore", message="numpy.ufunc size changed") + + +class PipelineConfiguratorWindow(HasTraits): + """Class that defines the Pipeline Configurator Window. + + Attributes + ---------- + project_info : cmp.project.ProjectInfo + Instance of :class:`CMP_Project_Info` that represents the processing project + + anat_pipeline : Instance(HasTraits) + Instance of anatomical MRI pipeline UI + + dmri_pipeline : Instance(HasTraits) + Instance of diffusion MRI pipeline UI + + fmri_pipeline : Instance(HasTraits) + Instance of functional MRI pipeline UI + + anat_inputs_checked : traits.Bool + Boolean that indicates if anatomical pipeline inputs are available + (Default: False) + + dmri_inputs_checked = : traits.Bool + Boolean that indicates if diffusion pipeline inputs are available + (Default: False) + + fmri_inputs_checked : traits.Bool + Boolean that indicates if functional pipeline inputs are available + (Default: False) + + anat_save_config : traits.ui.Action + TraitsUI Action to save the anatomical pipeline configuration + + dmri_save_config : traits.ui.Action + TraitsUI Action to save the diffusion pipeline configuration + + fmri_save_config : traits.ui.Action + TraitsUI Action to save the functional pipeline configuration + + save_all_config : traits.ui.Button + Button to save all configuration files at once + + traits_view : QtView + TraitsUI QtView that describes the content of the window + """ + project_info = Instance(cmp.project.ProjectInfo) + + anat_pipeline = Instance(HasTraits) + dmri_pipeline = Instance(HasTraits) + fmri_pipeline = Instance(HasTraits) + + anat_inputs_checked = Bool(False) + dmri_inputs_checked = Bool(False) + fmri_inputs_checked = Bool(False) + + anat_save_config = Action( + name="Save anatomical pipeline configuration as...", + action="save_anat_config_file", + ) + dmri_save_config = Action( + name="Save diffusion pipeline configuration as...", + action="save_dmri_config_file", + ) + fmri_save_config = Action( + name="Save fMRI pipeline configuration as...", action="save_fmri_config_file" + ) + + # anat_load_config = Action(name='Load anatomical pipeline configuration...',action='anat_load_config_file') + # dmri_load_config = Action(name='Load diffusion pipeline configuration...',action='load_dmri_config_file') + # fmri_load_config = Action(name='Load fMRI pipeline configuration...',action='load_fmri_config_file') + + save_all_config = Button("") + + traits_view = QtView( + Group( + Group( + Item("anat_pipeline", style="custom", show_label=False), + label="Anatomical pipeline", + dock="tab", + ), + Group( + Item( + "dmri_pipeline", + style="custom", + show_label=False, + enabled_when="dmri_inputs_checked", + visible_when="dmri_inputs_checked", + ), + label="Diffusion pipeline", + dock="tab", + ), + Group( + 
Item( + "fmri_pipeline", + style="custom", + show_label=False, + enabled_when="fmri_inputs_checked", + visible_when="fmri_inputs_checked", + ), + label="fMRI pipeline", + dock="tab", + ), + orientation="horizontal", + layout="tabbed", + springy=True, + enabled_when="anat_inputs_checked", + ), + spring, + HGroup( + spring, + Item( + "save_all_config", + style="custom", + width=315, + height=35, + resizable=False, + label="", + show_label=False, + style_sheet=return_button_style_sheet( + ImageResource( + pkg_resources.resource_filename( + "resources", + os.path.join("buttons", "configurator-saveall.png"), + ) + ).absolute_path + ), + enabled_when="anat_inputs_checked==True", + ), + spring, + show_labels=False, + label="", + ), + title="Connectome Mapper 3 Configurator", + menubar=MenuBar( + Menu( + ActionGroup(anat_save_config, dmri_save_config, fmri_save_config), + ActionGroup(Action(name="Quit", action="_on_close")), + name="File", + ) + ), + handler=cmp.bidsappmanager.gui.handlers.ConfigQualityWindowHandler(), + style_sheet=style_sheet, + buttons=[], + width=0.5, + height=0.8, + resizable=True, # scrollable=True, + icon=get_icon("configurator.png"), + ) + + def __init__( + self, + project_info=None, + anat_pipeline=None, + dmri_pipeline=None, + fmri_pipeline=None, + anat_inputs_checked=False, + dmri_inputs_checked=False, + fmri_inputs_checked=False, + ): + """Constructor of an :class:``PipelineConfiguratorWindow`` instance. + + Parameters + ---------- + project_info : cmp.project.ProjectInfo + :class:`CMP_Project_Info` object (Default: None) + + anat_pipeline + Instance of :class:`cmp.bidsappmanager.pipelines.anatomical.AnatomicalPipelineUI` + (Default: None) + + dmri_pipeline + Instance of :class:`cmp.bidsappmanager.pipelines.diffusion.DiffusionPipelineUI` + (Default: None) + + fmri_pipeline + Instance of :class:`cmp.bidsappmanager.pipelines.functional.fMRIPipelineUI` + (Default: None) + + anat_inputs_checked : traits.Bool + Boolean that indicates if anatomical pipeline inputs are available + (Default: False) + + dmri_inputs_checked = : traits.Bool + Boolean that indicates if diffusion pipeline inputs are available + (Default: False) + + fmri_inputs_checked : traits.Bool + Boolean that indicates if functional pipeline inputs are available + (Default: False) + """ + print("> Initialize window...") + self.project_info = project_info + + self.anat_pipeline = anat_pipeline + self.dmri_pipeline = dmri_pipeline + self.fmri_pipeline = fmri_pipeline + + if self.anat_pipeline is not None: + self.anat_pipeline.view_mode = "config_view" + + if self.dmri_pipeline is not None: + self.dmri_pipeline.view_mode = "config_view" + + if self.fmri_pipeline is not None: + self.fmri_pipeline.view_mode = "config_view" + + self.anat_inputs_checked = anat_inputs_checked + self.dmri_inputs_checked = dmri_inputs_checked + self.fmri_inputs_checked = fmri_inputs_checked + + def update_diffusion_imaging_model(self, new): + self.dmri_pipeline.diffusion_imaging_model = new + + def _save_all_config_fired(self): + print_blue("[Save all pipeline configuration files]") + + if self.anat_inputs_checked: + anat_config_file = os.path.join( + self.project_info.base_directory, "code", "ref_anatomical_config.json" + ) + project.anat_save_config(self.anat_pipeline, anat_config_file) + print(" * Anatomical config saved as {}".format(anat_config_file)) + + if self.dmri_inputs_checked: + dmri_config_file = os.path.join( + self.project_info.base_directory, "code", "ref_diffusion_config.json" + ) + 
project.dmri_save_config(self.dmri_pipeline, dmri_config_file) + print(" * Diffusion config saved as {}".format(dmri_config_file)) + + if self.fmri_inputs_checked: + fmri_config_file = os.path.join( + self.project_info.base_directory, "code", "ref_fMRI_config.json" + ) + project.fmri_save_config(self.fmri_pipeline, fmri_config_file) + print(" * fMRI config saved as {}".format(fmri_config_file)) diff --git a/cmp/bidsappmanager/gui/globals.py b/cmp/bidsappmanager/gui/globals.py new file mode 100644 index 000000000..53f781039 --- /dev/null +++ b/cmp/bidsappmanager/gui/globals.py @@ -0,0 +1,123 @@ +# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors +# All rights reserved. +# +# This software is distributed under the open-source license Modified BSD. + +"""Modules that defines multiple variables and functions used by the different windows of the GUI.""" + +import os +from pyface.api import ImageResource + +# Remove warnings visible whenever you import scipy (or another package) +# that was compiled against an older numpy than is installed. +import warnings +warnings.filterwarnings("ignore", message="numpy.dtype size changed") +warnings.filterwarnings("ignore", message="numpy.ufunc size changed") + + +# global modal_width +modal_width = 400 + +# global style_sheet +style_sheet = """ + QLabel { + font: 12pt "Verdana"; + margin-left: 5px; + background-color: transparent; + } + QPushButton { + border: 0px solid lightgray; + border-radius: 4px; + color: transparent; + background-color: transparent; + min-width: 222px; + icon-size: 222px; + font: 12pt "Verdana"; + margin: 0px 0px 0px 0px; + padding:0px 0px; + } + QPushButton:pressed { + background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, + stop: 0 #dadbde, stop: 1 #f6f7fa); + } + QMenuBar { + background-color: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, + stop: 0 #dadbde, stop: 1 #f6f7fa) + font: 14pt "Verdana"; + } + QMenuBar::item { + spacing: 5px; /* spacing between menu bar items */ + padding: 5px 5px; + background: transparent; + border-radius: 4px; + } + QMenuBar::item:selected { /* when selected using mouse or keyboard */ + background: #a8a8a8; + } + QMenuBar::item:pressed { + background: #888888; + } + QMainWindow { + background-color: yellow; + image: url("images/cmp.png"); + } + QMainWindow::separator { + background: yellow; + width: 1px; /* when vertical */ + height: 1px; /* when horizontal */ + } + QMainWindow::separator:hover { + background: red; + } + + QListView::item:selected { + border: 1px solid #6a6ea9; + } + + QListView::item:selected:!active { + background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, + stop: 0 #ABAFE5, stop: 1 #8588B2); + } + + QListView::item:selected:active { + background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, + stop: 0 #6a6ea9, stop: 1 #888dd9); + } + + QListView::item:hover { + background: qlineargradient(x1: 0, y1: 0, x2: 0, y2: 1, + stop: 0 #FAFBFE, stop: 1 #DCDEF1); + } + QProgressBar { + border: 2px solid grey; + border-radius: 5px; + } + + QProgressBar::chunk { + background-color: #05B8CC; + width: 20px; + } + """ + + +def get_icon(path): + """Return an instance of `ImageResource` or None is there is not graphical backend. + + Parameters + ---------- + path : string + Path to an image file + + Returns + ------- + icon : ImageResource + Return an instance of `ImageResource` or None is there is not graphical backend. 
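+
+    Examples
+    --------
+    A minimal usage sketch, reusing the ``configurator.png`` icon that the
+    Configurator window already passes to this helper:
+
+    >>> icon = get_icon("configurator.png")  # doctest: +SKIP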
+ """ + on_rtd = os.environ.get("READTHEDOCS") == "True" + if on_rtd: + print("READTHEDOCS: Return None for icon") + icon = None + else: + icon = ImageResource(path) + return icon diff --git a/cmp/bidsappmanager/gui/handlers.py b/cmp/bidsappmanager/gui/handlers.py new file mode 100644 index 000000000..ba8f9a9db --- /dev/null +++ b/cmp/bidsappmanager/gui/handlers.py @@ -0,0 +1,2247 @@ +import multiprocessing +import os +import shutil +from subprocess import Popen + +from bids import BIDSLayout +from pyface.constant import OK +from pyface.file_dialog import FileDialog +from traits.has_traits import HasTraits +from traits.trait_types import Bool, Instance +from traitsui.handler import Handler +from traitsui.message import error + +# Own imports +from cmtklib.config import ( + anat_save_config, get_anat_process_detail_json, + dmri_save_config, fmri_save_config, + get_dmri_process_detail_json, get_fmri_process_detail_json, + anat_load_config_json, dmri_load_config_json, + fmri_load_config_json, convert_config_ini_2_json +) +from cmtklib.process import run +from cmtklib.util import print_warning, print_error, print_blue + +import cmp.bidsappmanager.project +from cmp.bidsappmanager.pipelines.anatomical import anatomical as anatomical_pipeline +from cmp.bidsappmanager.pipelines.diffusion import diffusion as diffusion_pipeline +from cmp.bidsappmanager.pipelines.functional import fMRI as fMRI_pipeline + + +class ConfigQualityWindowHandler(Handler): + """Event handler of the Configurator and Inspector (Quality Control) windows. + + Attributes + ---------- + project_loaded : traits.Bool + Indicate if project has been successfully loaded + (Default: False) + + anat_pipeline : Instance(HasTraits) + Instance of :class:`AnatomicalPipelineUI` class + + anat_inputs_checked : traits.Bool + Indicate if anatomical pipeline inputs are available + (Default: False) + + anat_outputs_checked : traits.Bool + Indicate if anatomical pipeline outputs are available + (Default: False) + + anatomical_processed : traits.Bool + Indicate if anatomical pipeline was run + (Default: False) + + dmri_pipeline : Instance(HasTraits) + Instance of :class:`DiffusionPipelineUI` class + + dmri_inputs_checked : traits.Bool + Indicate if diffusion pipeline inputs are available + (Default: False) + + dmri_processed : traits.Bool + Indicate if diffusion pipeline was run + (Default: False) + + fmri_pipeline : Instance(HasTraits) + Instance of :class:`fMRIPipelineUI` class + + fmri_inputs_checked : traits.Bool + Indicate if fMRI pipeline inputs are available + (Default: False) + + fmri_processed : traits.Bool + Indicate if fMRI pipeline was run + (Default: False) + """ + + project_loaded = Bool(False) + + anat_pipeline = Instance(HasTraits) + anat_inputs_checked = Bool(False) + anat_outputs_checked = Bool(False) + anatomical_processed = Bool(False) + + dmri_pipeline = Instance(HasTraits) + dmri_inputs_checked = Bool(False) + dmri_processed = Bool(False) + + fmri_pipeline = Instance(HasTraits) + fmri_inputs_checked = Bool(False) + fmri_processed = Bool(False) + + def new_project(self, ui_info): + """Function that creates a new :class:`ProjectInfoUI` instance. 
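+
+        It prompts the user for the root directory of the BIDS dataset, parses
+        the dataset with :class:`bids.BIDSLayout` to list the available
+        subjects and sessions, and then initializes the anatomical, diffusion
+        and fMRI pipelines (and their default configuration files) for the
+        selected subject.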
+ + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with ``self`` + """ + print("> Load Project") + new_project = cmp.bidsappmanager.project.ProjectInfoUI() + np_res = new_project.configure_traits(view="create_view") + ui_info.ui.context["object"].handler = self + + if np_res and os.path.exists(new_project.base_directory): + try: + bids_layout = BIDSLayout(new_project.base_directory) + new_project.bids_layout = bids_layout + print(bids_layout) + + for subj in bids_layout.get_subjects(): + if "sub-" + str(subj) not in new_project.subjects: + new_project.subjects.append("sub-" + str(subj)) + + print(" .. INFO: Available subjects : ") + print(new_project.subjects) + new_project.number_of_subjects = len(new_project.subjects) + + np_res = new_project.configure_traits(view="subject_view") + print(" .. INFO: Selected subject : " + new_project.subject) + + subject = new_project.subject.split("-")[1] + new_project.subject_sessions = [""] + new_project.subject_session = "" + + sessions = bids_layout.get( + target="session", return_type="id", subject=subject + ) + + if len(sessions) > 0: + print("Warning: multiple sessions") + for ses in sessions: + new_project.subject_sessions.append("ses-" + str(ses)) + np_res = new_project.configure_traits(view="subject_session_view") + print( + " .. INFO: Selected session : " + new_project.subject_session + ) + + except Exception as e: + msg = "Invalid BIDS dataset. Please see documentation for more details." + print_warning(f" .. EXCEPTION: {msg}") + print_error(f" : {e}") + error(message=msg, title="BIDS error") + return + + self.anat_pipeline = cmp.bidsappmanager.project.init_anat_project(new_project, True) + if self.anat_pipeline is not None: + anat_inputs_checked = self.anat_pipeline.check_input(bids_layout) + if anat_inputs_checked: + ui_info.ui.context["object"].project_info = new_project + self.anat_pipeline.number_of_cores = new_project.number_of_cores + ui_info.ui.context["object"].anat_pipeline = self.anat_pipeline + self.anat_inputs_checked = anat_inputs_checked + ui_info.ui.context[ + "object" + ].project_info.t1_available = self.anat_inputs_checked + + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_subject_anat_pipeline, + "subject", + ) + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_session_anat_pipeline, + "subject_session", + ) + anat_save_config( + self.anat_pipeline, + ui_info.ui.context["object"].project_info.anat_config_file, + ) + self.project_loaded = True + + ui_info.ui.context[ + "object" + ].project_info.parcellation_scheme = get_anat_process_detail_json( + new_project, "parcellation_stage", "parcellation_scheme" + ) + ui_info.ui.context[ + "object" + ].project_info.freesurfer_subjects_dir = get_anat_process_detail_json( + new_project, "segmentation_stage", "freesurfer_subjects_dir" + ) + ui_info.ui.context[ + "object" + ].project_info.freesurfer_subject_id = get_anat_process_detail_json( + new_project, "segmentation_stage", "freesurfer_subject_id" + ) + + dmri_inputs_checked, self.dmri_pipeline = cmp.bidsappmanager.project.init_dmri_project( + new_project, bids_layout, True + ) + if self.dmri_pipeline is not None: + if dmri_inputs_checked: + self.dmri_pipeline.number_of_cores = ( + new_project.number_of_cores + ) + print( + " .. 
INFO: Number of cores (pipeline) = %s" + % self.dmri_pipeline.number_of_cores + ) + self.dmri_pipeline.parcellation_scheme = ui_info.ui.context[ + "object" + ].project_info.parcellation_scheme + ui_info.ui.context[ + "object" + ].dmri_pipeline = self.dmri_pipeline + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context[ + "object" + ].update_subject_dmri_pipeline, + "subject", + ) + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context[ + "object" + ].update_session_dmri_pipeline, + "subject_session", + ) + dmri_save_config( + self.dmri_pipeline, + ui_info.ui.context[ + "object" + ].project_info.dmri_config_file, + ) + self.dmri_inputs_checked = dmri_inputs_checked + ui_info.ui.context[ + "object" + ].project_info.dmri_available = self.dmri_inputs_checked + self.project_loaded = True + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context[ + "object" + ].update_diffusion_imaging_model, + "diffusion_imaging_model", + ) + + fmri_inputs_checked, self.fmri_pipeline = cmp.bidsappmanager.project.init_fmri_project( + new_project, bids_layout, True + ) + if self.fmri_pipeline is not None: + if fmri_inputs_checked: + self.fmri_pipeline.number_of_cores = ( + new_project.number_of_cores + ) + print( + " .. INFO: Number of cores (pipeline) = %s" + % self.fmri_pipeline.number_of_cores + ) + self.fmri_pipeline.parcellation_scheme = ui_info.ui.context[ + "object" + ].project_info.parcellation_scheme + self.fmri_pipeline.subjects_dir = ui_info.ui.context[ + "object" + ].project_info.freesurfer_subjects_dir + self.fmri_pipeline.subject_id = ui_info.ui.context[ + "object" + ].project_info.freesurfer_subject_id + ui_info.ui.context[ + "object" + ].fmri_pipeline = self.fmri_pipeline + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context[ + "object" + ].update_subject_fmri_pipeline, + "subject", + ) + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context[ + "object" + ].update_session_fmri_pipeline, + "subject_session", + ) + fmri_save_config( + self.fmri_pipeline, + ui_info.ui.context[ + "object" + ].project_info.fmri_config_file, + ) + self.fmri_inputs_checked = fmri_inputs_checked + ui_info.ui.context[ + "object" + ].project_info.fmri_available = self.fmri_inputs_checked + self.project_loaded = True + + def load_project(self, ui_info): + """Function that creates a new :class:`ProjectInfoUI` instance from an existing project. + + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with ``self`` + """ + print("> Load Project") + loaded_project = cmp.bidsappmanager.project.ProjectInfoUI() + np_res = loaded_project.configure_traits(view="open_view") + ui_info.ui.context["object"].handler = self + + print(" .. INFO: BIDS directory: %s" % loaded_project.base_directory) + try: + bids_layout = BIDSLayout(loaded_project.base_directory) + loaded_project.bids_layout = bids_layout + + loaded_project.subjects = [] + for subj in bids_layout.get_subjects(): + if "sub-" + str(subj) not in loaded_project.subjects: + loaded_project.subjects.append("sub-" + str(subj)) + loaded_project.subjects.sort() + + print(" .. INFO: Available subjects : ") + print(loaded_project.subjects) + loaded_project.number_of_subjects = len(loaded_project.subjects) + + except ValueError as e: + msg = str(e) + error(message=msg, title="BIDS error") + return + except Exception: + error( + message="Invalid BIDS dataset. 
Please see documentation for more details.", + title="BIDS error", + ) + return + + self.anat_inputs_checked = False + + if np_res and os.path.exists(loaded_project.base_directory): + sessions = [] + for subj in bids_layout.get_subjects(): + subj_sessions = bids_layout.get( + target="session", return_type="id", subject=subj + ) + for subj_session in subj_sessions: + sessions.append(subj_session) + + loaded_project.anat_available_config = [] + + for subj in bids_layout.get_subjects(): + subj_sessions = bids_layout.get( + target="session", return_type="id", subject=subj + ) + if len(subj_sessions) > 0: + for subj_session in subj_sessions: + config_file = os.path.join( + loaded_project.base_directory, + "derivatives", + "sub-%s_ses-%s_anatomical_config.json" + % (subj, subj_session), + ) + if os.path.isfile(config_file): + loaded_project.anat_available_config.append( + "sub-%s_ses-%s" % (subj, subj_session) + ) + else: + config_file = os.path.join( + loaded_project.base_directory, + "derivatives", + "sub-%s_anatomical_config.json" % subj, + ) + if os.path.isfile(config_file): + loaded_project.anat_available_config.append("sub-%s" % subj) + + if len(loaded_project.anat_available_config) > 1: + loaded_project.anat_available_config.sort() + loaded_project.anat_config_to_load = ( + loaded_project.anat_available_config[0] + ) + anat_config_selected = loaded_project.configure_traits( + view="anat_select_config_to_load" + ) + + if not anat_config_selected: + return 0 + else: + loaded_project.anat_config_to_load = ( + loaded_project.anat_available_config[0] + ) + + print( + " .. INFO: Anatomical config to load: %s" + % loaded_project.anat_config_to_load + ) + loaded_project.anat_config_file = os.path.join( + loaded_project.base_directory, + "derivatives", + "%s_anatomical_config.json" % loaded_project.anat_config_to_load, + ) + print( + " .. INFO: Anatomical config file: %s" + % loaded_project.anat_config_file + ) + + loaded_project.subject = get_anat_process_detail_json( + loaded_project, "Global", "subject" + ) + loaded_project.subject_sessions = [ + "ses-%s" % s + for s in bids_layout.get( + target="session", + return_type="id", + subject=loaded_project.subject.split("-")[1], + ) + ] + if len(loaded_project.subject_sessions) > 0: + print(" .. INFO: Dataset has session(s)") + loaded_project.subject_session = get_anat_process_detail_json( + loaded_project, "Global", "subject_session" + ) + print("Selected session : " + loaded_project.subject_session) + else: + loaded_project.subject_sessions = [""] + loaded_project.subject_session = "" + print(" .. 
INFO: Dataset has no session") + + loaded_project.parcellation_scheme = get_anat_process_detail_json( + loaded_project, "parcellation_stage", "parcellation_scheme" + ) + loaded_project.atlas_info = get_anat_process_detail_json( + loaded_project, "parcellation_stage", "atlas_info" + ) + loaded_project.freesurfer_subjects_dir = get_anat_process_detail_json( + loaded_project, "segmentation_stage", "freesurfer_subjects_dir" + ) + loaded_project.freesurfer_subject_id = get_anat_process_detail_json( + loaded_project, "segmentation_stage", "freesurfer_subject_id" + ) + + self.anat_pipeline = cmp.bidsappmanager.project.init_anat_project(loaded_project, False) + if self.anat_pipeline is not None: + anat_inputs_checked = self.anat_pipeline.check_input(bids_layout) + if anat_inputs_checked: + cmp.bidsappmanager.project.update_anat_last_processed( + loaded_project, self.anat_pipeline + ) # Not required as the project is new, so no update should be done on processing status + ui_info.ui.context["object"].project_info = loaded_project + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_subject_anat_pipeline, + "subject", + ) + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_session_anat_pipeline, + "subject_session", + ) + ui_info.ui.context["object"].anat_pipeline = self.anat_pipeline + ui_info.ui.context[ + "object" + ].anat_pipeline.number_of_cores = ui_info.ui.context[ + "object" + ].project_info.number_of_cores + self.anat_inputs_checked = anat_inputs_checked + ui_info.ui.context[ + "object" + ].project_info.t1_available = self.anat_inputs_checked + anat_save_config( + self.anat_pipeline, + ui_info.ui.context["object"].project_info.anat_config_file, + ) + self.project_loaded = True + self.anat_outputs_checked, _ = self.anat_pipeline.check_output() + if self.anat_outputs_checked: + print(" .. INFO: Available outputs") + + loaded_project.dmri_available_config = [] + + subjid = loaded_project.subject.split("-")[1] + subj_sessions = bids_layout.get( + target="session", return_type="id", subject=subjid + ) + + if len(subj_sessions) > 0: + for subj_session in subj_sessions: + config_file = os.path.join( + loaded_project.base_directory, + "derivatives", + "%s_ses-%s_diffusion_config.json" + % (loaded_project.subject, subj_session), + ) + if ( + os.path.isfile(config_file) + and subj_session == loaded_project.subject_session.split("-")[1] + ): + loaded_project.dmri_available_config.append( + "%s_ses-%s" % (loaded_project.subject, subj_session) + ) + else: + config_file = os.path.join( + loaded_project.base_directory, + "derivatives", + "sub-%s_diffusion_config.json" % loaded_project.subject, + ) + if os.path.isfile(config_file): + loaded_project.dmri_available_config.append( + "%s" % loaded_project.subject + ) + + if len(loaded_project.dmri_available_config) > 1: + loaded_project.dmri_available_config.sort() + loaded_project.dmri_config_to_load = ( + loaded_project.dmri_available_config[0] + ) + dmri_config_selected = loaded_project.configure_traits( + view="dmri_select_config_to_load" + ) + if not dmri_config_selected: + return 0 + elif not loaded_project.dmri_available_config: + loaded_project.dmri_config_to_load = ( + "%s_diffusion" % loaded_project.subject + ) + else: + loaded_project.dmri_config_to_load = ( + loaded_project.dmri_available_config[0] + ) + + print( + " .. 
INFO: Diffusion config to load: %s" + % loaded_project.dmri_config_to_load + ) + loaded_project.dmri_config_file = os.path.join( + loaded_project.base_directory, + "derivatives", + "%s_diffusion_config.json" % loaded_project.dmri_config_to_load, + ) + print( + " .. INFO: Diffusion config file: %s" % loaded_project.dmri_config_file + ) + + if os.path.isfile(loaded_project.dmri_config_file): + print(" .. INFO: Load existing diffusion config file") + loaded_project.process_type = get_dmri_process_detail_json( + loaded_project, "Global", "process_type" + ) + loaded_project.diffusion_imaging_model = get_dmri_process_detail_json( + loaded_project, "Global", "diffusion_imaging_model" + ) + + dmri_inputs_checked, self.dmri_pipeline = cmp.bidsappmanager.project.init_dmri_project( + loaded_project, bids_layout, False + ) + if self.dmri_pipeline is not None: + if dmri_inputs_checked: + cmp.bidsappmanager.project.update_dmri_last_processed(loaded_project, self.dmri_pipeline) + ui_info.ui.context["object"].project_info = loaded_project + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_subject_dmri_pipeline, + "subject", + ) + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_session_dmri_pipeline, + "subject_session", + ) + self.dmri_pipeline.parcellation_scheme = ( + loaded_project.parcellation_scheme + ) + self.dmri_pipeline.atlas_info = loaded_project.atlas_info + ui_info.ui.context["object"].dmri_pipeline = self.dmri_pipeline + ui_info.ui.context[ + "object" + ].dmri_pipeline.number_of_cores = ui_info.ui.context[ + "object" + ].project_info.number_of_cores + self.dmri_inputs_checked = dmri_inputs_checked + ui_info.ui.context[ + "object" + ].project_info.dmri_available = self.dmri_inputs_checked + dmri_save_config( + self.dmri_pipeline, + ui_info.ui.context["object"].project_info.dmri_config_file, + ) + self.project_loaded = True + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_diffusion_imaging_model, + "diffusion_imaging_model", + ) + else: + dmri_inputs_checked, self.dmri_pipeline = cmp.bidsappmanager.project.init_dmri_project( + loaded_project, bids_layout, True + ) + print_warning( + " .. WARNING: No existing config for diffusion pipeline found - " + + "Created new diffusion pipeline with default parameters" + ) + if ( + self.dmri_pipeline is not None + ): # and self.dmri_pipeline is not None: + if dmri_inputs_checked: + ui_info.ui.context["object"].project_info = loaded_project + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_subject_dmri_pipeline, + "subject", + ) + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_session_dmri_pipeline, + "subject_session", + ) + self.dmri_pipeline.number_of_cores = ( + loaded_project.number_of_cores + ) + print( + " .. 
INFO: Number of cores (pipeline) = %s" + % self.dmri_pipeline.number_of_cores + ) + self.dmri_pipeline.parcellation_scheme = ( + loaded_project.parcellation_scheme + ) + self.dmri_pipeline.atlas_info = loaded_project.atlas_info + ui_info.ui.context["object"].dmri_pipeline = self.dmri_pipeline + dmri_save_config( + self.dmri_pipeline, + ui_info.ui.context["object"].project_info.dmri_config_file, + ) + self.dmri_inputs_checked = dmri_inputs_checked + ui_info.ui.context[ + "object" + ].project_info.dmri_available = self.dmri_inputs_checked + self.project_loaded = True + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_diffusion_imaging_model, + "diffusion_imaging_model", + ) + + if len(subj_sessions) > 0: + for subj_session in subj_sessions: + config_file = os.path.join( + loaded_project.base_directory, + "derivatives", + "%s_ses-%s_fMRI_config.json" + % (loaded_project.subject, subj_session), + ) + if ( + os.path.isfile(config_file) + and subj_session == loaded_project.subject_session.split("-")[1] + ): + loaded_project.fmri_available_config.append( + "%s_ses-%s" % (loaded_project.subject, subj_session) + ) + else: + config_file = os.path.join( + loaded_project.base_directory, + "derivatives", + "sub-%s_fMRI_config.json" % loaded_project.subject, + ) + if os.path.isfile(config_file): + loaded_project.fmri_available_config.append( + "sub-%s" % loaded_project.subject + ) + + if len(loaded_project.fmri_available_config) > 1: + loaded_project.fmri_available_config.sort() + loaded_project.fmri_config_to_load = ( + loaded_project.fmri_available_config[0] + ) + fmri_config_selected = loaded_project.configure_traits( + view="fmri_select_config_to_load" + ) + if not fmri_config_selected: + return 0 + elif not loaded_project.fmri_available_config: + loaded_project.fmri_config_to_load = "%s_fMRI" % loaded_project.subject + else: + loaded_project.fmri_config_to_load = ( + loaded_project.fmri_available_config[0] + ) + + print( + " .. INFO: fMRI config to load: %s" + % loaded_project.fmri_config_to_load + ) + loaded_project.fmri_config_file = os.path.join( + loaded_project.base_directory, + "derivatives", + "%s_fMRI_config.json" % loaded_project.fmri_config_to_load, + ) + print(" .. INFO: fMRI config file: %s" % loaded_project.fmri_config_file) + + if os.path.isfile(loaded_project.fmri_config_file): + print(" .. 
INFO: Load existing fmri config file") + loaded_project.process_type = get_fmri_process_detail_json( + loaded_project, "Global", "process_type" + ) + + fmri_inputs_checked, self.fmri_pipeline = cmp.bidsappmanager.project.init_fmri_project( + loaded_project, bids_layout, False + ) + if self.fmri_pipeline is not None: + if fmri_inputs_checked: + cmp.bidsappmanager.project.update_fmri_last_processed(loaded_project, self.fmri_pipeline) + ui_info.ui.context["object"].project_info = loaded_project + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_subject_fmri_pipeline, + "subject", + ) + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_session_fmri_pipeline, + "subject_session", + ) + self.fmri_pipeline.parcellation_scheme = ( + loaded_project.parcellation_scheme + ) + self.fmri_pipeline.atlas_info = loaded_project.atlas_info + self.fmri_pipeline.subjects_dir = ( + loaded_project.freesurfer_subjects_dir + ) + self.fmri_pipeline.subject_id = ( + loaded_project.freesurfer_subject_id + ) + ui_info.ui.context["object"].fmri_pipeline = self.fmri_pipeline + ui_info.ui.context[ + "object" + ].fmri_pipeline.number_of_cores = ui_info.ui.context[ + "object" + ].project_info.number_of_cores + self.fmri_inputs_checked = fmri_inputs_checked + ui_info.ui.context[ + "object" + ].project_info.fmri_available = self.fmri_inputs_checked + fmri_save_config( + self.fmri_pipeline, + ui_info.ui.context["object"].project_info.fmri_config_file, + ) + self.project_loaded = True + else: + fmri_inputs_checked, self.fmri_pipeline = cmp.bidsappmanager.project.init_fmri_project( + loaded_project, bids_layout, True + ) + print_warning( + " .. WARNING: No existing config for fMRI pipeline found - " + + "Created new fMRI pipeline with default parameters" + ) + if self.fmri_pipeline is not None: + if fmri_inputs_checked: + ui_info.ui.context["object"].project_info = loaded_project + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_subject_fmri_pipeline, + "subject", + ) + ui_info.ui.context["object"].project_info.on_trait_change( + ui_info.ui.context["object"].update_session_fmri_pipeline, + "subject_session", + ) + self.fmri_pipeline.number_of_cores = ( + loaded_project.number_of_cores + ) + print( + " .. INFO: Number of cores (pipeline) = %s" + % self.fmri_pipeline.number_of_cores + ) + self.fmri_pipeline.parcellation_scheme = ( + loaded_project.parcellation_scheme + ) + self.fmri_pipeline.atlas_info = loaded_project.atlas_info + self.fmri_pipeline.subjects_dir = ( + loaded_project.freesurfer_subjects_dir + ) + self.fmri_pipeline.subject_id = ( + loaded_project.freesurfer_subject_id + ) + ui_info.ui.context["object"].fmri_pipeline = self.fmri_pipeline + fmri_save_config( + self.fmri_pipeline, + ui_info.ui.context["object"].project_info.fmri_config_file, + ) + self.fmri_inputs_checked = fmri_inputs_checked + ui_info.ui.context[ + "object" + ].project_info.fmri_available = self.fmri_inputs_checked + self.project_loaded = True + + def update_subject_anat_pipeline(self, ui_info): + """Function that updates attributes of the :class:`AnatomicalPipelineUI` instance. 
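+
+        It is called whenever the selected subject or session changes: it
+        points the anatomical pipeline to the new subject directory, reloads
+        the corresponding anatomical configuration file if one already exists
+        (or creates a new pipeline with default parameters otherwise), and
+        re-checks the availability of the anatomical inputs and outputs.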
+ + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with ``self`` + """ + ui_info.handler = self + + self.anat_pipeline.subject = ui_info.project_info.subject + self.anat_pipeline.global_conf.subject = ui_info.project_info.subject + + updated_project = ui_info.project_info + + bids_layout = BIDSLayout(updated_project.base_directory) + + if len(updated_project.subject_sessions) > 0: + self.anat_pipeline.global_conf.subject_session = ( + updated_project.subject_session + ) + self.anat_pipeline.subject_directory = os.path.join( + updated_project.base_directory, + updated_project.subject, + updated_project.subject_session, + ) + updated_project.anat_config_file = os.path.join( + updated_project.base_directory, + "derivatives", + "%s_%s_anatomical_config.json" + % (updated_project.subject, updated_project.subject_session), + ) + else: + self.anat_pipeline.global_conf.subject_session = "" + self.anat_pipeline.subject_directory = os.path.join( + updated_project.base_directory, updated_project.subject + ) + updated_project.anat_config_file = os.path.join( + updated_project.base_directory, + "derivatives", + "%s_anatomical_config.json" % updated_project.subject, + ) + + self.anat_pipeline.derivatives_directory = os.path.join( + updated_project.base_directory, "derivatives" + ) + + if os.path.isfile(updated_project.anat_config_file): + print( + " .. INFO: Existing anatomical config file for subject %s: %s" + % (updated_project.subject, updated_project.anat_config_file) + ) + + updated_project.parcellation_scheme = get_anat_process_detail_json( + updated_project, "parcellation_stage", "parcellation_scheme" + ) + updated_project.atlas_info = get_anat_process_detail_json( + updated_project, "parcellation_stage", "atlas_info" + ) + updated_project.freesurfer_subjects_dir = get_anat_process_detail_json( + updated_project, "segmentation_stage", "freesurfer_subjects_dir" + ) + updated_project.freesurfer_subject_id = get_anat_process_detail_json( + updated_project, "segmentation_stage", "freesurfer_subject_id" + ) + + self.anat_pipeline = cmp.bidsappmanager.project.init_anat_project(updated_project, False) + if self.anat_pipeline is not None: + anat_inputs_checked = self.anat_pipeline.check_input(bids_layout) + if anat_inputs_checked: + cmp.bidsappmanager.project.update_anat_last_processed( + updated_project, self.anat_pipeline + ) # Not required as the project is new, so no update should be done on processing status + ui_info.project_info = updated_project + ui_info.project_info.on_trait_change( + ui_info.update_subject_anat_pipeline, "subject" + ) + ui_info.project_info.on_trait_change( + ui_info.update_session_anat_pipeline, "subject_session" + ) + ui_info.anat_pipeline = self.anat_pipeline + ui_info.anat_pipeline.number_of_cores = ( + ui_info.project_info.number_of_cores + ) + self.anat_inputs_checked = anat_inputs_checked + ui_info.project_info.t1_available = self.anat_inputs_checked + anat_save_config( + self.anat_pipeline, ui_info.project_info.anat_config_file + ) + self.project_loaded = True + self.anat_outputs_checked, msg = self.anat_pipeline.check_output() + if self.anat_outputs_checked: + print(" .. INFO: Available outputs") + + else: + print( + " .. 
INFO: Unprocessed anatomical data for subject %s" + % updated_project.subject + ) + self.anat_pipeline = cmp.bidsappmanager.project.init_anat_project(updated_project, True) + if self.anat_pipeline is not None: # and self.dmri_pipeline is not None: + anat_inputs_checked = self.anat_pipeline.check_input(bids_layout) + if anat_inputs_checked: + ui_info.project_info = updated_project + ui_info.project_info.on_trait_change( + ui_info.update_subject_anat_pipeline, "subject" + ) + ui_info.project_info.on_trait_change( + ui_info.update_session_anat_pipeline, "subject_session" + ) + self.anat_pipeline.number_of_cores = updated_project.number_of_cores + ui_info.anat_pipeline = self.anat_pipeline + self.anat_inputs_checked = anat_inputs_checked + ui_info.project_info.t1_available = self.anat_inputs_checked + anat_save_config( + self.anat_pipeline, ui_info.project_info.anat_config_file + ) + self.project_loaded = True + + ui_info.project_info.parcellation_scheme = get_anat_process_detail_json( + updated_project, "parcellation_stage", "parcellation_scheme" + ) + ui_info.project_info.freesurfer_subjects_dir = get_anat_process_detail_json( + updated_project, "segmentation_stage", "freesurfer_subjects_dir" + ) + ui_info.project_info.freesurfer_subject_id = get_anat_process_detail_json( + updated_project, "segmentation_stage", "freesurfer_subject_id" + ) + + return ui_info + + def update_subject_dmri_pipeline(self, ui_info): + """Function that updates attributes of the :class:`DiffusionPipelineUI` instance. + + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with ``self`` + """ + self.dmri_pipeline.subject = ui_info.project_info.subject + self.dmri_pipeline.global_conf.subject = ui_info.project_info.subject + + updated_project = ui_info.project_info + + bids_layout = BIDSLayout(updated_project.base_directory) + + if len(updated_project.subject_sessions) > 0: + self.dmri_pipeline.global_conf.subject_session = ( + updated_project.subject_session + ) + self.dmri_pipeline.subject_directory = os.path.join( + updated_project.base_directory, + updated_project.subject, + updated_project.subject_session, + ) + updated_project.dmri_config_file = os.path.join( + updated_project.base_directory, + "derivatives", + "%s_%s_diffusion_config.json" + % (updated_project.subject, updated_project.subject_session), + ) + else: + self.dmri_pipeline.global_conf.subject_session = "" + self.dmri_pipeline.subject_directory = os.path.join( + updated_project.base_directory, updated_project.subject + ) + updated_project.dmri_config_file = os.path.join( + updated_project.base_directory, + "derivatives", + "%s_diffusion_config.json" % updated_project.subject, + ) + + self.dmri_pipeline.derivatives_directory = os.path.join( + updated_project.base_directory, "derivatives" + ) + + if os.path.isfile(updated_project.dmri_config_file): + print(" .. 
INFO: Load existing diffusion config file") + updated_project.process_type = get_dmri_process_detail_json( + updated_project, "Global", "process_type" + ) + updated_project.diffusion_imaging_model = get_dmri_process_detail_json( + updated_project, "diffusion_stage", "diffusion_imaging_model" + ) + + dmri_inputs_checked, self.dmri_pipeline = cmp.bidsappmanager.project.init_dmri_project( + updated_project, bids_layout, False + ) + if self.dmri_pipeline is not None: # and self.dmri_pipeline is not None: + if dmri_inputs_checked: + cmp.bidsappmanager.project.update_dmri_last_processed(updated_project, self.dmri_pipeline) + ui_info.project_info = updated_project + ui_info.project_info.on_trait_change( + ui_info.update_subject_dmri_pipeline, "subject" + ) + ui_info.project_info.on_trait_change( + ui_info.update_session_dmri_pipeline, "subject_session" + ) + self.dmri_pipeline.parcellation_scheme = ( + updated_project.parcellation_scheme + ) + self.dmri_pipeline.atlas_info = updated_project.atlas_info + ui_info.dmri_pipeline = self.dmri_pipeline + ui_info.dmri_pipeline.number_of_cores = ( + ui_info.project_info.number_of_cores + ) + self.dmri_inputs_checked = dmri_inputs_checked + ui_info.project_info.dmri_available = self.dmri_inputs_checked + dmri_save_config( + self.dmri_pipeline, ui_info.project_info.dmri_config_file + ) + self.project_loaded = True + ui_info.project_info.on_trait_change( + ui_info.update_diffusion_imaging_model, + "diffusion_imaging_model", + ) + else: + dmri_inputs_checked, self.dmri_pipeline = cmp.bidsappmanager.project.init_dmri_project( + updated_project, bids_layout, True + ) + print_warning( + " .. WARNING: No existing config for diffusion pipeline found - " + + "Created new diffusion pipeline with default parameters" + ) + if self.dmri_pipeline is not None: # and self.dmri_pipeline is not None: + if dmri_inputs_checked: + ui_info.project_info = updated_project + ui_info.project_info.on_trait_change( + ui_info.update_subject_dmri_pipeline, "subject" + ) + ui_info.project_info.on_trait_change( + ui_info.update_session_dmri_pipeline, "subject_session" + ) + self.dmri_pipeline.number_of_cores = updated_project.number_of_cores + print( + " .. INFO: Number of cores (pipeline) = %s" + % self.dmri_pipeline.number_of_cores + ) + self.dmri_pipeline.parcellation_scheme = ( + updated_project.parcellation_scheme + ) + self.dmri_pipeline.atlas_info = updated_project.atlas_info + ui_info.dmri_pipeline = self.dmri_pipeline + dmri_save_config( + self.dmri_pipeline, ui_info.project_info.dmri_config_file + ) + self.dmri_inputs_checked = dmri_inputs_checked + ui_info.project_info.dmri_available = self.dmri_inputs_checked + self.project_loaded = True + ui_info.project_info.on_trait_change( + ui_info.update_diffusion_imaging_model, + "diffusion_imaging_model", + ) + + return ui_info + + def update_subject_fmri_pipeline(self, ui_info): + """Function that updates attributes of the :class:`fMRIPipelineUI` instance. 
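+
+        It is called whenever the selected subject or session changes: it
+        points the fMRI pipeline to the new subject directory, reloads the
+        corresponding fMRI configuration file if one already exists (or
+        creates a new pipeline with default parameters otherwise), and
+        propagates the FreeSurfer and parcellation information obtained from
+        the anatomical pipeline.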
+ + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with ``self`` + """ + ui_info.handler = self + + self.fmri_pipeline.subject = ui_info.project_info.subject + self.fmri_pipeline.global_conf.subject = ui_info.project_info.subject + + updated_project = ui_info.project_info + + bids_layout = BIDSLayout(updated_project.base_directory) + + if len(updated_project.subject_sessions) > 0: + self.fmri_pipeline.global_conf.subject_session = ( + updated_project.subject_session + ) + self.fmri_pipeline.subject_directory = os.path.join( + updated_project.base_directory, + ui_info.project_info.subject, + updated_project.subject_session, + ) + updated_project.fmri_config_file = os.path.join( + updated_project.base_directory, + "derivatives", + "%s_%s_fMRI_config.json" + % (updated_project.subject, updated_project.subject_session), + ) + else: + self.fmri_pipeline.global_conf.subject_session = "" + self.fmri_pipeline.subject_directory = os.path.join( + updated_project.base_directory, ui_info.project_info.subject + ) + updated_project.fmri_config_file = os.path.join( + updated_project.base_directory, + "derivatives", + "%s_fMRI_config.json" % updated_project.subject, + ) + + self.fmri_pipeline.derivatives_directory = os.path.join( + updated_project.base_directory, "derivatives" + ) + + print( + " .. INFO: fMRI config file loaded/created : %s" + % updated_project.fmri_config_file + ) + + if os.path.isfile(updated_project.fmri_config_file): + print( + " .. INFO: Load existing fMRI config file for subject %s" + % updated_project.subject + ) + updated_project.process_type = get_fmri_process_detail_json( + updated_project, "Global", "process_type" + ) + + fmri_inputs_checked, self.fmri_pipeline = cmp.bidsappmanager.project.init_fmri_project( + updated_project, bids_layout, False + ) + if self.fmri_pipeline is not None: + if fmri_inputs_checked: + cmp.bidsappmanager.project.update_fmri_last_processed(updated_project, self.fmri_pipeline) + ui_info.project_info = updated_project + ui_info.project_info.on_trait_change( + ui_info.update_subject_fmri_pipeline, "subject" + ) + ui_info.project_info.on_trait_change( + ui_info.update_session_fmri_pipeline, "subject_session" + ) + self.fmri_pipeline.parcellation_scheme = ( + updated_project.parcellation_scheme + ) + self.fmri_pipeline.atlas_info = updated_project.atlas_info + self.fmri_pipeline.subjects_dir = ( + updated_project.freesurfer_subjects_dir + ) + self.fmri_pipeline.subject_id = ( + updated_project.freesurfer_subject_id + ) + ui_info.fmri_pipeline = self.fmri_pipeline + + ui_info.fmri_pipeline.number_of_cores = ( + ui_info.project_info.number_of_cores + ) + self.fmri_inputs_checked = fmri_inputs_checked + ui_info.project_info.fmri_available = self.fmri_inputs_checked + fmri_save_config( + self.fmri_pipeline, ui_info.project_info.fmri_config_file + ) + self.project_loaded = True + else: + fmri_inputs_checked, self.fmri_pipeline = cmp.bidsappmanager.project.init_fmri_project( + updated_project, bids_layout, True + ) + print_warning( + " .. 
WARNING: No existing config for fMRI pipeline found but available fMRI data - " + + "Created new fMRI pipeline with default parameters" + ) + if self.fmri_pipeline is not None: + if fmri_inputs_checked: + ui_info.project_info = updated_project + ui_info.project_info.on_trait_change( + ui_info.update_subject_fmri_pipeline, "subject" + ) + ui_info.project_info.on_trait_change( + ui_info.update_session_fmri_pipeline, "subject_session" + ) + self.fmri_pipeline.number_of_cores = updated_project.number_of_cores + print( + " .. INFO: Number of cores (pipeline) = %s" + % self.fmri_pipeline.number_of_cores + ) + self.fmri_pipeline.parcellation_scheme = ( + updated_project.parcellation_scheme + ) + self.fmri_pipeline.atlas_info = updated_project.atlas_info + self.fmri_pipeline.subjects_dir = ( + updated_project.freesurfer_subjects_dir + ) + self.fmri_pipeline.subject_id = ( + updated_project.freesurfer_subject_id + ) + ui_info.fmri_pipeline = self.fmri_pipeline + fmri_save_config( + self.fmri_pipeline, ui_info.project_info.fmri_config_file + ) + self.fmri_inputs_checked = fmri_inputs_checked + ui_info.project_info.fmri_available = self.fmri_inputs_checked + self.project_loaded = True + + return ui_info + + @classmethod + def show_bidsapp_window(ui_info): + """Function that shows the BIDS App Interface Window. + + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with this handler + """ + print("Show BIDS App interface") + ui_info.ui.context["object"].show_bidsapp_interface() + + @classmethod + def save_anat_config_file(self, ui_info): + """Function that saves the anatomical pipeline configuration file. + + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with ``self`` + """ + print_blue("[Save anatomical pipeline configuration]") + dialog = FileDialog( + action="save as", + default_filename=os.path.join( + ui_info.ui.context["object"].project_info.base_directory, + "code", + "ref_anatomical_config.json", + ), + ) + dialog.open() + if dialog.return_code == OK: + anat_save_config( + ui_info.ui.context["object"].anat_pipeline, + ui_info.ui.context["object"].project_info.anat_config_file, + ) + if ( + dialog.path + != ui_info.ui.context["object"].project_info.anat_config_file + ): + shutil.copy( + ui_info.ui.context["object"].project_info.anat_config_file, + dialog.path, + ) + + def load_anat_config_file(self, ui_info): + """Function that loads the anatomical pipeline configuration file. + + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with ``self`` + """ + dialog = FileDialog(action="open", wildcard="*anatomical_config.json") + dialog.open() + if dialog.return_code == OK: + if ( + dialog.path + != ui_info.ui.context["object"].project_info.anat_config_file + ): + shutil.copy( + dialog.path, + ui_info.ui.context["object"].project_info.anat_config_file, + ) + anat_load_config_json( + self.anat_pipeline, + ui_info.ui.context["object"].project_info.anat_config_file, + ) + # TODO: load_config (anat_ or dmri_ ?) + + @classmethod + def save_dmri_config_file(self, ui_info): + """Function that saves the diffusion pipeline configuration file. 
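+
+        It opens a "save as" file dialog (defaulting to
+        ``<bids_dir>/code/ref_diffusion_config.json``), saves the current
+        diffusion pipeline configuration and, if a different location was
+        selected, copies the configuration file there as well.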
+
+        Parameters
+        ----------
+        ui_info : QtView
+            TraitsUI QtView associated with ``self``
+        """
+        print_blue("[Save diffusion pipeline configuration]")
+        dialog = FileDialog(
+            action="save as",
+            default_filename=os.path.join(
+                ui_info.ui.context["object"].project_info.base_directory,
+                "code",
+                "ref_diffusion_config.json",
+            ),
+        )
+        dialog.open()
+        if dialog.return_code == OK:
+            dmri_save_config(
+                ui_info.ui.context["object"].dmri_pipeline,
+                ui_info.ui.context["object"].project_info.dmri_config_file,
+            )
+            if (
+                dialog.path
+                != ui_info.ui.context["object"].project_info.dmri_config_file
+            ):
+                shutil.copy(
+                    ui_info.ui.context["object"].project_info.dmri_config_file,
+                    dialog.path,
+                )
+
+    def load_dmri_config_file(self, ui_info):
+        """Function that loads the diffusion pipeline configuration file.
+
+        Parameters
+        ----------
+        ui_info : QtView
+            TraitsUI QtView associated with ``self``
+        """
+        dialog = FileDialog(action="open", wildcard="*diffusion_config.json")
+        dialog.open()
+        if dialog.return_code == OK:
+            if (
+                dialog.path
+                != ui_info.ui.context["object"].project_info.dmri_config_file
+            ):
+                shutil.copy(
+                    dialog.path,
+                    ui_info.ui.context["object"].project_info.dmri_config_file,
+                )
+            dmri_load_config_json(
+                self.dmri_pipeline,
+                ui_info.ui.context["object"].project_info.dmri_config_file,
+            )
+
+    @classmethod
+    def save_fmri_config_file(self, ui_info):
+        """Function that saves the fMRI pipeline configuration file.
+
+        Parameters
+        ----------
+        ui_info : QtView
+            TraitsUI QtView associated with ``self``
+        """
+        print_blue("[Save fMRI pipeline configuration]")
+        dialog = FileDialog(
+            action="save as",
+            default_filename=os.path.join(
+                ui_info.ui.context["object"].project_info.base_directory,
+                "code",
+                "ref_fMRI_config.json",
+            ),
+        )
+        dialog.open()
+        if dialog.return_code == OK:
+            fmri_save_config(
+                ui_info.ui.context["object"].fmri_pipeline,
+                ui_info.ui.context["object"].project_info.fmri_config_file,
+            )
+            if (
+                dialog.path
+                != ui_info.ui.context["object"].project_info.fmri_config_file
+            ):
+                shutil.copy(
+                    ui_info.ui.context["object"].project_info.fmri_config_file,
+                    dialog.path,
+                )
+
+    def load_fmri_config_file(self, ui_info):
+        """Function that loads the fMRI pipeline configuration file.
+
+        Parameters
+        ----------
+        ui_info : QtView
+            TraitsUI QtView associated with ``self``
+        """
+        dialog = FileDialog(action="open", wildcard="*fMRI_config.json")
+        dialog.open()
+        if dialog.return_code == OK:
+            if (
+                dialog.path
+                != ui_info.ui.context["object"].project_info.fmri_config_file
+            ):
+                shutil.copy(
+                    dialog.path,
+                    ui_info.ui.context["object"].project_info.fmri_config_file,
+                )
+            fmri_load_config_json(
+                self.fmri_pipeline,
+                ui_info.ui.context["object"].project_info.fmri_config_file,
+            )
+
+
+class MainWindowHandler(Handler):
+    """Event handler of the main window.
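+
+    It defines the actions triggered from the main window, in particular the
+    loading of a BIDS dataset (optionally installed with Datalad): it checks
+    which input modalities (T1w / T2w / DWI / BOLD) are available and
+    initializes the corresponding processing pipelines and their configuration
+    files accordingly.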
+ + Attributes + ---------- + project_loaded : traits.Bool + Indicate if project has been successfully loaded + (Default: False) + + anat_pipeline : Instance(HasTraits) + Instance of :class:`AnatomicalPipelineUI` class + + anat_inputs_checked : traits.Bool + Indicate if anatomical pipeline inputs are available + (Default: False) + + anat_outputs_checked : traits.Bool + Indicate if anatomical pipeline outputs are available + (Default: False) + + anatomical_processed : traits.Bool + Indicate if anatomical pipeline was run + (Default: False) + + dmri_pipeline : Instance(HasTraits) + Instance of :class:`DiffusionPipelineUI` class + + dmri_inputs_checked : traits.Bool + Indicate if diffusion pipeline inputs are available + (Default: False) + + dmri_processed : traits.Bool + Indicate if diffusion pipeline was run + (Default: False) + + fmri_pipeline : Instance(HasTraits) + Instance of :class:`fMRIPipelineUI` class + + fmri_inputs_checked : traits.Bool + Indicate if fMRI pipeline inputs are available + (Default: False) + + fmri_processed : traits.Bool + Indicate if fMRI pipeline was run + (Default: False) + """ + + project_loaded = Bool(False) + + anat_pipeline = Instance(HasTraits) + anat_inputs_checked = Bool(False) + anat_outputs_checked = Bool(False) + anatomical_processed = Bool(False) + + dmri_pipeline = Instance(HasTraits) + dmri_inputs_checked = Bool(False) + dmri_processed = Bool(False) + + fmri_pipeline = Instance(HasTraits) + fmri_inputs_checked = Bool(False) + fmri_processed = Bool(False) + + def load_dataset(self, ui_info, debug=False): + """Function that creates a new :class:`ProjectInfoUI` instance from an existing project. + + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with ``self`` + + debug : bool + If True, print more information for debugging + """ + loaded_project = cmp.bidsappmanager.project.ProjectInfoUI() + np_res = loaded_project.configure_traits(view="open_view") + loaded_project.output_directory = os.path.join( + loaded_project.base_directory, "derivatives" + ) + + if loaded_project.creation_mode == "Install Datalad BIDS dataset": + datalad_is_available = cmp.bidsappmanager.project.is_tool("datalad") + + if datalad_is_available: + print(">>> Datalad dataset installation...") + if loaded_project.install_datalad_dataset_via_ssh: + if loaded_project.ssh_pwd != "": + os.environ["REMOTEUSERPWD"] = loaded_project.ssh_pwd + cmd = 'datalad install -D "Dataset {} (remote:{}) installed on {}" -s ssh://{}:$REMOTEUSERPWD@{}:{} {}'.format( + loaded_project.datalad_dataset_path, + loaded_project.ssh_remote, + loaded_project.base_directory, + loaded_project.ssh_user, + loaded_project.ssh_remote, + loaded_project.datalad_dataset_path, + loaded_project.base_directory, + ) + else: + cmd = 'datalad install -D "Dataset {} (remote:{}) installed on {}" -s ssh://{}@{}:{} {}'.format( + loaded_project.datalad_dataset_path, + loaded_project.ssh_remote, + loaded_project.base_directory, + loaded_project.ssh_user, + loaded_project.ssh_remote, + loaded_project.datalad_dataset_path, + loaded_project.base_directory, + ) + try: + print_blue("... 
cmd: {}".format(cmd)) + run( + cmd, + env={}, + cwd=os.path.abspath(loaded_project.base_directory), + ) + del os.environ["REMOTEUSERPWD"] + except Exception: + print(" ERROR: Failed to install datalad dataset via ssh") + del os.environ["REMOTEUSERPWD"] + else: + cmd = 'datalad install -D "Dataset {} installed on {}" -s {} {}'.format( + loaded_project.datalad_dataset_path, + loaded_project.base_directory, + loaded_project.datalad_dataset_path, + loaded_project.base_directory, + ) + try: + print_blue("... cmd: {}".format(cmd)) + run( + cmd, + env={}, + cwd=os.path.abspath(loaded_project.base_directory), + ) + except Exception: + print(" ERROR: Failed to install datalad dataset via ssh") + else: + print(" ERROR: Datalad is not installed!") + + # Install dataset via datalad + # datalad install -s ssh://user@IP_ADDRESS:/remote/path/to/ds-example /local/path/to/ds-example + # + + t1_available = False + t2_available = False + diffusion_available = False + fmri_available = False + + # print("Local BIDS dataset: %s" % loaded_project.base_directory) + if np_res: + try: + bids_layout = BIDSLayout(loaded_project.base_directory) + print(bids_layout) + + loaded_project.bids_layout = bids_layout + + loaded_project.subjects = [] + for subj in bids_layout.get_subjects(): + if debug: + print("sub: %s" % subj) + if "sub-" + str(subj) not in loaded_project.subjects: + loaded_project.subjects.append("sub-" + str(subj)) + # loaded_project.subjects = ['sub-'+str(subj) for subj in bids_layout.get_subjects()] + loaded_project.subjects.sort() + + if debug: + print("Available subjects : ") + print(loaded_project.subjects) + loaded_project.number_of_subjects = len(loaded_project.subjects) + + loaded_project.subject = loaded_project.subjects[0] + if debug: + print(loaded_project.subject) + + subject = loaded_project.subject.split("-")[1] + + sessions = bids_layout.get( + target="session", return_type="id", subject=subject + ) + + if debug: + print("Sessions: ") + print(sessions) + + if len(sessions) > 0: + loaded_project.subject_sessions = ["ses-{}".format(sessions[0])] + loaded_project.subject_session = "ses-{}".format(sessions[0]) + else: + loaded_project.subject_sessions = [""] + loaded_project.subject_session = "" + + if len(sessions) > 0: + print( + f" ... Check for available input modalities for subject {subject} of session {sessions[0]}..." + ) + + query_files = [ + f.filename + for f in bids_layout.get( + subject=subject, + session=sessions[0], + suffix="bold", + extensions=["nii", "nii.gz"], + ) + ] + if len(query_files) > 0: + print(" * Available BOLD(s): {}".format(query_files)) + fmri_available = True + + query_files = [ + f.filename + for f in bids_layout.get( + subject=subject, + session=sessions[0], + suffix="T1w", + extensions=["nii", "nii.gz"], + ) + ] + if len(query_files) > 0: + print(" * Available T1w(s): {}".format(query_files)) + t1_available = True + + query_files = [ + f.filename + for f in bids_layout.get( + subject=subject, + session=sessions[0], + suffix="T2w", + extensions=["nii", "nii.gz"], + ) + ] + if len(query_files) > 0: + print(" * Available T2w(s): {}".format(query_files)) + t2_available = True + + query_files = [ + f.filename + for f in bids_layout.get( + subject=subject, + session=sessions[0], + suffix="dwi", + extensions=["nii", "nii.gz"], + ) + ] + if len(query_files) > 0: + print(" * Available DWI(s): {}".format(query_files)) + diffusion_available = True + + else: + print( + f" ... Check for available input modalities for subject {subject}..." 
+ ) + query_files = [ + f.filename + for f in bids_layout.get( + subject=subject, suffix="T1w", extensions=["nii", "nii.gz"] + ) + ] + if len(query_files) > 0: + print(" * Available T1w(s): {}".format(query_files)) + t1_available = True + + query_files = [ + f.filename + for f in bids_layout.get( + subject=subject, suffix="T2w", extensions=["nii", "nii.gz"] + ) + ] + if len(query_files) > 0: + print(" * Available T2w(s): {}".format(query_files)) + t2_available = True + + query_files = [ + f.filename + for f in bids_layout.get( + subject=subject, suffix="dwi", extensions=["nii", "nii.gz"] + ) + ] + if len(query_files) > 0: + print(" * Available DWI(s): {}".format(query_files)) + diffusion_available = True + + query_files = [ + f.filename + for f in bids_layout.get( + subject=subject, suffix="bold", extensions=["nii", "nii.gz"] + ) + ] + if len(query_files) > 0: + print(" * Available BOLD(s): {}".format(query_files)) + fmri_available = True + except ValueError as e: + msg = str(e) + error(message=msg, title="BIDS error") + except Exception: + error(message="Invalid BIDS dataset. Please see documentation for more details.", + title="BIDS error") + return + + ui_info.ui.context["object"].project_info = loaded_project + + anat_inputs_checked = False + if t1_available: + anat_inputs_checked = True + + dmri_inputs_checked = False + if t1_available and diffusion_available: + dmri_inputs_checked = True + + if t2_available and debug: + print("T2 available") + + fmri_inputs_checked = False + if t1_available and fmri_available: + fmri_inputs_checked = True + if debug: + print("fmri input check : {}".format(fmri_inputs_checked)) + + self.anat_inputs_checked = anat_inputs_checked + self.dmri_inputs_checked = dmri_inputs_checked + self.fmri_inputs_checked = fmri_inputs_checked + + if anat_inputs_checked: + + self.anat_pipeline = anatomical_pipeline.AnatomicalPipelineUI( + loaded_project + ) + self.anat_pipeline.number_of_cores = loaded_project.number_of_cores + + code_directory = os.path.join(loaded_project.base_directory, "code") + + anat_config_file = os.path.join( + code_directory, "ref_anatomical_config.json" + ) + + # Check for old configuration file with INI format + # when there is no existing json configuration file + # and convert it to JSON format if so + if not os.path.isfile(anat_config_file): + anat_config_ini_file = os.path.join( + code_directory, "ref_anatomical_config.ini" + ) + if os.path.isfile(anat_config_ini_file): + anat_config_file = convert_config_ini_2_json( + anat_config_ini_file + ) + + loaded_project.anat_config_file = anat_config_file + + if self.anat_pipeline is not None and not os.path.isfile( + anat_config_file + ): + if not os.path.exists(code_directory): + try: + os.makedirs(code_directory) + except os.error: + print_warning("%s was already existing" % code_directory) + finally: + print("Created directory %s" % code_directory) + + print(">> Create new reference anatomical config file...") + anat_save_config( + self.anat_pipeline, loaded_project.anat_config_file + ) + else: + print(">> Load reference anatomical config file...") + # if datalad_is_available: + # print('... Datalad get anatomical config file : {}'.format(loaded_project.anat_config_file)) + # cmd = 'datalad run -m "Get reference anatomical config file" bash -c "datalad get code/ref_anatomical_config.json"' + # try: + # print('... 
cmd: {}'.format(cmd)) + # core.run( cmd, env={}, cwd=os.path.abspath(loaded_project.base_directory)) + # except Exception: + # print(" ERROR: Failed to get file") + + anat_load_config_json( + self.anat_pipeline, loaded_project.anat_config_file + ) + + self.anat_pipeline.config_file = loaded_project.anat_config_file + + ui_info.ui.context["object"].anat_pipeline = self.anat_pipeline + loaded_project.t1_available = self.anat_inputs_checked + + loaded_project.parcellation_scheme = self.anat_pipeline.stages[ + "Parcellation" + ].config.parcellation_scheme + loaded_project.freesurfer_subjects_dir = self.anat_pipeline.stages[ + "Segmentation" + ].config.freesurfer_subjects_dir + loaded_project.freesurfer_subject_id = self.anat_pipeline.stages[ + "Segmentation" + ].config.freesurfer_subject_id + + ui_info.ui.context["object"].project_info = loaded_project + + self.project_loaded = True + + if dmri_inputs_checked: + self.dmri_pipeline = diffusion_pipeline.DiffusionPipelineUI( + loaded_project + ) + self.dmri_pipeline.number_of_cores = loaded_project.number_of_cores + self.dmri_pipeline.parcellation_scheme = ui_info.ui.context[ + "object" + ].project_info.parcellation_scheme + + code_directory = os.path.join(loaded_project.base_directory, "code") + dmri_config_file = os.path.join( + code_directory, "ref_diffusion_config.json" + ) + + # Check for old configuration file with INI format + # when there is no existing json configuration file + # and convert it to JSON format if so + if not os.path.isfile(dmri_config_file): + dmri_config_ini_file = os.path.join( + code_directory, "ref_diffusion_config.ini" + ) + if os.path.isfile(dmri_config_ini_file): + dmri_config_file = convert_config_ini_2_json( + dmri_config_ini_file + ) + + loaded_project.dmri_config_file = dmri_config_file + self.dmri_pipeline.config_file = dmri_config_file + + if ( + not os.path.isfile(dmri_config_file) + and self.dmri_pipeline is not None + ): + + # Look for diffusion acquisition model information from filename (acq-*) + if loaded_project.subject_session != "": + session = loaded_project.subject_session.split("-")[1] + diffusion_imaging_models = [ + i + for i in bids_layout.get( + subject=subject, + session=session, + suffix="dwi", + target="acquisition", + return_type="id", + extensions=["nii", "nii.gz"], + ) + ] + if debug: + print( + "DIFFUSION IMAGING MODELS : {}".format( + diffusion_imaging_models + ) + ) + + if len(diffusion_imaging_models) > 0: + if len(diffusion_imaging_models) > 1: + loaded_project.dmri_bids_acqs = ( + diffusion_imaging_models + ) + loaded_project.configure_traits( + view="dmri_bids_acq_view" + ) + else: + loaded_project.dmri_bids_acqs = [ + "{}".format(diffusion_imaging_models[0]) + ] + loaded_project.dmri_bids_acq = ( + diffusion_imaging_models[0] + ) + + if ("dsi" in loaded_project.dmri_bids_acq) or ( + "DSI" in loaded_project.dmri_bids_acq + ): + loaded_project.diffusion_imaging_model = "DSI" + elif ("dti" in loaded_project.dmri_bids_acq) or ( + "DTI" in loaded_project.dmri_bids_acq + ): + loaded_project.diffusion_imaging_model = "DTI" + elif ("hardi" in loaded_project.dmri_bids_acq) or ( + "HARDI" in loaded_project.dmri_bids_acq + ): + loaded_project.diffusion_imaging_model = "HARDI" + elif ("multishell" in loaded_project.dmri_bids_acq) or ( + "MULTISHELL" in loaded_project.dmri_bids_acq + ): + loaded_project.diffusion_imaging_model = ( + "multishell" + ) + else: + loaded_project.diffusion_imaging_model = "DTI" + else: + loaded_project.dmri_bids_acqs = [""] + loaded_project.dmri_bids_acq = "" + 
loaded_project.configure_traits( + view="diffusion_imaging_model_select_view" + ) + + files = [ + f.filename + for f in bids_layout.get( + subject=subject, + session=session, + suffix="dwi", + extensions=["nii", "nii.gz"], + ) + ] + + if debug: + print("****************************************") + print(files) + print("****************************************") + + if loaded_project.dmri_bids_acq != "": + for file in files: + if loaded_project.dmri_bids_acq in file: + dwi_file = file + if debug: + print( + "Loaded DWI file: {}".format(dwi_file) + ) + break + else: + dwi_file = files[0] + else: + diffusion_imaging_models = [ + i + for i in bids_layout.get( + subject=subject, + suffix="dwi", + target="acquisition", + return_type="id", + extensions=["nii", "nii.gz"], + ) + ] + + if len(diffusion_imaging_models) > 0: + if len(diffusion_imaging_models) > 1: + loaded_project.dmri_bids_acqs = ( + diffusion_imaging_models + ) + loaded_project.configure_traits( + view="dmri_bids_acq_view" + ) + else: + loaded_project.dmri_bids_acq = ( + diffusion_imaging_models[0] + ) + + if ("dsi" in loaded_project.dmri_bids_acq) or ( + "DSI" in loaded_project.dmri_bids_acq + ): + loaded_project.diffusion_imaging_model = "DSI" + elif ("dti" in loaded_project.dmri_bids_acq) or ( + "DTI" in loaded_project.dmri_bids_acq + ): + loaded_project.diffusion_imaging_model = "DTI" + elif ("hardi" in loaded_project.dmri_bids_acq) or ( + "HARDI" in loaded_project.dmri_bids_acq + ): + loaded_project.diffusion_imaging_model = "HARDI" + elif ("multishell" in loaded_project.dmri_bids_acq) or ( + "MULTISHELL" in loaded_project.dmri_bids_acq + ): + loaded_project.diffusion_imaging_model = ( + "multishell" + ) + else: + loaded_project.diffusion_imaging_model = "DTI" + else: + loaded_project.dmri_bids_acqs = [""] + loaded_project.dmri_bids_acq = "" + loaded_project.configure_traits( + view="diffusion_imaging_model_select_view" + ) + + self.dmri_pipeline.diffusion_imaging_model = ( + loaded_project.diffusion_imaging_model + ) + self.dmri_pipeline.global_conf.diffusion_imaging_model = ( + loaded_project.diffusion_imaging_model + ) + self.dmri_pipeline.global_conf.dmri_bids_acq = ( + loaded_project.dmri_bids_acq + ) + self.dmri_pipeline.stages[ + "Diffusion" + ].diffusion_imaging_model = ( + loaded_project.diffusion_imaging_model + ) + print(">> Create new reference diffusion config file...") + dmri_save_config(self.dmri_pipeline, dmri_config_file) + else: + print(">> Load reference diffusion config file...") + + # if datalad_is_available: + # print('... Datalad get reference diffusion config file : {}'.format(loaded_project.anat_config_file)) + # cmd = 'datalad run -m "Get reference anatomical config file" bash -c "datalad get code/ref_diffusion_config.json"' + # try: + # print('... cmd: {}'.format(cmd)) + # core.run( cmd, env={}, cwd=os.path.abspath(loaded_project.base_directory)) + # except Exception: + # print(" ERROR: Failed to get file") + + dmri_load_config_json( + self.dmri_pipeline, loaded_project.dmri_config_file + ) + # TODO: check if diffusion imaging model (DSI/DTI/HARDI/multishell) is correct/valid. 
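+                    # Illustrative sketch only (not part of this patch; assumes the
+                    # model names handled above): the value loaded from the config
+                    # could be validated against the supported models, e.g.
+                    #
+                    #     valid_models = ("DSI", "DTI", "HARDI", "multishell")
+                    #     if self.dmri_pipeline.diffusion_imaging_model not in valid_models:
+                    #         print_warning(
+                    #             "  .. WARNING: unsupported diffusion imaging model "
+                    #             f"{self.dmri_pipeline.diffusion_imaging_model}"
+                    #         )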
+ + ui_info.ui.context["object"].dmri_pipeline = self.dmri_pipeline + loaded_project.dmri_available = self.dmri_inputs_checked + + ui_info.ui.context["object"].project_info = loaded_project + + self.project_loaded = True + + if fmri_inputs_checked: + self.fmri_pipeline = fMRI_pipeline.fMRIPipelineUI(loaded_project) + self.fmri_pipeline.number_of_cores = loaded_project.number_of_cores + self.fmri_pipeline.parcellation_scheme = ui_info.ui.context[ + "object" + ].project_info.parcellation_scheme + + self.fmri_pipeline.stages["Registration"].pipeline_mode = "fMRI" + self.fmri_pipeline.stages[ + "Registration" + ].registration_mode = "FSL (Linear)" + self.fmri_pipeline.stages[ + "Registration" + ].registration_mode_trait = ["FSL (Linear)", "BBregister (FS)"] + + code_directory = os.path.join(loaded_project.base_directory, "code") + fmri_config_file = os.path.join( + code_directory, "ref_fMRI_config.json" + ) + + # Check for old configuration file with INI format + # when there is no existing json configuration file + # and convert it to JSON format if so + if not os.path.isfile(fmri_config_file): + fmri_config_ini_file = os.path.join( + code_directory, "ref_fMRI_config.ini" + ) + if os.path.isfile(fmri_config_ini_file): + fmri_config_file = convert_config_ini_2_json( + fmri_config_ini_file + ) + + loaded_project.fmri_config_file = fmri_config_file + self.fmri_pipeline.config_file = fmri_config_file + + if ( + not os.path.isfile(fmri_config_file) + and self.fmri_pipeline is not None + ): + print(">> Create new reference fMRI config file...") + fmri_save_config(self.fmri_pipeline, fmri_config_file) + else: + print(">> Load reference fMRI config file...") + + # if datalad_is_available: + # print('... Datalad get reference fMRI config file : {}'.format(loaded_project.anat_config_file)) + # cmd = 'datalad run -m "Get reference fMRI config file" bash -c "datalad get code/ref_fMRI_config.json"' + # try: + # print('... cmd: {}'.format(cmd)) + # core.run( cmd, env={}, cwd=os.path.abspath(loaded_project.base_directory)) + # except Exception: + # print(" ERROR: Failed to get file") + + fmri_load_config_json( + self.fmri_pipeline, loaded_project.fmri_config_file + ) + + ui_info.ui.context["object"].fmri_pipeline = self.fmri_pipeline + loaded_project.fmri_available = self.fmri_inputs_checked + + ui_info.ui.context["object"].project_info = loaded_project + + self.project_loaded = True + + +class BIDSAppInterfaceWindowHandler(Handler): + """Event handler of the BIDS App Interface window. + + Attributes + ---------- + docker_process : subprocess.Popen + Instance of ``subprocess.Popen`` where BIDS App docker image is run + """ + + docker_process = Instance(Popen) + + def check_settings(self, ui_info): + """Function that checks if all parameters are properly set before execution of the BIDS App. + + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with ``self`` + """ + ui_info.ui.context["object"].settings_checked = True + ui_info.ui.context["object"].handler = self + + if os.path.isdir(ui_info.ui.context["object"].bids_root): + print( + "BIDS root directory : {}".format( + ui_info.ui.context["object"].bids_root + ) + ) + else: + print_error("Error: BIDS root invalid!") + ui_info.ui.context["object"].settings_checked = False + + if os.path.isfile(ui_info.ui.context["object"].anat_config): + print( + "Anatomical configuration file : {}".format( + ui_info.ui.context["object"].anat_config + ) + ) + else: + print_error( + "Error: Configuration file for anatomical pipeline not existing!" 
+            )
+            ui_info.ui.context["object"].settings_checked = False
+
+        if os.path.isfile(ui_info.ui.context["object"].dmri_config):
+            print(
+                "Diffusion configuration file : {}".format(
+                    ui_info.ui.context["object"].dmri_config
+                )
+            )
+        else:
+            print_warning(
+                "Warning: Configuration file for diffusion pipeline not existing!"
+            )
+
+        if os.path.isfile(ui_info.ui.context["object"].fmri_config):
+            print(
+                "fMRI configuration file : {}".format(
+                    ui_info.ui.context["object"].fmri_config
+                )
+            )
+        else:
+            print_warning("Warning: Configuration file for fMRI pipeline not existing!")
+
+        if os.path.isfile(ui_info.ui.context["object"].fs_license):
+            print(
+                "Freesurfer license : {}".format(
+                    ui_info.ui.context["object"].fs_license
+                )
+            )
+        else:
+            print_error(
+                "Error: Invalid Freesurfer license ({})!".format(
+                    ui_info.ui.context["object"].fs_license
+                )
+            )
+            ui_info.ui.context["object"].settings_checked = False
+
+        msg = f'Valid inputs for BIDS App : {ui_info.ui.context["object"].settings_checked}'
+        if ui_info.ui.context["object"].settings_checked:
+            print(msg)
+        else:
+            print_error(msg)
+
+        print("Docker running ? {}".format(ui_info.ui.context["object"].docker_running))
+        return True
+
+    def start_bidsapp_process(self, ui_info, label):
+        """Function that runs the BIDS App on a single subject.
+
+        Parameters
+        ----------
+        ui_info : QtView
+            TraitsUI QtView associated with this handler
+        label : string
+            Label of the participant / subject (e.g. ``"01"``, no "sub-" prefix)
+        """
+        cmd = [
+            "docker",
+            "run",
+            "-it",
+            "--rm",
+            "-v",
+            "{}:/bids_dataset".format(ui_info.ui.context["object"].bids_root),
+            "-v",
+            "{}/derivatives:/outputs".format(ui_info.ui.context["object"].bids_root),
+            # '-v', '{}:/bids_dataset/derivatives/freesurfer/fsaverage'.format(ui_info.ui.context["object"].fs_average),
+            "-v",
+            "{}:/opt/freesurfer/license.txt".format(
+                ui_info.ui.context["object"].fs_license
+            ),
+            "-v",
+            "{}:/code/ref_anatomical_config.json".format(
+                ui_info.ui.context["object"].anat_config
+            ),
+        ]
+
+        if ui_info.ui.context["object"].run_dmri_pipeline:
+            cmd.append("-v")
+            cmd.append(
+                "{}:/code/ref_diffusion_config.json".format(
+                    ui_info.ui.context["object"].dmri_config
+                )
+            )
+
+        if ui_info.ui.context["object"].run_fmri_pipeline:
+            cmd.append("-v")
+            cmd.append(
+                "{}:/code/ref_fMRI_config.json".format(
+                    ui_info.ui.context["object"].fmri_config
+                )
+            )
+
+        cmd.append("-u")
+        cmd.append("{}:{}".format(os.geteuid(), os.getegid()))
+
+        cmd.append("sebastientourbier/connectomemapper-bidsapp:latest")
+        cmd.append("/bids_dataset")
+        cmd.append("/outputs")
+        cmd.append("participant")
+
+        cmd.append("--participant_label")
+        cmd.append("{}".format(label))
+
+        cmd.append("--anat_pipeline_config")
+        cmd.append("/code/ref_anatomical_config.json")
+
+        if ui_info.ui.context["object"].run_dmri_pipeline:
+            cmd.append("--dwi_pipeline_config")
+            cmd.append("/code/ref_diffusion_config.json")
+
+        if ui_info.ui.context["object"].run_fmri_pipeline:
+            cmd.append("--func_pipeline_config")
+            cmd.append("/code/ref_fMRI_config.json")
+
+        print_blue(" ".join(cmd))
+
+        log_filename = os.path.join(
+            ui_info.ui.context["object"].bids_root,
+            "derivatives/cmp",
+            "sub-{}_log-cmpbidsapp.txt".format(label),
+        )
+
+        with open(log_filename, "w+") as log:
+            proc = Popen(cmd, stdout=log, stderr=log)
+
+        return proc
+
+    def manage_bidsapp_procs(self, proclist):
+        """Function that manages the parallelized BIDS App Popen processes.
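+
+        A process whose ``poll()`` returns a value (i.e. that has terminated)
+        is removed from ``proclist``, freeing a slot for the next participant.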
+ + Parameters + ---------- + proclist + List of ``Popen`` processes running the BIDS App on a single subject + """ + for proc in proclist: + if proc.poll() is not None: + proclist.remove(proc) + + def start_bids_app(self, ui_info): + """Main function that runs the BIDS App on a set or sub-set of participants. + + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with this handler + """ + print("[Start BIDS App]") + + maxprocs = multiprocessing.cpu_count() + processes = [] + + ui_info.ui.context["object"].docker_running = True + + for label in ui_info.ui.context["object"].list_of_subjects_to_be_processed: + while len(processes) == maxprocs: + self.manage_bidsapp_procs(processes) + + proc = self.start_bidsapp_process(ui_info, label=label) + processes.append(proc) + + while len(processes) > 0: + self.manage_bidsapp_procs(processes) + + print("Processing with BIDS App Finished") + + ui_info.ui.context["object"].docker_running = False + + return True + + @classmethod + def stop_bids_app(ui_info): + """Function that stops the BIDS execution. + + Parameters + ---------- + ui_info : QtView + TraitsUI QtView associated with this handler + """ + print("Stop BIDS App") + # self.docker_process.kill() + ui_info.ui.context["object"].docker_running = False + return True \ No newline at end of file diff --git a/cmp/bidsappmanager/gui/principal.py b/cmp/bidsappmanager/gui/principal.py new file mode 100644 index 000000000..d45f3cc89 --- /dev/null +++ b/cmp/bidsappmanager/gui/principal.py @@ -0,0 +1,293 @@ +# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors +# All rights reserved. +# +# This software is distributed under the open-source license Modified BSD. + +"""Connectome Mapper Main Window.""" + +# General imports +import os +import pkg_resources + +from pyface.api import ImageResource +from traitsui.qt4.extra.qt_view import QtView +from traitsui.api import * +from traits.api import * + +from bids import BIDSLayout + +# Own imports +import cmp.project +from cmp.info import __version__ + +from cmtklib.util import ( + return_button_style_sheet, print_blue +) + +import cmp.bidsappmanager.gui.config +import cmp.bidsappmanager.gui.bidsapp +import cmp.bidsappmanager.gui.qc +import cmp.bidsappmanager.gui.handlers +from cmp.bidsappmanager.gui.globals import ( + style_sheet, get_icon +) + + +class MainWindow(HasTraits): + """Class that defines the Main window of the Connectome Mapper 3 GUI. 
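+
+    It displays the three buttons that open, respectively, the pipeline
+    Configurator window, the BIDS App interface window and the Quality
+    Control / Inspector window.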
+ + Attributes + ---------- + project_info : cmp.bidsappmanager.project.ProjectInfoUI + Instance of :class:`CMP_Project_InfoUI` that represents the processing project + + anat_pipeline : Instance(HasTraits) + Instance of anatomical MRI pipeline UI + + dmri_pipeline : Instance(HasTraits) + Instance of diffusion MRI pipeline UI + + fmri_pipeline : Instance(HasTraits) + Instance of functional MRI pipeline UI + + bidsapp_ui : cmp.project.ProjectInfo + Instance of :class:`BIDSAppInterfaceWindow` + + load_dataset : traits.ui.Action + TraitsUI Action to load a BIDS dataset + + bidsapp : traits.ui.Button + Button that displays the BIDS App Interface window + + configurator : traits.ui.Button + Button thats displays the pipeline Configurator window + + quality_control : traits.ui.Button + Button that displays the pipeline Quality Control / Inspector window + + manager_group : traits.ui.View + TraitsUI View that describes the content of the main window + + traits_view : QtView + TraitsUI QtView that includes ``manager_group`` and parameterize + the window with menu + """ + project_info = Instance(cmp.project.ProjectInfo) + + anat_pipeline = Instance(HasTraits) + dmri_pipeline = Instance(HasTraits) + fmri_pipeline = Instance(HasTraits) + + # configurator_ui = Instance(CMP_PipelineConfigurationWindow) + bidsapp_ui = Instance(cmp.bidsappmanager.gui.bidsapp.BIDSAppInterfaceWindow) + # quality_control_ui = Instance(CMP_QualityControlWindow) + + load_dataset = Action(name="Load BIDS Dataset...", action="load_dataset") + + project_info.style_sheet = style_sheet + + configurator = Button("") + bidsapp = Button("") + quality_control = Button("") + + view_mode = 1 + + manager_group = VGroup( + spring, + HGroup( + spring, + HGroup( + Item( + "configurator", + style="custom", + width=200, + height=200, + resizable=False, + label="", + show_label=False, + style_sheet=return_button_style_sheet( + ImageResource( + pkg_resources.resource_filename( + "cmp", + os.path.join( + "bidsappmanager/images", "configurator_200x200.png" + ), + ) + ).absolute_path + ), + ), + show_labels=False, + label="", + ), + spring, + HGroup( + Item( + "bidsapp", + style="custom", + width=200, + height=200, + resizable=False, + style_sheet=return_button_style_sheet( + ImageResource( + pkg_resources.resource_filename( + "cmp", + os.path.join( + "bidsappmanager/images", "bidsapp_200x200.png" + ), + ) + ).absolute_path + ), + ), + show_labels=False, + label="", + ), + spring, + HGroup( + Item( + "quality_control", + style="custom", + width=200, + height=200, + resizable=False, + style_sheet=return_button_style_sheet( + ImageResource( + pkg_resources.resource_filename( + "cmp", + os.path.join( + "bidsappmanager/images", + "qualitycontrol_200x200.png", + ), + ) + ).absolute_path + ), + ), + show_labels=False, + label="", + ), + spring, + springy=True, + visible_when="handler.project_loaded==True", + ), + spring, + springy=True, + ) + + traits_view = QtView( + HGroup( + Include("manager_group"), + ), + title="Connectome Mapper {} - BIDS App Manager".format(__version__), + menubar=MenuBar( + Menu( + ActionGroup( + load_dataset, + ), + ActionGroup( + Action(name="Quit", action="_on_close"), + ), + name="File", + ), + ), + handler=cmp.bidsappmanager.gui.handlers.MainWindowHandler(), + style_sheet=style_sheet, + width=0.5, + height=0.8, + resizable=True, # , scrollable=True , resizable=True + icon=get_icon("cmp.png"), + ) + + def _bidsapp_fired(self): + """ Callback of the "bidsapp" button. 
This displays the BIDS App Interface window.""" + print_blue("[Open BIDS App Window]") + bids_layout = BIDSLayout(self.project_info.base_directory) + subjects = bids_layout.get_subjects() + + anat_config = os.path.join( + self.project_info.base_directory, "code/", "ref_anatomical_config.json" + ) + dmri_config = os.path.join( + self.project_info.base_directory, "code/", "ref_diffusion_config.json" + ) + fmri_config = os.path.join( + self.project_info.base_directory, "code/", "ref_fMRI_config.json" + ) + + self.bidsapp_ui = cmp.bidsappmanager.gui.bidsapp.BIDSAppInterfaceWindow( + project_info=self.project_info, + bids_root=self.project_info.base_directory, + subjects=subjects, + list_of_subjects_to_be_processed=subjects, + # anat_config=self.project_info.anat_config_file, + # dmri_config=self.project_info.dmri_config_file, + # fmri_config=self.project_info.fmri_config_file + anat_config=anat_config, + dmri_config=dmri_config, + fmri_config=fmri_config, + ) + self.bidsapp_ui.configure_traits() + + def _configurator_fired(self): + """Callback of the "configurator" button. This displays the Configurator Window.""" + print_blue("[Open Pipeline Configurator Window]") + if self.project_info.t1_available: + if os.path.isfile(self.project_info.anat_config_file): + print( + " .. Anatomical config file : %s" + % self.project_info.anat_config_file + ) + + if self.project_info.dmri_available: + if os.path.isfile(self.project_info.dmri_config_file): + print( + " .. Diffusion config file : %s" + % self.project_info.dmri_config_file + ) + + if self.project_info.fmri_available: + if os.path.isfile(self.project_info.fmri_config_file): + print(" .. fMRI config file : %s" % self.project_info.fmri_config_file) + + self.configurator_ui = cmp.bidsappmanager.gui.config.PipelineConfiguratorWindow( + project_info=self.project_info, + anat_pipeline=self.anat_pipeline, + dmri_pipeline=self.dmri_pipeline, + fmri_pipeline=self.fmri_pipeline, + anat_inputs_checked=self.project_info.t1_available, + dmri_inputs_checked=self.project_info.dmri_available, + fmri_inputs_checked=self.project_info.fmri_available, + ) + + self.configurator_ui.configure_traits() + + def _quality_control_fired(self): + """Callback of the "Inspector" button. This displays the Quality Control (Inspector) Window.""" + print_blue("[Open Quality Inspector Window]") + if self.project_info.t1_available: + if os.path.isfile(self.project_info.anat_config_file): + print( + " .. Anatomical config file : %s" + % self.project_info.anat_config_file + ) + + if self.project_info.dmri_available: + if os.path.isfile(self.project_info.dmri_config_file): + print( + " .. Diffusion config file : %s" + % self.project_info.dmri_config_file + ) + + if self.project_info.fmri_available: + if os.path.isfile(self.project_info.fmri_config_file): + print(" .. 
fMRI config file : %s" % self.project_info.fmri_config_file) + + try: + self.quality_control_ui = cmp.bidsappmanager.gui.qc.QualityInspectorWindow( + project_info=self.project_info, + anat_inputs_checked=self.project_info.t1_available, + dmri_inputs_checked=self.project_info.dmri_available, + fmri_inputs_checked=self.project_info.fmri_available, + ) + self.quality_control_ui.configure_traits() + except Exception as e: + print(e) diff --git a/cmp/bidsappmanager/gui/qc.py b/cmp/bidsappmanager/gui/qc.py new file mode 100644 index 000000000..83aab6fe2 --- /dev/null +++ b/cmp/bidsappmanager/gui/qc.py @@ -0,0 +1,470 @@ +# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors +# All rights reserved. +# +# This software is distributed under the open-source license Modified BSD. + +"""Connectome Mapper Output Quality Inspector Window.""" + +# General imports +import os + +from traitsui.qt4.extra.qt_view import QtView +from traitsui.api import * +from traits.api import * + +from bids import BIDSLayout + +# Own imports +import cmp.project + +from cmtklib.bids.io import ( + __cmp_directory__, __freesurfer_directory__ +) +from cmtklib.util import ( + BColors, + print_blue, + print_error, +) + +import cmp.bidsappmanager.project as project +import cmp.bidsappmanager.gui.handlers +from cmp.bidsappmanager.gui.globals import ( + style_sheet, get_icon +) + + +class QualityInspectorWindow(HasTraits): + """Class that defines the Quality Inspector Window. + + Attributes + ---------- + project_info : cmp.project.ProjectInfo + Instance of :class:`CMP_Project_Info` that represents the processing project + + anat_pipeline : Instance(HasTraits) + Instance of anatomical MRI pipeline + + dmri_pipeline : Instance(HasTraits) + Instance of diffusion MRI pipeline + + fmri_pipeline : Instance(HasTraits) + Instance of functional MRI pipeline + + anat_inputs_checked : traits.Bool + Indicates if inputs of anatomical pipeline are available + (Default: False) + + dmri_inputs_checked : traits.Bool + Indicates if inputs of diffusion pipeline are available + (Default: False) + + fmri_inputs_checked : traits.Bool + Indicates if inputs of functional pipeline are available + (Default: False) + + output_anat_available : traits.Bool + Indicates if outputs of anatomical pipeline are available + (Default: False) + + output_dmri_available : traits.Bool + Indicates if outputs of diffusion pipeline are available + (Default: False) + + output_fmri_available : traits.Bool + Indicates if outputs of functional pipeline are available + (Default: False) + + traits_view : QtView + TraitsUI QtView that describes the content of the window + """ + project_info = Instance(cmp.project.ProjectInfo) + + anat_pipeline = Instance(HasTraits) + dmri_pipeline = Instance(HasTraits) + fmri_pipeline = Instance(HasTraits) + + anat_inputs_checked = Bool(False) + dmri_inputs_checked = Bool(False) + fmri_inputs_checked = Bool(False) + + output_anat_available = Bool(False) + output_dmri_available = Bool(False) + output_fmri_available = Bool(False) + + traits_view = QtView( + Group( + # Group( + # # Include('dataset_view'),label='Data manager',springy=True + # Item('project_info',style='custom',show_label=False),label='Data manager',springy=True, dock='tab' + # ), + Group( + Item("anat_pipeline", style="custom", show_label=False), + visible_when="output_anat_available", + label="Anatomical pipeline", + dock="tab", + ), + Group( + Item( + 
"dmri_pipeline", + style="custom", + show_label=False, + visible_when="output_dmri_available", + ), + label="Diffusion pipeline", + dock="tab", + ), + Group( + Item( + "fmri_pipeline", + style="custom", + show_label=False, + visible_when="output_fmri_available", + ), + label="fMRI pipeline", + dock="tab", + ), + orientation="horizontal", + layout="tabbed", + springy=True, + enabled_when="output_anat_available", + ), + title="Connectome Mapper 3 Inspector", + menubar=MenuBar( + Menu( + ActionGroup( + Action(name="Quit", action="_on_close"), + ), + name="File", + ), + ), + handler=cmp.bidsappmanager.gui.handlers.ConfigQualityWindowHandler(), + style_sheet=style_sheet, + width=0.5, + height=0.8, + resizable=True, # scrollable=True, + icon=get_icon("qualitycontrol.png"), + ) + + error_msg = Str("") + error_view = View( + Group( + Item("error_msg", style="readonly", show_label=False), + ), + title="Error", + kind="modal", + # style_sheet=style_sheet, + buttons=["OK"], + ) + + def __init__( + self, + project_info=None, + anat_inputs_checked=False, + dmri_inputs_checked=False, + fmri_inputs_checked=False, + ): + """Constructor of an :class:``PipelineConfiguratorWindow`` instance. + + Parameters + ---------- + project_info : cmp.project.ProjectInfo + :class:`CMP_Project_Info` object (Default: None) + + anat_inputs_checked : traits.Bool + Boolean that indicates if anatomical pipeline inputs are available + (Default: False) + + dmri_inputs_checked = : traits.Bool + Boolean that indicates if diffusion pipeline inputs are available + (Default: False) + + fmri_inputs_checked : traits.Bool + Boolean that indicates if functional pipeline inputs are available + (Default: False) + """ + print("> Initialize window...") + self.project_info = project_info + + self.anat_inputs_checked = anat_inputs_checked + self.dmri_inputs_checked = dmri_inputs_checked + self.fmri_inputs_checked = fmri_inputs_checked + + aborded = self.select_subject() + + if aborded: + raise Exception( + BColors.FAIL + + " .. ABORDED: The quality control window will not be displayed." + + "Selection of subject/session was cancelled at initialization." + + BColors.ENDC + ) + + def select_subject(self): + """Function to select the subject and session for which to inspect outputs.""" + print("> Selection of subject (and session) for which to inspect outputs") + valid_selected_subject = False + select = True + aborded = False + + while not valid_selected_subject and not aborded: + + # Select subject from BIDS dataset + np_res = self.project_info.configure_traits(view="subject_view") + + if not np_res: + aborded = True + break + + print(" .. INFO: Selected subject: {}".format(self.project_info.subject)) + + # Select session if any + bids_layout = BIDSLayout(self.project_info.base_directory) + subject = self.project_info.subject.split("-")[1] + + sessions = bids_layout.get( + target="session", return_type="id", subject=subject + ) + + if len(sessions) > 0: + print(" .. 
INFO: Input dataset has sessions") + print(sessions) + + self.project_info.subject_sessions = [] + + for ses in sessions: + self.project_info.subject_sessions.append("ses-" + str(ses)) + + np_res = self.project_info.configure_traits(view="subject_session_view") + + if not np_res: + aborded = True + break + + self.project_info.anat_config_file = os.path.join( + self.project_info.base_directory, + "derivatives", + __cmp_directory__, + "{}".format(self.project_info.subject), + "{}".format(self.project_info.subject_session), + "{}_{}_anatomical_config.json".format( + self.project_info.subject, self.project_info.subject_session + ), + ) + if os.access(self.project_info.anat_config_file, os.F_OK): + print("> Initialize anatomical pipeline") + self.anat_pipeline = project.init_anat_project( + self.project_info, False + ) + else: + self.anat_pipeline = None + + if self.dmri_inputs_checked: + self.project_info.dmri_config_file = os.path.join( + self.project_info.base_directory, + "derivatives", + __cmp_directory__, + "{}".format(self.project_info.subject), + "{}".format(self.project_info.subject_session), + "{}_{}_diffusion_config.json".format( + self.project_info.subject, self.project_info.subject_session + ), + ) + if os.access(self.project_info.dmri_config_file, os.F_OK): + print("> Initialize diffusion pipeline") + ( + dmri_valid_inputs, + self.dmri_pipeline, + ) = project.init_dmri_project( + self.project_info, bids_layout, False + ) + else: + self.dmri_pipeline = None + + # self.dmri_pipeline.subject = self.project_info.subject + # self.dmri_pipeline.global_conf.subject = self.project_info.subject + + if self.fmri_inputs_checked: + self.project_info.fmri_config_file = os.path.join( + self.project_info.base_directory, + "derivatives", + __cmp_directory__, + "{}".format(self.project_info.subject), + "{}".format(self.project_info.subject_session), + "{}_{}_fMRI_config.json".format( + self.project_info.subject, self.project_info.subject_session + ), + ) + if os.access(self.project_info.fmri_config_file, os.F_OK): + print("> Initialize fMRI pipeline") + ( + fmri_valid_inputs, + self.fmri_pipeline, + ) = project.init_fmri_project( + self.project_info, bids_layout, False + ) + else: + self.fmri_pipeline = None + + # self.fmri_pipeline.subject = self.project_info.subject + # self.fmri_pipeline.global_conf.subject = self.project_info.subject + + # self.anat_pipeline.global_conf.subject_session = self.project_info.subject_session + + # if self.dmri_pipeline is not None: + # self.dmri_pipeline.global_conf.subject_session = self.project_info.subject_session + # + # if self.fmri_pipeline is not None: + # self.fmri_pipeline.global_conf.subject_session = self.project_info.subject_session + + print( + " .. INFO: Selected session %s" % self.project_info.subject_session + ) + if self.anat_pipeline is not None: + self.anat_pipeline.stages[ + "Segmentation" + ].config.freesurfer_subject_id = os.path.join( + self.project_info.base_directory, + "derivatives", + __freesurfer_directory__, + "{}_{}".format( + self.project_info.subject, self.project_info.subject_session + ), + ) + else: + print(" .. 
INFO: No session detected") + self.project_info.anat_config_file = os.path.join( + self.project_info.base_directory, + "derivatives", + __cmp_directory__, + "{}".format(self.project_info.subject), + "{}_anatomical_config.json".format(self.project_info.subject), + ) + if os.access(self.project_info.anat_config_file, os.F_OK): + self.anat_pipeline = project.init_anat_project( + self.project_info, False + ) + else: + self.anat_pipeline = None + + if self.dmri_inputs_checked: + self.project_info.dmri_config_file = os.path.join( + self.project_info.base_directory, + "derivatives", + __cmp_directory__, + "{}".format(self.project_info.subject), + "{}_diffusion_config.json".format(self.project_info.subject), + ) + if os.access(self.project_info.dmri_config_file, os.F_OK): + ( + dmri_valid_inputs, + self.dmri_pipeline, + ) = project.init_dmri_project( + self.project_info, bids_layout, False + ) + else: + self.dmri_pipeline = None + + # self.dmri_pipeline.subject = self.project_info.subject + # self.dmri_pipeline.global_conf.subject = self.project_info.subject + + if self.fmri_inputs_checked: + self.project_info.fmri_config_file = os.path.join( + self.project_info.base_directory, + "derivatives", + __cmp_directory__, + "{}".format(self.project_info.subject), + "{}_fMRI_config.json".format(self.project_info.subject), + ) + if os.access(self.project_info.fmri_config_file, os.F_OK): + ( + fmri_valid_inputs, + self.fmri_pipeline, + ) = project.init_fmri_project( + self.project_info, bids_layout, False + ) + else: + self.fmri_pipeline = None + + # self.fmri_pipeline.subject = self.project_info.subject + # self.fmri_pipeline.global_conf.subject = self.project_info.subject + + # self.anat_pipeline.global_conf.subject_session = '' + if self.anat_pipeline is not None: + self.anat_pipeline.stages[ + "Segmentation" + ].config.freesurfer_subjects_dir = os.path.join( + self.project_info.base_directory, + "derivatives", + __freesurfer_directory__, + "{}".format(self.project_info.subject), + ) + + if self.anat_pipeline is not None: + print("> Anatomical pipeline output inspection") + self.anat_pipeline.view_mode = "inspect_outputs_view" + for stage in list(self.anat_pipeline.stages.values()): + print(" ... Inspect stage {}".format(stage)) + stage.define_inspect_outputs() + # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs)) + if (len(stage.inspect_outputs) > 0) and ( + stage.inspect_outputs[0] != "Outputs not available" + ): + self.output_anat_available = True + + if self.dmri_pipeline is not None: + print("> Diffusion pipeline output inspection") + self.dmri_pipeline.view_mode = "inspect_outputs_view" + for stage in list(self.dmri_pipeline.stages.values()): + print(" ... Inspect stage {}".format(stage)) + stage.define_inspect_outputs() + # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs)) + if (len(stage.inspect_outputs) > 0) and ( + stage.inspect_outputs[0] != "Outputs not available" + ): + self.output_dmri_available = True + + if self.fmri_pipeline is not None: + print("> fMRI pipeline output inspection") + self.fmri_pipeline.view_mode = "inspect_outputs_view" + for stage in list(self.fmri_pipeline.stages.values()): + print(" ... Inspect stage {}".format(stage)) + stage.define_inspect_outputs() + # print('Stage {}: {}'.format(stage.stage_dir, stage.inspect_outputs)) + if (len(stage.inspect_outputs) > 0) and ( + stage.inspect_outputs[0] != "Outputs not available" + ): + self.output_fmri_available = True + + print_blue( + " .. 
Anatomical output(s) available : %s" % self.output_anat_available + ) + print_blue( + " .. Diffusion output(s) available : %s" % self.output_dmri_available + ) + print_blue( + " .. fMRI output(s) available : %s" % self.output_fmri_available + ) + + if ( + self.output_anat_available + or self.output_dmri_available + or self.output_fmri_available + ): + valid_selected_subject = True + else: + self.error_msg = ( + " .. ERROR: No output available! " + + "Please select another subject (and session if any)!" + ) + print_error(self.error_msg) + select = error( + message=self.error_msg, title="Error", buttons=["OK", "Cancel"] + ) + aborded = not select + + return aborded + + def update_diffusion_imaging_model(self, new): + """Function called when ``diffusion_imaging_model`` is updated.""" + self.dmri_pipeline.diffusion_imaging_model = new diff --git a/cmp/bidsappmanager/gui/traits.py b/cmp/bidsappmanager/gui/traits.py new file mode 100644 index 000000000..524e2fc5e --- /dev/null +++ b/cmp/bidsappmanager/gui/traits.py @@ -0,0 +1,33 @@ +# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors +# All rights reserved. +# +# This software is distributed under the open-source license Modified BSD. + +"""Module that defines traits-based classes for Connectome Mapper 3 BIDS App Interface TraitsUI View.""" + +from traits.api import Property +from traitsui.api import TabularAdapter + + +class MultiSelectAdapter(TabularAdapter): + """This adapter is used by left and right tables for selection of subject to be processed.""" + + # Titles and column names for each column of a table. + # In this example, each table has only one column. + columns = [("", "myvalue")] + width = 100 + + # Magically named trait which gives the display text of the column named + # 'myvalue'. This is done using a Traits Property and its getter: + myvalue_text = Property + + def _get_myvalue_text(self): + """The getter for Property 'myvalue_text'. + + It simply takes the value of the corresponding item in the list + being displayed in this table. A more complicated example could + format the item before displaying it. 
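+
+        For example, if ``self.item`` is ``"01"``, the displayed text is
+        ``"sub-01"``.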
+ """ + return f"sub-{self.item}" + diff --git a/cmp/bidsappmanager/pipelines/anatomical/anatomical.py b/cmp/bidsappmanager/pipelines/anatomical/anatomical.py index 3e3a5ea86..fc8f12f51 100644 --- a/cmp/bidsappmanager/pipelines/anatomical/anatomical.py +++ b/cmp/bidsappmanager/pipelines/anatomical/anatomical.py @@ -104,7 +104,7 @@ def __init__(self, project_info): Parameters ----------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo CMP_Project_Info object that stores general information such as the BIDS root and output directories (see :class_`cmp.project.CMP_Project_Info` for more details) diff --git a/cmp/bidsappmanager/pipelines/diffusion/diffusion.py b/cmp/bidsappmanager/pipelines/diffusion/diffusion.py index 60d473cc6..5ff957884 100644 --- a/cmp/bidsappmanager/pipelines/diffusion/diffusion.py +++ b/cmp/bidsappmanager/pipelines/diffusion/diffusion.py @@ -158,7 +158,7 @@ def __init__(self, project_info): Parameters ----------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo CMP_Project_Info object that stores general information such as the BIDS root and output directories (see :class_`cmp.project.CMP_Project_Info` for more details) diff --git a/cmp/bidsappmanager/pipelines/functional/fMRI.py b/cmp/bidsappmanager/pipelines/functional/fMRI.py index 3104054d5..8d59cb145 100644 --- a/cmp/bidsappmanager/pipelines/functional/fMRI.py +++ b/cmp/bidsappmanager/pipelines/functional/fMRI.py @@ -145,7 +145,7 @@ def __init__(self, project_info): Parameters ----------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo CMP_Project_Info object that stores general information such as the BIDS root and output directories (see :class_`cmp.project.CMP_Project_Info` for more details) diff --git a/cmp/bidsappmanager/project.py b/cmp/bidsappmanager/project.py index bc4494011..31991e3a4 100644 --- a/cmp/bidsappmanager/project.py +++ b/cmp/bidsappmanager/project.py @@ -11,22 +11,21 @@ import fnmatch import glob import shutil -import multiprocessing -from subprocess import Popen -from traitsui.api import * -from traits.api import * import warnings -from bids import BIDSLayout -from pyface.api import FileDialog, OK - # Own imports -from cmp.bidsappmanager import gui +from traits.trait_types import Enum, Bool, String, Password, Directory, List, Button +from traitsui.editors import EnumEditor +from traitsui.group import VGroup, HGroup, Group +from traitsui.include import Include +from traitsui.item import Item, spring +from traitsui.qt4.extra.qt_view import QtView +from traitsui.view import View + +import cmp.project + from cmtklib.bids.io import __cmp_directory__, __nipype_directory__, __freesurfer_directory__ -from cmp.bidsappmanager.pipelines.anatomical import anatomical as anatomical_pipeline -from cmp.bidsappmanager.pipelines.diffusion import diffusion as diffusion_pipeline -from cmp.bidsappmanager.pipelines.functional import fMRI as fMRI_pipeline from cmtklib.config import ( anat_load_config_json, anat_save_config, @@ -34,15 +33,15 @@ dmri_save_config, fmri_load_config_json, fmri_save_config, - get_anat_process_detail_json, - get_dmri_process_detail_json, - get_fmri_process_detail_json, - convert_config_ini_2_json, ) from cmtklib.util import ( - print_blue, print_warning, print_error + print_warning, print_error ) -from cmtklib.process import run + +from cmp.bidsappmanager.gui.globals import modal_width +from cmp.bidsappmanager.pipelines.anatomical import anatomical as anatomical_pipeline 
+from cmp.bidsappmanager.pipelines.diffusion import diffusion as diffusion_pipeline +from cmp.bidsappmanager.pipelines.functional import fMRI as fMRI_pipeline warnings.filterwarnings( "ignore", message="No valid root directory found for domain 'derivatives'." @@ -162,8 +161,8 @@ def init_dmri_project(project_info, bids_layout, is_new_project, gui=True): Parameters ---------- - project_info : CMP_Project_InfoUI - Instance of :class:`CMP_Project_InfoUI` class + project_info : ProjectInfoUI + Instance of :class:`ProjectInfoUI` class bids_layout : bids.BIDSLayout PyBIDS BIDS Layout object describing the BIDS dataset @@ -257,8 +256,8 @@ def init_fmri_project(project_info, bids_layout, is_new_project, gui=True): Parameters ---------- - project_info : CMP_Project_InfoUI - Instance of :class:`CMP_Project_InfoUI` class + project_info : ProjectInfoUI + Instance of :class:`ProjectInfoUI` class bids_layout : bids.BIDSLayout PyBIDS BIDS Layout object describing the BIDS dataset @@ -349,8 +348,8 @@ def init_anat_project(project_info, is_new_project): Parameters ---------- - project_info : CMP_Project_InfoUI - Instance of :class:`CMP_Project_InfoUI` class + project_info : ProjectInfoUI + Instance of :class:`ProjectInfoUI` class is_new_project : bool If True, this is a new project which has been never processed @@ -425,8 +424,8 @@ def update_anat_last_processed(project_info, pipeline): Parameters ---------- - project_info : CMP_Project_InfoUI - Instance of :class:`CMP_Project_InfoUI` class + project_info : ProjectInfoUI + Instance of :class:`ProjectInfoUI` class pipeline : AnatomicalPipelineUI Instance of :class:`AnatomicalPipelineUI` @@ -491,8 +490,8 @@ def update_dmri_last_processed(project_info, pipeline): Parameters ---------- - project_info : CMP_Project_InfoUI - Instance of :class:`CMP_Project_InfoUI` class + project_info : ProjectInfoUI + Instance of :class:`ProjectInfoUI` class pipeline : DiffusionPipelineUI Instance of :class:`DiffusionPipelineUI` @@ -553,8 +552,8 @@ def update_fmri_last_processed(project_info, pipeline): Parameters ---------- - project_info : CMP_Project_InfoUI - Instance of :class:`CMP_Project_InfoUI` class + project_info : ProjectInfoUI + Instance of :class:`ProjectInfoUI` class pipeline : fMRIPipelineUI Instance of :class:`fMRIPipelineUI` @@ -610,2220 +609,497 @@ def update_fmri_last_processed(project_info, pipeline): project_info.dmri_last_stage_processed = stage -class CMP_ConfigQualityWindowHandler(Handler): - """Event handler of the Configurator and Inspector (Quality Control) windows. 
- - Attributes - ---------- - project_loaded : traits.Bool - Indicate if project has been successfully loaded - (Default: False) - - anat_pipeline : Instance(HasTraits) - Instance of :class:`AnatomicalPipelineUI` class - - anat_inputs_checked : traits.Bool - Indicate if anatomical pipeline inputs are available - (Default: False) - - anat_outputs_checked : traits.Bool - Indicate if anatomical pipeline outputs are available - (Default: False) - - anatomical_processed : traits.Bool - Indicate if anatomical pipeline was run - (Default: False) - - dmri_pipeline : Instance(HasTraits) - Instance of :class:`DiffusionPipelineUI` class - - dmri_inputs_checked : traits.Bool - Indicate if diffusion pipeline inputs are available - (Default: False) - - dmri_processed : traits.Bool - Indicate if diffusion pipeline was run - (Default: False) - - fmri_pipeline : Instance(HasTraits) - Instance of :class:`fMRIPipelineUI` class - - fmri_inputs_checked : traits.Bool - Indicate if fMRI pipeline inputs are available - (Default: False) - - fmri_processed : traits.Bool - Indicate if fMRI pipeline was run - (Default: False) - """ - - project_loaded = Bool(False) - - anat_pipeline = Instance(HasTraits) - anat_inputs_checked = Bool(False) - anat_outputs_checked = Bool(False) - anatomical_processed = Bool(False) - - dmri_pipeline = Instance(HasTraits) - dmri_inputs_checked = Bool(False) - dmri_processed = Bool(False) - - fmri_pipeline = Instance(HasTraits) - fmri_inputs_checked = Bool(False) - fmri_processed = Bool(False) - - def new_project(self, ui_info): - """Function that creates a new :class:`CMP_Project_InfoUI` instance. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - print("> Load Project") - new_project = gui.CMP_Project_InfoUI() - np_res = new_project.configure_traits(view="create_view") - ui_info.ui.context["object"].handler = self - - if np_res and os.path.exists(new_project.base_directory): - try: - bids_layout = BIDSLayout(new_project.base_directory) - new_project.bids_layout = bids_layout - print(bids_layout) - - for subj in bids_layout.get_subjects(): - if "sub-" + str(subj) not in new_project.subjects: - new_project.subjects.append("sub-" + str(subj)) - - print(" .. INFO: Available subjects : ") - print(new_project.subjects) - new_project.number_of_subjects = len(new_project.subjects) - - np_res = new_project.configure_traits(view="subject_view") - print(" .. INFO: Selected subject : " + new_project.subject) - - subject = new_project.subject.split("-")[1] - new_project.subject_sessions = [""] - new_project.subject_session = "" - - sessions = bids_layout.get( - target="session", return_type="id", subject=subject - ) - - if len(sessions) > 0: - print("Warning: multiple sessions") - for ses in sessions: - new_project.subject_sessions.append("ses-" + str(ses)) - np_res = new_project.configure_traits(view="subject_session_view") - print( - " .. INFO: Selected session : " + new_project.subject_session - ) - - except Exception as e: - msg = "Invalid BIDS dataset. Please see documentation for more details." - print_warning(f" .. 
EXCEPTION: {msg}") - print_error(f" : {e}") - error(message=msg, title="BIDS error") - return - - self.anat_pipeline = init_anat_project(new_project, True) - if self.anat_pipeline is not None: - anat_inputs_checked = self.anat_pipeline.check_input(bids_layout) - if anat_inputs_checked: - ui_info.ui.context["object"].project_info = new_project - self.anat_pipeline.number_of_cores = new_project.number_of_cores - ui_info.ui.context["object"].anat_pipeline = self.anat_pipeline - self.anat_inputs_checked = anat_inputs_checked - ui_info.ui.context[ - "object" - ].project_info.t1_available = self.anat_inputs_checked - - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_subject_anat_pipeline, - "subject", - ) - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_session_anat_pipeline, - "subject_session", - ) - anat_save_config( - self.anat_pipeline, - ui_info.ui.context["object"].project_info.anat_config_file, - ) - self.project_loaded = True - - ui_info.ui.context[ - "object" - ].project_info.parcellation_scheme = get_anat_process_detail_json( - new_project, "parcellation_stage", "parcellation_scheme" - ) - ui_info.ui.context[ - "object" - ].project_info.freesurfer_subjects_dir = get_anat_process_detail_json( - new_project, "segmentation_stage", "freesurfer_subjects_dir" - ) - ui_info.ui.context[ - "object" - ].project_info.freesurfer_subject_id = get_anat_process_detail_json( - new_project, "segmentation_stage", "freesurfer_subject_id" - ) - - dmri_inputs_checked, self.dmri_pipeline = init_dmri_project( - new_project, bids_layout, True - ) - if self.dmri_pipeline is not None: - if dmri_inputs_checked: - self.dmri_pipeline.number_of_cores = ( - new_project.number_of_cores - ) - print( - " .. INFO: Number of cores (pipeline) = %s" - % self.dmri_pipeline.number_of_cores - ) - self.dmri_pipeline.parcellation_scheme = ui_info.ui.context[ - "object" - ].project_info.parcellation_scheme - ui_info.ui.context[ - "object" - ].dmri_pipeline = self.dmri_pipeline - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context[ - "object" - ].update_subject_dmri_pipeline, - "subject", - ) - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context[ - "object" - ].update_session_dmri_pipeline, - "subject_session", - ) - dmri_save_config( - self.dmri_pipeline, - ui_info.ui.context[ - "object" - ].project_info.dmri_config_file, - ) - self.dmri_inputs_checked = dmri_inputs_checked - ui_info.ui.context[ - "object" - ].project_info.dmri_available = self.dmri_inputs_checked - self.project_loaded = True - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context[ - "object" - ].update_diffusion_imaging_model, - "diffusion_imaging_model", - ) - - fmri_inputs_checked, self.fmri_pipeline = init_fmri_project( - new_project, bids_layout, True - ) - if self.fmri_pipeline is not None: - if fmri_inputs_checked: - self.fmri_pipeline.number_of_cores = ( - new_project.number_of_cores - ) - print( - " .. 
INFO: Number of cores (pipeline) = %s" - % self.fmri_pipeline.number_of_cores - ) - self.fmri_pipeline.parcellation_scheme = ui_info.ui.context[ - "object" - ].project_info.parcellation_scheme - self.fmri_pipeline.subjects_dir = ui_info.ui.context[ - "object" - ].project_info.freesurfer_subjects_dir - self.fmri_pipeline.subject_id = ui_info.ui.context[ - "object" - ].project_info.freesurfer_subject_id - ui_info.ui.context[ - "object" - ].fmri_pipeline = self.fmri_pipeline - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context[ - "object" - ].update_subject_fmri_pipeline, - "subject", - ) - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context[ - "object" - ].update_session_fmri_pipeline, - "subject_session", - ) - fmri_save_config( - self.fmri_pipeline, - ui_info.ui.context[ - "object" - ].project_info.fmri_config_file, - ) - self.fmri_inputs_checked = fmri_inputs_checked - ui_info.ui.context[ - "object" - ].project_info.fmri_available = self.fmri_inputs_checked - self.project_loaded = True - - def load_project(self, ui_info): - """Function that creates a new :class:`CMP_Project_InfoUI` instance from an existing project. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - print("> Load Project") - loaded_project = gui.CMP_Project_InfoUI() - np_res = loaded_project.configure_traits(view="open_view") - ui_info.ui.context["object"].handler = self - - print(" .. INFO: BIDS directory: %s" % loaded_project.base_directory) - try: - bids_layout = BIDSLayout(loaded_project.base_directory) - loaded_project.bids_layout = bids_layout - - loaded_project.subjects = [] - for subj in bids_layout.get_subjects(): - if "sub-" + str(subj) not in loaded_project.subjects: - loaded_project.subjects.append("sub-" + str(subj)) - loaded_project.subjects.sort() - - print(" .. INFO: Available subjects : ") - print(loaded_project.subjects) - loaded_project.number_of_subjects = len(loaded_project.subjects) - - except ValueError as e: - msg = str(e) - error(message=msg, title="BIDS error") - return - except Exception: - error( - message="Invalid BIDS dataset. 
Please see documentation for more details.", - title="BIDS error", - ) - return - - self.anat_inputs_checked = False - - if np_res and os.path.exists(loaded_project.base_directory): - sessions = [] - for subj in bids_layout.get_subjects(): - subj_sessions = bids_layout.get( - target="session", return_type="id", subject=subj - ) - for subj_session in subj_sessions: - sessions.append(subj_session) - - loaded_project.anat_available_config = [] - - for subj in bids_layout.get_subjects(): - subj_sessions = bids_layout.get( - target="session", return_type="id", subject=subj - ) - if len(subj_sessions) > 0: - for subj_session in subj_sessions: - config_file = os.path.join( - loaded_project.base_directory, - "derivatives", - "sub-%s_ses-%s_anatomical_config.json" - % (subj, subj_session), - ) - if os.path.isfile(config_file): - loaded_project.anat_available_config.append( - "sub-%s_ses-%s" % (subj, subj_session) - ) - else: - config_file = os.path.join( - loaded_project.base_directory, - "derivatives", - "sub-%s_anatomical_config.json" % subj, - ) - if os.path.isfile(config_file): - loaded_project.anat_available_config.append("sub-%s" % subj) - - if len(loaded_project.anat_available_config) > 1: - loaded_project.anat_available_config.sort() - loaded_project.anat_config_to_load = ( - loaded_project.anat_available_config[0] - ) - anat_config_selected = loaded_project.configure_traits( - view="anat_select_config_to_load" - ) - - if not anat_config_selected: - return 0 - else: - loaded_project.anat_config_to_load = ( - loaded_project.anat_available_config[0] - ) - - print( - " .. INFO: Anatomical config to load: %s" - % loaded_project.anat_config_to_load - ) - loaded_project.anat_config_file = os.path.join( - loaded_project.base_directory, - "derivatives", - "%s_anatomical_config.json" % loaded_project.anat_config_to_load, - ) - print( - " .. INFO: Anatomical config file: %s" - % loaded_project.anat_config_file - ) - - loaded_project.subject = get_anat_process_detail_json( - loaded_project, "Global", "subject" - ) - loaded_project.subject_sessions = [ - "ses-%s" % s - for s in bids_layout.get( - target="session", - return_type="id", - subject=loaded_project.subject.split("-")[1], - ) - ] - if len(loaded_project.subject_sessions) > 0: - print(" .. INFO: Dataset has session(s)") - loaded_project.subject_session = get_anat_process_detail_json( - loaded_project, "Global", "subject_session" - ) - print("Selected session : " + loaded_project.subject_session) - else: - loaded_project.subject_sessions = [""] - loaded_project.subject_session = "" - print(" .. 
INFO: Dataset has no session") - - loaded_project.parcellation_scheme = get_anat_process_detail_json( - loaded_project, "parcellation_stage", "parcellation_scheme" - ) - loaded_project.atlas_info = get_anat_process_detail_json( - loaded_project, "parcellation_stage", "atlas_info" - ) - loaded_project.freesurfer_subjects_dir = get_anat_process_detail_json( - loaded_project, "segmentation_stage", "freesurfer_subjects_dir" - ) - loaded_project.freesurfer_subject_id = get_anat_process_detail_json( - loaded_project, "segmentation_stage", "freesurfer_subject_id" - ) - - self.anat_pipeline = init_anat_project(loaded_project, False) - if self.anat_pipeline is not None: - anat_inputs_checked = self.anat_pipeline.check_input(bids_layout) - if anat_inputs_checked: - update_anat_last_processed( - loaded_project, self.anat_pipeline - ) # Not required as the project is new, so no update should be done on processing status - ui_info.ui.context["object"].project_info = loaded_project - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_subject_anat_pipeline, - "subject", - ) - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_session_anat_pipeline, - "subject_session", - ) - ui_info.ui.context["object"].anat_pipeline = self.anat_pipeline - ui_info.ui.context[ - "object" - ].anat_pipeline.number_of_cores = ui_info.ui.context[ - "object" - ].project_info.number_of_cores - self.anat_inputs_checked = anat_inputs_checked - ui_info.ui.context[ - "object" - ].project_info.t1_available = self.anat_inputs_checked - anat_save_config( - self.anat_pipeline, - ui_info.ui.context["object"].project_info.anat_config_file, - ) - self.project_loaded = True - self.anat_outputs_checked, _ = self.anat_pipeline.check_output() - if self.anat_outputs_checked: - print(" .. INFO: Available outputs") - - loaded_project.dmri_available_config = [] - - subjid = loaded_project.subject.split("-")[1] - subj_sessions = bids_layout.get( - target="session", return_type="id", subject=subjid - ) - - if len(subj_sessions) > 0: - for subj_session in subj_sessions: - config_file = os.path.join( - loaded_project.base_directory, - "derivatives", - "%s_ses-%s_diffusion_config.json" - % (loaded_project.subject, subj_session), - ) - if ( - os.path.isfile(config_file) - and subj_session == loaded_project.subject_session.split("-")[1] - ): - loaded_project.dmri_available_config.append( - "%s_ses-%s" % (loaded_project.subject, subj_session) - ) - else: - config_file = os.path.join( - loaded_project.base_directory, - "derivatives", - "sub-%s_diffusion_config.json" % loaded_project.subject, - ) - if os.path.isfile(config_file): - loaded_project.dmri_available_config.append( - "%s" % loaded_project.subject - ) - - if len(loaded_project.dmri_available_config) > 1: - loaded_project.dmri_available_config.sort() - loaded_project.dmri_config_to_load = ( - loaded_project.dmri_available_config[0] - ) - dmri_config_selected = loaded_project.configure_traits( - view="dmri_select_config_to_load" - ) - if not dmri_config_selected: - return 0 - elif not loaded_project.dmri_available_config: - loaded_project.dmri_config_to_load = ( - "%s_diffusion" % loaded_project.subject - ) - else: - loaded_project.dmri_config_to_load = ( - loaded_project.dmri_available_config[0] - ) - - print( - " .. 
INFO: Diffusion config to load: %s" - % loaded_project.dmri_config_to_load - ) - loaded_project.dmri_config_file = os.path.join( - loaded_project.base_directory, - "derivatives", - "%s_diffusion_config.json" % loaded_project.dmri_config_to_load, - ) - print( - " .. INFO: Diffusion config file: %s" % loaded_project.dmri_config_file - ) - - if os.path.isfile(loaded_project.dmri_config_file): - print(" .. INFO: Load existing diffusion config file") - loaded_project.process_type = get_dmri_process_detail_json( - loaded_project, "Global", "process_type" - ) - loaded_project.diffusion_imaging_model = get_dmri_process_detail_json( - loaded_project, "Global", "diffusion_imaging_model" - ) +class ProjectInfoUI(cmp.project.ProjectInfo): + """Class that extends the :class:`ProjectInfo` with graphical components. - dmri_inputs_checked, self.dmri_pipeline = init_dmri_project( - loaded_project, bids_layout, False - ) - if self.dmri_pipeline is not None: - if dmri_inputs_checked: - update_dmri_last_processed(loaded_project, self.dmri_pipeline) - ui_info.ui.context["object"].project_info = loaded_project - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_subject_dmri_pipeline, - "subject", - ) - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_session_dmri_pipeline, - "subject_session", - ) - self.dmri_pipeline.parcellation_scheme = ( - loaded_project.parcellation_scheme - ) - self.dmri_pipeline.atlas_info = loaded_project.atlas_info - ui_info.ui.context["object"].dmri_pipeline = self.dmri_pipeline - ui_info.ui.context[ - "object" - ].dmri_pipeline.number_of_cores = ui_info.ui.context[ - "object" - ].project_info.number_of_cores - self.dmri_inputs_checked = dmri_inputs_checked - ui_info.ui.context[ - "object" - ].project_info.dmri_available = self.dmri_inputs_checked - dmri_save_config( - self.dmri_pipeline, - ui_info.ui.context["object"].project_info.dmri_config_file, - ) - self.project_loaded = True - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_diffusion_imaging_model, - "diffusion_imaging_model", - ) - else: - dmri_inputs_checked, self.dmri_pipeline = init_dmri_project( - loaded_project, bids_layout, True - ) - print_warning( - " .. WARNING: No existing config for diffusion pipeline found - " - + "Created new diffusion pipeline with default parameters" - ) - if ( - self.dmri_pipeline is not None - ): # and self.dmri_pipeline is not None: - if dmri_inputs_checked: - ui_info.ui.context["object"].project_info = loaded_project - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_subject_dmri_pipeline, - "subject", - ) - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_session_dmri_pipeline, - "subject_session", - ) - self.dmri_pipeline.number_of_cores = ( - loaded_project.number_of_cores - ) - print( - " .. 
INFO: Number of cores (pipeline) = %s" - % self.dmri_pipeline.number_of_cores - ) - self.dmri_pipeline.parcellation_scheme = ( - loaded_project.parcellation_scheme - ) - self.dmri_pipeline.atlas_info = loaded_project.atlas_info - ui_info.ui.context["object"].dmri_pipeline = self.dmri_pipeline - dmri_save_config( - self.dmri_pipeline, - ui_info.ui.context["object"].project_info.dmri_config_file, - ) - self.dmri_inputs_checked = dmri_inputs_checked - ui_info.ui.context[ - "object" - ].project_info.dmri_available = self.dmri_inputs_checked - self.project_loaded = True - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_diffusion_imaging_model, - "diffusion_imaging_model", - ) - - if len(subj_sessions) > 0: - for subj_session in subj_sessions: - config_file = os.path.join( - loaded_project.base_directory, - "derivatives", - "%s_ses-%s_fMRI_config.json" - % (loaded_project.subject, subj_session), - ) - if ( - os.path.isfile(config_file) - and subj_session == loaded_project.subject_session.split("-")[1] - ): - loaded_project.fmri_available_config.append( - "%s_ses-%s" % (loaded_project.subject, subj_session) - ) - else: - config_file = os.path.join( - loaded_project.base_directory, - "derivatives", - "sub-%s_fMRI_config.json" % loaded_project.subject, - ) - if os.path.isfile(config_file): - loaded_project.fmri_available_config.append( - "sub-%s" % loaded_project.subject - ) - - if len(loaded_project.fmri_available_config) > 1: - loaded_project.fmri_available_config.sort() - loaded_project.fmri_config_to_load = ( - loaded_project.fmri_available_config[0] - ) - fmri_config_selected = loaded_project.configure_traits( - view="fmri_select_config_to_load" - ) - if not fmri_config_selected: - return 0 - elif not loaded_project.fmri_available_config: - loaded_project.fmri_config_to_load = "%s_fMRI" % loaded_project.subject - else: - loaded_project.fmri_config_to_load = ( - loaded_project.fmri_available_config[0] - ) + It supports graphically the setting of all processing properties / attributes + of an :class:`ProjectInfo` instance. - print( - " .. INFO: fMRI config to load: %s" - % loaded_project.fmri_config_to_load - ) - loaded_project.fmri_config_file = os.path.join( - loaded_project.base_directory, - "derivatives", - "%s_fMRI_config.json" % loaded_project.fmri_config_to_load, - ) - print(" .. INFO: fMRI config file: %s" % loaded_project.fmri_config_file) - - if os.path.isfile(loaded_project.fmri_config_file): - print(" .. 
INFO: Load existing fmri config file") - loaded_project.process_type = get_fmri_process_detail_json( - loaded_project, "Global", "process_type" - ) - - fmri_inputs_checked, self.fmri_pipeline = init_fmri_project( - loaded_project, bids_layout, False - ) - if self.fmri_pipeline is not None: - if fmri_inputs_checked: - update_fmri_last_processed(loaded_project, self.fmri_pipeline) - ui_info.ui.context["object"].project_info = loaded_project - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_subject_fmri_pipeline, - "subject", - ) - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_session_fmri_pipeline, - "subject_session", - ) - self.fmri_pipeline.parcellation_scheme = ( - loaded_project.parcellation_scheme - ) - self.fmri_pipeline.atlas_info = loaded_project.atlas_info - self.fmri_pipeline.subjects_dir = ( - loaded_project.freesurfer_subjects_dir - ) - self.fmri_pipeline.subject_id = ( - loaded_project.freesurfer_subject_id - ) - ui_info.ui.context["object"].fmri_pipeline = self.fmri_pipeline - ui_info.ui.context[ - "object" - ].fmri_pipeline.number_of_cores = ui_info.ui.context[ - "object" - ].project_info.number_of_cores - self.fmri_inputs_checked = fmri_inputs_checked - ui_info.ui.context[ - "object" - ].project_info.fmri_available = self.fmri_inputs_checked - fmri_save_config( - self.fmri_pipeline, - ui_info.ui.context["object"].project_info.fmri_config_file, - ) - self.project_loaded = True - else: - fmri_inputs_checked, self.fmri_pipeline = init_fmri_project( - loaded_project, bids_layout, True - ) - print_warning( - " .. WARNING: No existing config for fMRI pipeline found - " - + "Created new fMRI pipeline with default parameters" - ) - if self.fmri_pipeline is not None: - if fmri_inputs_checked: - ui_info.ui.context["object"].project_info = loaded_project - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_subject_fmri_pipeline, - "subject", - ) - ui_info.ui.context["object"].project_info.on_trait_change( - ui_info.ui.context["object"].update_session_fmri_pipeline, - "subject_session", - ) - self.fmri_pipeline.number_of_cores = ( - loaded_project.number_of_cores - ) - print( - " .. INFO: Number of cores (pipeline) = %s" - % self.fmri_pipeline.number_of_cores - ) - self.fmri_pipeline.parcellation_scheme = ( - loaded_project.parcellation_scheme - ) - self.fmri_pipeline.atlas_info = loaded_project.atlas_info - self.fmri_pipeline.subjects_dir = ( - loaded_project.freesurfer_subjects_dir - ) - self.fmri_pipeline.subject_id = ( - loaded_project.freesurfer_subject_id - ) - ui_info.ui.context["object"].fmri_pipeline = self.fmri_pipeline - fmri_save_config( - self.fmri_pipeline, - ui_info.ui.context["object"].project_info.fmri_config_file, - ) - self.fmri_inputs_checked = fmri_inputs_checked - ui_info.ui.context[ - "object" - ].project_info.fmri_available = self.fmri_inputs_checked - self.project_loaded = True - - def update_subject_anat_pipeline(self, ui_info): - """Function that updates attributes of the :class:`AnatomicalPipelineUI` instance. 
- - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - ui_info.handler = self - - self.anat_pipeline.subject = ui_info.project_info.subject - self.anat_pipeline.global_conf.subject = ui_info.project_info.subject - - updated_project = ui_info.project_info - - bids_layout = BIDSLayout(updated_project.base_directory) - - if len(updated_project.subject_sessions) > 0: - self.anat_pipeline.global_conf.subject_session = ( - updated_project.subject_session - ) - self.anat_pipeline.subject_directory = os.path.join( - updated_project.base_directory, - updated_project.subject, - updated_project.subject_session, - ) - updated_project.anat_config_file = os.path.join( - updated_project.base_directory, - "derivatives", - "%s_%s_anatomical_config.json" - % (updated_project.subject, updated_project.subject_session), - ) - else: - self.anat_pipeline.global_conf.subject_session = "" - self.anat_pipeline.subject_directory = os.path.join( - updated_project.base_directory, updated_project.subject - ) - updated_project.anat_config_file = os.path.join( - updated_project.base_directory, - "derivatives", - "%s_anatomical_config.json" % updated_project.subject, - ) - - self.anat_pipeline.derivatives_directory = os.path.join( - updated_project.base_directory, "derivatives" - ) - - if os.path.isfile(updated_project.anat_config_file): - print( - " .. INFO: Existing anatomical config file for subject %s: %s" - % (updated_project.subject, updated_project.anat_config_file) - ) + Attributes + ----------- + creation_mode : traits.Enum + Mode for loading the dataset. Valid values are + 'Load BIDS dataset', 'Install Datalad BIDS dataset' - updated_project.parcellation_scheme = get_anat_process_detail_json( - updated_project, "parcellation_stage", "parcellation_scheme" - ) - updated_project.atlas_info = get_anat_process_detail_json( - updated_project, "parcellation_stage", "atlas_info" - ) - updated_project.freesurfer_subjects_dir = get_anat_process_detail_json( - updated_project, "segmentation_stage", "freesurfer_subjects_dir" - ) - updated_project.freesurfer_subject_id = get_anat_process_detail_json( - updated_project, "segmentation_stage", "freesurfer_subject_id" - ) + install_datalad_dataset_via_ssh : traits.Bool + If set to True install the datalad dataset from a remote server + via ssh.(True by default) - self.anat_pipeline = init_anat_project(updated_project, False) - if self.anat_pipeline is not None: - anat_inputs_checked = self.anat_pipeline.check_input(bids_layout) - if anat_inputs_checked: - update_anat_last_processed( - updated_project, self.anat_pipeline - ) # Not required as the project is new, so no update should be done on processing status - ui_info.project_info = updated_project - ui_info.project_info.on_trait_change( - ui_info.update_subject_anat_pipeline, "subject" - ) - ui_info.project_info.on_trait_change( - ui_info.update_session_anat_pipeline, "subject_session" - ) - ui_info.anat_pipeline = self.anat_pipeline - ui_info.anat_pipeline.number_of_cores = ( - ui_info.project_info.number_of_cores - ) - self.anat_inputs_checked = anat_inputs_checked - ui_info.project_info.t1_available = self.anat_inputs_checked - anat_save_config( - self.anat_pipeline, ui_info.project_info.anat_config_file - ) - self.project_loaded = True - self.anat_outputs_checked, msg = self.anat_pipeline.check_output() - if self.anat_outputs_checked: - print(" .. INFO: Available outputs") + ssh_user : traits.Str + Remote server username. 
+ (Required if ``install_datalad_dataset_via_ssh`` is True) - else: - print( - " .. INFO: Unprocessed anatomical data for subject %s" - % updated_project.subject - ) - self.anat_pipeline = init_anat_project(updated_project, True) - if self.anat_pipeline is not None: # and self.dmri_pipeline is not None: - anat_inputs_checked = self.anat_pipeline.check_input(bids_layout) - if anat_inputs_checked: - ui_info.project_info = updated_project - ui_info.project_info.on_trait_change( - ui_info.update_subject_anat_pipeline, "subject" - ) - ui_info.project_info.on_trait_change( - ui_info.update_session_anat_pipeline, "subject_session" - ) - self.anat_pipeline.number_of_cores = updated_project.number_of_cores - ui_info.anat_pipeline = self.anat_pipeline - self.anat_inputs_checked = anat_inputs_checked - ui_info.project_info.t1_available = self.anat_inputs_checked - anat_save_config( - self.anat_pipeline, ui_info.project_info.anat_config_file - ) - self.project_loaded = True + ssh_pwd + Remote server password. + (Required if ``install_datalad_dataset_via_ssh`` is True) - ui_info.project_info.parcellation_scheme = get_anat_process_detail_json( - updated_project, "parcellation_stage", "parcellation_scheme" - ) - ui_info.project_info.freesurfer_subjects_dir = get_anat_process_detail_json( - updated_project, "segmentation_stage", "freesurfer_subjects_dir" - ) - ui_info.project_info.freesurfer_subject_id = get_anat_process_detail_json( - updated_project, "segmentation_stage", "freesurfer_subject_id" - ) + ssh_remote : traits.Str + Remote server IP or URL. + (Required if ``install_datalad_dataset_via_ssh`` is True) - return ui_info + datalad_dataset_path : traits.Directory + Path to the datalad dataset on the remote server. + (Required if ``install_datalad_dataset_via_ssh`` is True) - def update_subject_dmri_pipeline(self, ui_info): - """Function that updates attributes of the :class:`DiffusionPipelineUI` instance. 
+ summary_view_button : traits.ui.Button + Button that shows the pipeline processing summary table - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - self.dmri_pipeline.subject = ui_info.project_info.subject - self.dmri_pipeline.global_conf.subject = ui_info.project_info.subject + pipeline_processing_summary_view : traits.ui.VGroup + TraitsUI VGroup that contains ``Item('pipeline_processing_summary')`` - updated_project = ui_info.project_info + dataset_view : traits.ui.View + TraitsUI View that shows a summary of project settings and + modality available for a given subject - bids_layout = BIDSLayout(updated_project.base_directory) + traits_view : QtView + TraitsUI QtView that includes the View 'dataset_view' - if len(updated_project.subject_sessions) > 0: - self.dmri_pipeline.global_conf.subject_session = ( - updated_project.subject_session - ) - self.dmri_pipeline.subject_directory = os.path.join( - updated_project.base_directory, - updated_project.subject, - updated_project.subject_session, - ) - updated_project.dmri_config_file = os.path.join( - updated_project.base_directory, - "derivatives", - "%s_%s_diffusion_config.json" - % (updated_project.subject, updated_project.subject_session), - ) - else: - self.dmri_pipeline.global_conf.subject_session = "" - self.dmri_pipeline.subject_directory = os.path.join( - updated_project.base_directory, updated_project.subject - ) - updated_project.dmri_config_file = os.path.join( - updated_project.base_directory, - "derivatives", - "%s_diffusion_config.json" % updated_project.subject, - ) + create_view : traits.ui.View + Dialog view to create a BIDS Dataset - self.dmri_pipeline.derivatives_directory = os.path.join( - updated_project.base_directory, "derivatives" - ) + subject_view : traits.ui.View + Dialog view to select of subject - if os.path.isfile(updated_project.dmri_config_file): - print(" .. 
INFO: Load existing diffusion config file") - updated_project.process_type = get_dmri_process_detail_json( - updated_project, "Global", "process_type" - ) - updated_project.diffusion_imaging_model = get_dmri_process_detail_json( - updated_project, "diffusion_stage", "diffusion_imaging_model" - ) + subject_session_view : traits.ui.View + Dialog view to select the subject session - dmri_inputs_checked, self.dmri_pipeline = init_dmri_project( - updated_project, bids_layout, False - ) - if self.dmri_pipeline is not None: # and self.dmri_pipeline is not None: - if dmri_inputs_checked: - update_dmri_last_processed(updated_project, self.dmri_pipeline) - ui_info.project_info = updated_project - ui_info.project_info.on_trait_change( - ui_info.update_subject_dmri_pipeline, "subject" - ) - ui_info.project_info.on_trait_change( - ui_info.update_session_dmri_pipeline, "subject_session" - ) - self.dmri_pipeline.parcellation_scheme = ( - updated_project.parcellation_scheme - ) - self.dmri_pipeline.atlas_info = updated_project.atlas_info - ui_info.dmri_pipeline = self.dmri_pipeline - ui_info.dmri_pipeline.number_of_cores = ( - ui_info.project_info.number_of_cores - ) - self.dmri_inputs_checked = dmri_inputs_checked - ui_info.project_info.dmri_available = self.dmri_inputs_checked - dmri_save_config( - self.dmri_pipeline, ui_info.project_info.dmri_config_file - ) - self.project_loaded = True - ui_info.project_info.on_trait_change( - ui_info.update_diffusion_imaging_model, - "diffusion_imaging_model", - ) - else: - dmri_inputs_checked, self.dmri_pipeline = init_dmri_project( - updated_project, bids_layout, True - ) - print_warning( - " .. WARNING: No existing config for diffusion pipeline found - " - + "Created new diffusion pipeline with default parameters" - ) - if self.dmri_pipeline is not None: # and self.dmri_pipeline is not None: - if dmri_inputs_checked: - ui_info.project_info = updated_project - ui_info.project_info.on_trait_change( - ui_info.update_subject_dmri_pipeline, "subject" - ) - ui_info.project_info.on_trait_change( - ui_info.update_session_dmri_pipeline, "subject_session" - ) - self.dmri_pipeline.number_of_cores = updated_project.number_of_cores - print( - " .. INFO: Number of cores (pipeline) = %s" - % self.dmri_pipeline.number_of_cores - ) - self.dmri_pipeline.parcellation_scheme = ( - updated_project.parcellation_scheme - ) - self.dmri_pipeline.atlas_info = updated_project.atlas_info - ui_info.dmri_pipeline = self.dmri_pipeline - dmri_save_config( - self.dmri_pipeline, ui_info.project_info.dmri_config_file - ) - self.dmri_inputs_checked = dmri_inputs_checked - ui_info.project_info.dmri_available = self.dmri_inputs_checked - self.project_loaded = True - ui_info.project_info.on_trait_change( - ui_info.update_diffusion_imaging_model, - "diffusion_imaging_model", - ) + dmri_bids_acq_view : traits.ui.View + Dialog view to select the diffusion acquisition model - return ui_info + anat_warning_view : traits.ui.View + View that displays a warning message regarding + the anatomical T1w data - def update_subject_fmri_pipeline(self, ui_info): - """Function that updates attributes of the :class:`fMRIPipelineUI` instance. 
+ anat_config_error_view : traits.ui.View + Error view that displays an error message regarding + the configuration of the anatomical pipeline - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - ui_info.handler = self + dmri_warning_view : traits.ui.View + View that displays a warning message regarding + the diffusion MRI data - self.fmri_pipeline.subject = ui_info.project_info.subject - self.fmri_pipeline.global_conf.subject = ui_info.project_info.subject + dmri_config_error_view : traits.ui.View + View that displays an error message regarding + the configuration of the diffusion pipeline - updated_project = ui_info.project_info + fmri_warning_view : traits.ui.View + View that displays a warning message regarding + the functional MRI data - bids_layout = BIDSLayout(updated_project.base_directory) + fmri_config_error_view : traits.ui.View + View that displays an error message regarding + the configuration of the fMRI pipeline - if len(updated_project.subject_sessions) > 0: - self.fmri_pipeline.global_conf.subject_session = ( - updated_project.subject_session - ) - self.fmri_pipeline.subject_directory = os.path.join( - updated_project.base_directory, - ui_info.project_info.subject, - updated_project.subject_session, - ) - updated_project.fmri_config_file = os.path.join( - updated_project.base_directory, - "derivatives", - "%s_%s_fMRI_config.json" - % (updated_project.subject, updated_project.subject_session), - ) - else: - self.fmri_pipeline.global_conf.subject_session = "" - self.fmri_pipeline.subject_directory = os.path.join( - updated_project.base_directory, ui_info.project_info.subject - ) - updated_project.fmri_config_file = os.path.join( - updated_project.base_directory, - "derivatives", - "%s_fMRI_config.json" % updated_project.subject, - ) + open_view : traits.ui.View + Dialog view to load a BIDS Dataset - self.fmri_pipeline.derivatives_directory = os.path.join( - updated_project.base_directory, "derivatives" - ) + anat_select_config_to_load : traits.ui.View + Dialog view to load the configuration file of the anatomical pipeline - print( - " .. INFO: fMRI config file loaded/created : %s" - % updated_project.fmri_config_file - ) + diffusion_imaging_model_select_view : traits.ui.View + Dialog view to select the diffusion acquisition model - if os.path.isfile(updated_project.fmri_config_file): - print( - " .. 
INFO: Load existing fMRI config file for subject %s" - % updated_project.subject - ) - updated_project.process_type = get_fmri_process_detail_json( - updated_project, "Global", "process_type" - ) + dmri_select_config_to_load : traits.ui.View + Dialog view to load the configuration file of the diffusion MRI pipeline - fmri_inputs_checked, self.fmri_pipeline = init_fmri_project( - updated_project, bids_layout, False - ) - if self.fmri_pipeline is not None: - if fmri_inputs_checked: - update_fmri_last_processed(updated_project, self.fmri_pipeline) - ui_info.project_info = updated_project - ui_info.project_info.on_trait_change( - ui_info.update_subject_fmri_pipeline, "subject" - ) - ui_info.project_info.on_trait_change( - ui_info.update_session_fmri_pipeline, "subject_session" - ) - self.fmri_pipeline.parcellation_scheme = ( - updated_project.parcellation_scheme - ) - self.fmri_pipeline.atlas_info = updated_project.atlas_info - self.fmri_pipeline.subjects_dir = ( - updated_project.freesurfer_subjects_dir - ) - self.fmri_pipeline.subject_id = ( - updated_project.freesurfer_subject_id - ) - ui_info.fmri_pipeline = self.fmri_pipeline + fmri_select_config_to_load : traits.ui.View + Dialog view to load the configuration file of the fMRI pipeline + """ - ui_info.fmri_pipeline.number_of_cores = ( - ui_info.project_info.number_of_cores - ) - self.fmri_inputs_checked = fmri_inputs_checked - ui_info.project_info.fmri_available = self.fmri_inputs_checked - fmri_save_config( - self.fmri_pipeline, ui_info.project_info.fmri_config_file - ) - self.project_loaded = True - else: - fmri_inputs_checked, self.fmri_pipeline = init_fmri_project( - updated_project, bids_layout, True - ) - print_warning( - " .. WARNING: No existing config for fMRI pipeline found but available fMRI data - " - + "Created new fMRI pipeline with default parameters" - ) - if self.fmri_pipeline is not None: - if fmri_inputs_checked: - ui_info.project_info = updated_project - ui_info.project_info.on_trait_change( - ui_info.update_subject_fmri_pipeline, "subject" - ) - ui_info.project_info.on_trait_change( - ui_info.update_session_fmri_pipeline, "subject_session" - ) - self.fmri_pipeline.number_of_cores = updated_project.number_of_cores - print( - " .. INFO: Number of cores (pipeline) = %s" - % self.fmri_pipeline.number_of_cores - ) - self.fmri_pipeline.parcellation_scheme = ( - updated_project.parcellation_scheme - ) - self.fmri_pipeline.atlas_info = updated_project.atlas_info - self.fmri_pipeline.subjects_dir = ( - updated_project.freesurfer_subjects_dir - ) - self.fmri_pipeline.subject_id = ( - updated_project.freesurfer_subject_id - ) - ui_info.fmri_pipeline = self.fmri_pipeline - fmri_save_config( - self.fmri_pipeline, ui_info.project_info.fmri_config_file - ) - self.fmri_inputs_checked = fmri_inputs_checked - ui_info.project_info.fmri_available = self.fmri_inputs_checked - self.project_loaded = True - - return ui_info - - @classmethod - def show_bidsapp_window(ui_info): - """Function that shows the BIDS App Interface Window. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with this handler - """ - print("Show BIDS App interface") - ui_info.ui.context["object"].show_bidsapp_interface() - - @classmethod - def save_anat_config_file(self, ui_info): - """Function that saves the anatomical pipeline configuration file. 
- - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - print_blue("[Save anatomical pipeline configuration]") - dialog = FileDialog( - action="save as", - default_filename=os.path.join( - ui_info.ui.context["object"].project_info.base_directory, - "code", - "ref_anatomical_config.json", + creation_mode = Enum("Load BIDS dataset", "Install Datalad BIDS dataset") + install_datalad_dataset_via_ssh = Bool(True) + ssh_user = String("remote_username") + ssh_pwd = Password("") + ssh_remote = String("IP address/ Machine name") + datalad_dataset_path = Directory("/shared/path/to/existing/datalad/dataset") + + anat_runs = List() + anat_run = Enum(values="anat_runs") + + dmri_runs = List() + dmri_run = Enum(values="dmri_runs") + + fmri_runs = List() + fmri_run = Enum(values="fmri_runs") + + summary_view_button = Button("Pipeline processing summary") + + pipeline_processing_summary_view = VGroup(Item("pipeline_processing_summary")) + + dataset_view = VGroup( + VGroup( + HGroup( + Item( + "base_directory", + width=-0.3, + style="readonly", + label="", + resizable=True, + ), + Item( + "number_of_subjects", + width=-0.3, + style="readonly", + label="Number of participants", + resizable=True, + ), + "summary_view_button", ), - ) - dialog.open() - if dialog.return_code == OK: - anat_save_config( - ui_info.ui.context["object"].anat_pipeline, - ui_info.ui.context["object"].project_info.anat_config_file, - ) - if ( - dialog.path - != ui_info.ui.context["object"].project_info.anat_config_file - ): - shutil.copy( - ui_info.ui.context["object"].project_info.anat_config_file, - dialog.path, - ) - - def load_anat_config_file(self, ui_info): - """Function that loads the anatomical pipeline configuration file. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - dialog = FileDialog(action="open", wildcard="*anatomical_config.json") - dialog.open() - if dialog.return_code == OK: - if ( - dialog.path - != ui_info.ui.context["object"].project_info.anat_config_file - ): - shutil.copy( - dialog.path, - ui_info.ui.context["object"].project_info.anat_config_file, - ) - anat_load_config_json( - self.anat_pipeline, - ui_info.ui.context["object"].project_info.anat_config_file, - ) - # TODO: load_config (anat_ or dmri_ ?) - - @classmethod - def save_dmri_config_file(self, ui_info): - """Function that saves the diffusion pipeline configuration file. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - print_blue("[Save anatomical pipeline configuration]") - dialog = FileDialog( - action="save as", - default_filename=os.path.join( - ui_info.ui.context["object"].project_info.base_directory, - "code", - "ref_diffusion_config.json", + label="BIDS Dataset", + ), + spring, + HGroup( + Group(Item("subject", style="simple", show_label=True, resizable=True)), + Group( + Item( + "subject_session", style="simple", label="Session", resizable=True + ), + visible_when='subject_session!=""', ), - ) - dialog.open() - if dialog.return_code == OK: - dmri_save_config( - ui_info.ui.context["object"].dmri_pipeline, - ui_info.ui.context["object"].project_info.dmri_config_file, - ) - if ( - dialog.path - != ui_info.ui.context["object"].project_info.dmri_config_file - ): - shutil.copy( - ui_info.ui.context["object"].project_info.dmri_config_file, - dialog.path, - ) - - def load_dmri_config_file(self, ui_info): - """Function that loads the diffusion pipeline configuration file. 
- - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - dialog = FileDialog(action="open", wildcard="*diffusion_config.json") - dialog.open() - if dialog.return_code == OK: - if ( - dialog.path - != ui_info.ui.context["object"].project_info.dmri_config_file - ): - shutil.copy( - dialog.path, - ui_info.ui.context["object"].project_info.dmri_config_file, - ) - dmri_load_config_json( - self.dmri_pipeline, - ui_info.ui.context["object"].project_info.dmri_config_file, - ) - - @classmethod - def save_fmri_config_file(self, ui_info): - """Function that saves the fMRI pipeline configuration file. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - print_blue("[Save anatomical pipeline configuration]") - dialog = FileDialog( - action="save as", - default_filename=os.path.join( - ui_info.ui.context["object"].project_info.base_directory, - "code", - "ref_fMRI_config.json", + springy=True, + ), + spring, + Group( + Item("t1_available", style="readonly", label="T1", resizable=True), + HGroup( + Item( + "dmri_available", + style="readonly", + label="Diffusion", + resizable=True, + ), + Item( + "diffusion_imaging_model", + label="Model", + resizable=True, + enabled_when="dmri_available", + ), ), - ) - dialog.open() - if dialog.return_code == OK: - fmri_save_config( - ui_info.ui.context["object"].fmri_pipeline, - ui_info.ui.context["object"].project_info.fmri_config_file, - ) - if ( - dialog.path - != ui_info.ui.context["object"].project_info.fmri_config_file - ): - shutil.copy( - ui_info.ui.context["object"].project_info.fmri_config_file, - dialog.path, - ) - - def load_fmri_config_file(self, ui_info): - """Function that loads the fMRI pipeline configuration file. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - dialog = FileDialog(action="open", wildcard="*diffusion_config.json") - dialog.open() - if dialog.return_code == OK: - if ( - dialog.path - != ui_info.ui.context["object"].project_info.fmri_config_file - ): - shutil.copy( - dialog.path, - ui_info.ui.context["object"].project_info.fmri_config_file, - ) - fmri_load_config_json( - self.fmri_pipeline, - ui_info.ui.context["object"].project_info.fmri_config_file, - ) - - -class CMP_MainWindowHandler(Handler): - """Event handler of the Configurator and Inspector (Quality Control) windows. 
- - Attributes - ---------- - project_loaded : traits.Bool - Indicate if project has been successfully loaded - (Default: False) - - anat_pipeline : Instance(HasTraits) - Instance of :class:`AnatomicalPipelineUI` class - - anat_inputs_checked : traits.Bool - Indicate if anatomical pipeline inputs are available - (Default: False) - - anat_outputs_checked : traits.Bool - Indicate if anatomical pipeline outputs are available - (Default: False) - - anatomical_processed : traits.Bool - Indicate if anatomical pipeline was run - (Default: False) - - dmri_pipeline : Instance(HasTraits) - Instance of :class:`DiffusionPipelineUI` class - - dmri_inputs_checked : traits.Bool - Indicate if diffusion pipeline inputs are available - (Default: False) - - dmri_processed : traits.Bool - Indicate if diffusion pipeline was run - (Default: False) - - fmri_pipeline : Instance(HasTraits) - Instance of :class:`fMRIPipelineUI` class - - fmri_inputs_checked : traits.Bool - Indicate if fMRI pipeline inputs are available - (Default: False) - - fmri_processed : traits.Bool - Indicate if fMRI pipeline was run - (Default: False) - """ - - project_loaded = Bool(False) - - anat_pipeline = Instance(HasTraits) - anat_inputs_checked = Bool(False) - anat_outputs_checked = Bool(False) - anatomical_processed = Bool(False) - - dmri_pipeline = Instance(HasTraits) - dmri_inputs_checked = Bool(False) - dmri_processed = Bool(False) - - fmri_pipeline = Instance(HasTraits) - fmri_inputs_checked = Bool(False) - fmri_processed = Bool(False) - - def load_dataset(self, ui_info, debug=False): - """Function that creates a new :class:`CMP_Project_InfoUI` instance from an existing project. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - - debug : bool - If True, print more information for debugging - """ - loaded_project = gui.CMP_Project_InfoUI() - np_res = loaded_project.configure_traits(view="open_view") - loaded_project.output_directory = os.path.join( - loaded_project.base_directory, "derivatives" - ) - - if loaded_project.creation_mode == "Install Datalad BIDS dataset": - datalad_is_available = is_tool("datalad") - - if datalad_is_available: - print(">>> Datalad dataset installation...") - if loaded_project.install_datalad_dataset_via_ssh: - if loaded_project.ssh_pwd != "": - os.environ["REMOTEUSERPWD"] = loaded_project.ssh_pwd - cmd = 'datalad install -D "Dataset {} (remote:{}) installed on {}" -s ssh://{}:$REMOTEUSERPWD@{}:{} {}'.format( - loaded_project.datalad_dataset_path, - loaded_project.ssh_remote, - loaded_project.base_directory, - loaded_project.ssh_user, - loaded_project.ssh_remote, - loaded_project.datalad_dataset_path, - loaded_project.base_directory, - ) - else: - cmd = 'datalad install -D "Dataset {} (remote:{}) installed on {}" -s ssh://{}@{}:{} {}'.format( - loaded_project.datalad_dataset_path, - loaded_project.ssh_remote, - loaded_project.base_directory, - loaded_project.ssh_user, - loaded_project.ssh_remote, - loaded_project.datalad_dataset_path, - loaded_project.base_directory, - ) - try: - print_blue("... 
cmd: {}".format(cmd)) - run( - cmd, - env={}, - cwd=os.path.abspath(loaded_project.base_directory), - ) - del os.environ["REMOTEUSERPWD"] - except Exception: - print(" ERROR: Failed to install datalad dataset via ssh") - del os.environ["REMOTEUSERPWD"] - else: - cmd = 'datalad install -D "Dataset {} installed on {}" -s {} {}'.format( - loaded_project.datalad_dataset_path, - loaded_project.base_directory, - loaded_project.datalad_dataset_path, - loaded_project.base_directory, - ) - try: - print_blue("... cmd: {}".format(cmd)) - run( - cmd, - env={}, - cwd=os.path.abspath(loaded_project.base_directory), - ) - except Exception: - print(" ERROR: Failed to install datalad dataset via ssh") - else: - print(" ERROR: Datalad is not installed!") - - # Install dataset via datalad - # datalad install -s ssh://user@IP_ADDRESS:/remote/path/to/ds-example /local/path/to/ds-example - # - - t1_available = False - t2_available = False - diffusion_available = False - fmri_available = False - - # print("Local BIDS dataset: %s" % loaded_project.base_directory) - if np_res: - try: - bids_layout = BIDSLayout(loaded_project.base_directory) - print(bids_layout) - - loaded_project.bids_layout = bids_layout - - loaded_project.subjects = [] - for subj in bids_layout.get_subjects(): - if debug: - print("sub: %s" % subj) - if "sub-" + str(subj) not in loaded_project.subjects: - loaded_project.subjects.append("sub-" + str(subj)) - # loaded_project.subjects = ['sub-'+str(subj) for subj in bids_layout.get_subjects()] - loaded_project.subjects.sort() - - if debug: - print("Available subjects : ") - print(loaded_project.subjects) - loaded_project.number_of_subjects = len(loaded_project.subjects) - - loaded_project.subject = loaded_project.subjects[0] - if debug: - print(loaded_project.subject) - - subject = loaded_project.subject.split("-")[1] - - sessions = bids_layout.get( - target="session", return_type="id", subject=subject - ) - - if debug: - print("Sessions: ") - print(sessions) - - if len(sessions) > 0: - loaded_project.subject_sessions = ["ses-{}".format(sessions[0])] - loaded_project.subject_session = "ses-{}".format(sessions[0]) - else: - loaded_project.subject_sessions = [""] - loaded_project.subject_session = "" - - if len(sessions) > 0: - print( - f" ... Check for available input modalities for subject {subject} of session {sessions[0]}..." - ) - - query_files = [ - f.filename - for f in bids_layout.get( - subject=subject, - session=sessions[0], - suffix="bold", - extensions=["nii", "nii.gz"], - ) - ] - if len(query_files) > 0: - print(" * Available BOLD(s): {}".format(query_files)) - fmri_available = True - - query_files = [ - f.filename - for f in bids_layout.get( - subject=subject, - session=sessions[0], - suffix="T1w", - extensions=["nii", "nii.gz"], - ) - ] - if len(query_files) > 0: - print(" * Available T1w(s): {}".format(query_files)) - t1_available = True - - query_files = [ - f.filename - for f in bids_layout.get( - subject=subject, - session=sessions[0], - suffix="T2w", - extensions=["nii", "nii.gz"], - ) - ] - if len(query_files) > 0: - print(" * Available T2w(s): {}".format(query_files)) - t2_available = True - - query_files = [ - f.filename - for f in bids_layout.get( - subject=subject, - session=sessions[0], - suffix="dwi", - extensions=["nii", "nii.gz"], - ) - ] - if len(query_files) > 0: - print(" * Available DWI(s): {}".format(query_files)) - diffusion_available = True - - else: - print( - f" ... Check for available input modalities for subject {subject}..." 
- ) - query_files = [ - f.filename - for f in bids_layout.get( - subject=subject, suffix="T1w", extensions=["nii", "nii.gz"] - ) - ] - if len(query_files) > 0: - print(" * Available T1w(s): {}".format(query_files)) - t1_available = True - - query_files = [ - f.filename - for f in bids_layout.get( - subject=subject, suffix="T2w", extensions=["nii", "nii.gz"] - ) - ] - if len(query_files) > 0: - print(" * Available T2w(s): {}".format(query_files)) - t2_available = True - - query_files = [ - f.filename - for f in bids_layout.get( - subject=subject, suffix="dwi", extensions=["nii", "nii.gz"] - ) - ] - if len(query_files) > 0: - print(" * Available DWI(s): {}".format(query_files)) - diffusion_available = True - - query_files = [ - f.filename - for f in bids_layout.get( - subject=subject, suffix="bold", extensions=["nii", "nii.gz"] - ) - ] - if len(query_files) > 0: - print(" * Available BOLD(s): {}".format(query_files)) - fmri_available = True - except ValueError as e: - msg = str(e) - error(message=msg, title="BIDS error") - except Exception: - error(message="Invalid BIDS dataset. Please see documentation for more details.", - title="BIDS error") - return - - ui_info.ui.context["object"].project_info = loaded_project - - anat_inputs_checked = False - if t1_available: - anat_inputs_checked = True - - dmri_inputs_checked = False - if t1_available and diffusion_available: - dmri_inputs_checked = True - - if t2_available and debug: - print("T2 available") - - fmri_inputs_checked = False - if t1_available and fmri_available: - fmri_inputs_checked = True - if debug: - print("fmri input check : {}".format(fmri_inputs_checked)) - - self.anat_inputs_checked = anat_inputs_checked - self.dmri_inputs_checked = dmri_inputs_checked - self.fmri_inputs_checked = fmri_inputs_checked - - if anat_inputs_checked: - - self.anat_pipeline = anatomical_pipeline.AnatomicalPipelineUI( - loaded_project - ) - self.anat_pipeline.number_of_cores = loaded_project.number_of_cores - - code_directory = os.path.join(loaded_project.base_directory, "code") - - anat_config_file = os.path.join( - code_directory, "ref_anatomical_config.json" - ) - - # Check for old configuration file with INI format - # when there is no existing json configuration file - # and convert it to JSON format if so - if not os.path.isfile(anat_config_file): - anat_config_ini_file = os.path.join( - code_directory, "ref_anatomical_config.ini" - ) - if os.path.isfile(anat_config_ini_file): - anat_config_file = convert_config_ini_2_json( - anat_config_ini_file - ) - - loaded_project.anat_config_file = anat_config_file - - if self.anat_pipeline is not None and not os.path.isfile( - anat_config_file - ): - if not os.path.exists(code_directory): - try: - os.makedirs(code_directory) - except os.error: - print_warning("%s was already existing" % code_directory) - finally: - print("Created directory %s" % code_directory) - - print(">> Create new reference anatomical config file...") - anat_save_config( - self.anat_pipeline, loaded_project.anat_config_file - ) - else: - print(">> Load reference anatomical config file...") - # if datalad_is_available: - # print('... Datalad get anatomical config file : {}'.format(loaded_project.anat_config_file)) - # cmd = 'datalad run -m "Get reference anatomical config file" bash -c "datalad get code/ref_anatomical_config.json"' - # try: - # print('... 
cmd: {}'.format(cmd)) - # core.run( cmd, env={}, cwd=os.path.abspath(loaded_project.base_directory)) - # except Exception: - # print(" ERROR: Failed to get file") - - anat_load_config_json( - self.anat_pipeline, loaded_project.anat_config_file - ) - - self.anat_pipeline.config_file = loaded_project.anat_config_file - - ui_info.ui.context["object"].anat_pipeline = self.anat_pipeline - loaded_project.t1_available = self.anat_inputs_checked - - loaded_project.parcellation_scheme = self.anat_pipeline.stages[ - "Parcellation" - ].config.parcellation_scheme - loaded_project.freesurfer_subjects_dir = self.anat_pipeline.stages[ - "Segmentation" - ].config.freesurfer_subjects_dir - loaded_project.freesurfer_subject_id = self.anat_pipeline.stages[ - "Segmentation" - ].config.freesurfer_subject_id - - ui_info.ui.context["object"].project_info = loaded_project - - self.project_loaded = True - - if dmri_inputs_checked: - self.dmri_pipeline = diffusion_pipeline.DiffusionPipelineUI( - loaded_project - ) - self.dmri_pipeline.number_of_cores = loaded_project.number_of_cores - self.dmri_pipeline.parcellation_scheme = ui_info.ui.context[ - "object" - ].project_info.parcellation_scheme - - code_directory = os.path.join(loaded_project.base_directory, "code") - dmri_config_file = os.path.join( - code_directory, "ref_diffusion_config.json" - ) - - # Check for old configuration file with INI format - # when there is no existing json configuration file - # and convert it to JSON format if so - if not os.path.isfile(dmri_config_file): - dmri_config_ini_file = os.path.join( - code_directory, "ref_diffusion_config.ini" - ) - if os.path.isfile(dmri_config_ini_file): - dmri_config_file = convert_config_ini_2_json( - dmri_config_ini_file - ) - - loaded_project.dmri_config_file = dmri_config_file - self.dmri_pipeline.config_file = dmri_config_file - - if ( - not os.path.isfile(dmri_config_file) - and self.dmri_pipeline is not None - ): - - # Look for diffusion acquisition model information from filename (acq-*) - if loaded_project.subject_session != "": - session = loaded_project.subject_session.split("-")[1] - diffusion_imaging_models = [ - i - for i in bids_layout.get( - subject=subject, - session=session, - suffix="dwi", - target="acquisition", - return_type="id", - extensions=["nii", "nii.gz"], - ) - ] - if debug: - print( - "DIFFUSION IMAGING MODELS : {}".format( - diffusion_imaging_models - ) - ) - - if len(diffusion_imaging_models) > 0: - if len(diffusion_imaging_models) > 1: - loaded_project.dmri_bids_acqs = ( - diffusion_imaging_models - ) - loaded_project.configure_traits( - view="dmri_bids_acq_view" - ) - else: - loaded_project.dmri_bids_acqs = [ - "{}".format(diffusion_imaging_models[0]) - ] - loaded_project.dmri_bids_acq = ( - diffusion_imaging_models[0] - ) - - if ("dsi" in loaded_project.dmri_bids_acq) or ( - "DSI" in loaded_project.dmri_bids_acq - ): - loaded_project.diffusion_imaging_model = "DSI" - elif ("dti" in loaded_project.dmri_bids_acq) or ( - "DTI" in loaded_project.dmri_bids_acq - ): - loaded_project.diffusion_imaging_model = "DTI" - elif ("hardi" in loaded_project.dmri_bids_acq) or ( - "HARDI" in loaded_project.dmri_bids_acq - ): - loaded_project.diffusion_imaging_model = "HARDI" - elif ("multishell" in loaded_project.dmri_bids_acq) or ( - "MULTISHELL" in loaded_project.dmri_bids_acq - ): - loaded_project.diffusion_imaging_model = ( - "multishell" - ) - else: - loaded_project.diffusion_imaging_model = "DTI" - else: - loaded_project.dmri_bids_acqs = [""] - loaded_project.dmri_bids_acq = "" - 
loaded_project.configure_traits( - view="diffusion_imaging_model_select_view" - ) - - files = [ - f.filename - for f in bids_layout.get( - subject=subject, - session=session, - suffix="dwi", - extensions=["nii", "nii.gz"], - ) - ] - - if debug: - print("****************************************") - print(files) - print("****************************************") - - if loaded_project.dmri_bids_acq != "": - for file in files: - if loaded_project.dmri_bids_acq in file: - dwi_file = file - if debug: - print( - "Loaded DWI file: {}".format(dwi_file) - ) - break - else: - dwi_file = files[0] - else: - diffusion_imaging_models = [ - i - for i in bids_layout.get( - subject=subject, - suffix="dwi", - target="acquisition", - return_type="id", - extensions=["nii", "nii.gz"], - ) - ] - - if len(diffusion_imaging_models) > 0: - if len(diffusion_imaging_models) > 1: - loaded_project.dmri_bids_acqs = ( - diffusion_imaging_models - ) - loaded_project.configure_traits( - view="dmri_bids_acq_view" - ) - else: - loaded_project.dmri_bids_acq = ( - diffusion_imaging_models[0] - ) - - if ("dsi" in loaded_project.dmri_bids_acq) or ( - "DSI" in loaded_project.dmri_bids_acq - ): - loaded_project.diffusion_imaging_model = "DSI" - elif ("dti" in loaded_project.dmri_bids_acq) or ( - "DTI" in loaded_project.dmri_bids_acq - ): - loaded_project.diffusion_imaging_model = "DTI" - elif ("hardi" in loaded_project.dmri_bids_acq) or ( - "HARDI" in loaded_project.dmri_bids_acq - ): - loaded_project.diffusion_imaging_model = "HARDI" - elif ("multishell" in loaded_project.dmri_bids_acq) or ( - "MULTISHELL" in loaded_project.dmri_bids_acq - ): - loaded_project.diffusion_imaging_model = ( - "multishell" - ) - else: - loaded_project.diffusion_imaging_model = "DTI" - else: - loaded_project.dmri_bids_acqs = [""] - loaded_project.dmri_bids_acq = "" - loaded_project.configure_traits( - view="diffusion_imaging_model_select_view" - ) - - self.dmri_pipeline.diffusion_imaging_model = ( - loaded_project.diffusion_imaging_model - ) - self.dmri_pipeline.global_conf.diffusion_imaging_model = ( - loaded_project.diffusion_imaging_model - ) - self.dmri_pipeline.global_conf.dmri_bids_acq = ( - loaded_project.dmri_bids_acq - ) - self.dmri_pipeline.stages[ - "Diffusion" - ].diffusion_imaging_model = ( - loaded_project.diffusion_imaging_model - ) - print(">> Create new reference diffusion config file...") - dmri_save_config(self.dmri_pipeline, dmri_config_file) - else: - print(">> Load reference diffusion config file...") - - # if datalad_is_available: - # print('... Datalad get reference diffusion config file : {}'.format(loaded_project.anat_config_file)) - # cmd = 'datalad run -m "Get reference anatomical config file" bash -c "datalad get code/ref_diffusion_config.json"' - # try: - # print('... cmd: {}'.format(cmd)) - # core.run( cmd, env={}, cwd=os.path.abspath(loaded_project.base_directory)) - # except Exception: - # print(" ERROR: Failed to get file") - - dmri_load_config_json( - self.dmri_pipeline, loaded_project.dmri_config_file - ) - # TODO: check if diffusion imaging model (DSI/DTI/HARDI/multishell) is correct/valid. 
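# A minimal standalone sketch (illustration only; the helper name is hypothetical) of the
# acquisition-label -> diffusion imaging model mapping applied above, which the TODO would
# validate. It mirrors the dsi/dti/hardi/multishell checks and the "DTI" fallback used by
# the GUI when no acquisition label matches.
def guess_diffusion_imaging_model(dmri_bids_acq):
    """Map a BIDS ``acq-*`` label to a CMP3 diffusion imaging model."""
    label = dmri_bids_acq.lower()
    if "dsi" in label:
        return "DSI"
    if "dti" in label:
        return "DTI"
    if "hardi" in label:
        return "HARDI"
    if "multishell" in label:
        return "multishell"
    return "DTI"  # default when the acquisition label is empty or unrecognized

# e.g. guess_diffusion_imaging_model("multishell") -> "multishell"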
- - ui_info.ui.context["object"].dmri_pipeline = self.dmri_pipeline - loaded_project.dmri_available = self.dmri_inputs_checked - - ui_info.ui.context["object"].project_info = loaded_project - - self.project_loaded = True - - if fmri_inputs_checked: - self.fmri_pipeline = fMRI_pipeline.fMRIPipelineUI(loaded_project) - self.fmri_pipeline.number_of_cores = loaded_project.number_of_cores - self.fmri_pipeline.parcellation_scheme = ui_info.ui.context[ - "object" - ].project_info.parcellation_scheme - - self.fmri_pipeline.stages["Registration"].pipeline_mode = "fMRI" - self.fmri_pipeline.stages[ - "Registration" - ].registration_mode = "FSL (Linear)" - self.fmri_pipeline.stages[ - "Registration" - ].registration_mode_trait = ["FSL (Linear)", "BBregister (FS)"] - - code_directory = os.path.join(loaded_project.base_directory, "code") - fmri_config_file = os.path.join( - code_directory, "ref_fMRI_config.json" - ) - - # Check for old configuration file with INI format - # when there is no existing json configuration file - # and convert it to JSON format if so - if not os.path.isfile(fmri_config_file): - fmri_config_ini_file = os.path.join( - code_directory, "ref_fMRI_config.ini" - ) - if os.path.isfile(fmri_config_ini_file): - fmri_config_file = convert_config_ini_2_json( - fmri_config_ini_file - ) - - loaded_project.fmri_config_file = fmri_config_file - self.fmri_pipeline.config_file = fmri_config_file - - if ( - not os.path.isfile(fmri_config_file) - and self.fmri_pipeline is not None - ): - print(">> Create new reference fMRI config file...") - fmri_save_config(self.fmri_pipeline, fmri_config_file) - else: - print(">> Load reference fMRI config file...") - - # if datalad_is_available: - # print('... Datalad get reference fMRI config file : {}'.format(loaded_project.anat_config_file)) - # cmd = 'datalad run -m "Get reference fMRI config file" bash -c "datalad get code/ref_fMRI_config.json"' - # try: - # print('... cmd: {}'.format(cmd)) - # core.run( cmd, env={}, cwd=os.path.abspath(loaded_project.base_directory)) - # except Exception: - # print(" ERROR: Failed to get file") - - fmri_load_config_json( - self.fmri_pipeline, loaded_project.fmri_config_file - ) - - ui_info.ui.context["object"].fmri_pipeline = self.fmri_pipeline - loaded_project.fmri_available = self.fmri_inputs_checked - - ui_info.ui.context["object"].project_info = loaded_project - - self.project_loaded = True - - -class CMP_BIDSAppWindowHandler(Handler): - """Event handler of the BIDS App Interface window. - - Attributes - ---------- - docker_process : subprocess.Popen - Instance of ``subprocess.Popen`` where BIDS App docker image is run - """ - - docker_process = Instance(Popen) - - def check_settings(self, ui_info): - """Function that checks if all parameters are properly set before execution of the BIDS App. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with ``self`` - """ - ui_info.ui.context["object"].settings_checked = True - ui_info.ui.context["object"].handler = self - - if os.path.isdir(ui_info.ui.context["object"].bids_root): - print( - "BIDS root directory : {}".format( - ui_info.ui.context["object"].bids_root - ) - ) - else: - print_error("Error: BIDS root invalid!") - ui_info.ui.context["object"].settings_checked = False - - if os.path.isfile(ui_info.ui.context["object"].anat_config): - print( - "Anatomical configuration file : {}".format( - ui_info.ui.context["object"].anat_config - ) - ) - else: - print_error( - "Error: Configuration file for anatomical pipeline not existing!" 
- ) - ui_info.ui.context["object"].settings_checked = False - - if os.path.isfile(ui_info.ui.context["object"].dmri_config): - print( - "Diffusion configuration file : {}".format( - ui_info.ui.context["object"].dmri_config - ) - ) - else: - print_warning( - "Warning: Configuration file for diffusion pipeline not existing!" - ) - - if os.path.isfile(ui_info.ui.context["object"].fmri_config): - print( - "fMRI configuration file : {}".format( - ui_info.ui.context["object"].fmri_config - ) - ) - else: - print_warning("Warning: Configuration file for fMRI pipeline not existing!") - - if os.path.isfile(ui_info.ui.context["object"].fs_license): - print( - "Freesurfer license : {}".format( - ui_info.ui.context["object"].fs_license - ) - ) - else: - print_error( - "Error: Invalid Freesurfer license ({})!".format( - ui_info.ui.context["object"].fs_license - ) - ) - ui_info.ui.context["object"].settings_checked = False - - msg = f'Valid inputs for BIDS App : {ui_info.ui.context["object"].settings_checked}' - if ui_info.ui.context["object"].settings_checked: - print(msg) - else: - print_error(msg) - - print("Docker running ? {}".format(ui_info.ui.context["object"].docker_running)) - return True - - @classmethod - def start_bidsapp_process(ui_info, participant_label): - """Function that runs the BIDS App on a single subject. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with this handler - participant_label : string - Label of the participant / subject (e.g. ``"01"``, no "sub-" prefix) - """ - cmd = [ - "docker", - "run", - "-it", - "--rm", - "-v", - "{}:/bids_dataset".format(ui_info.ui.context["object"].bids_root), - "-v", - "{}/derivatives:/outputs".format(ui_info.ui.context["object"].bids_root), - # '-v', '{}:/bids_dataset/derivatives/freesurfer/fsaverage'.format(ui_info.ui.context["object"].fs_average), - "-v", - "{}:/opt/freesurfer/license.txt".format( - ui_info.ui.context["object"].fs_license + Item("fmri_available", style="readonly", label="BOLD", resizable=True), + label="Modalities", + ), + spring, + Group( + Item( + "anat_last_date_processed", + label="Anatomical pipeline", + style="readonly", + resizable=True, + enabled_when="t1_available", ), - "-v", - "{}:/code/ref_anatomical_config.json".format( - ui_info.ui.context["object"].anat_config + Item( + "dmri_last_date_processed", + label="Diffusion pipeline", + style="readonly", + resizable=True, + enabled_when="dmri_available", ), - ] - - if ui_info.ui.context["object"].run_dmri_pipeline: - cmd.append("-v") - cmd.append( - "{}:/code/ref_diffusion_config.json".format( - ui_info.ui.context["object"].dmri_config - ) - ) - - if ui_info.ui.context["object"].run_fmri_pipeline: - cmd.append("-v") - cmd.append( - "{}:/code/ref_fMRI_config.json".format( - ui_info.ui.context["object"].fmri_config - ) - ) - - cmd.append("-u") - cmd.append("{}:{}".format(os.geteuid(), os.getegid())) - - cmd.append("sebastientourbier/connectomemapper-bidsapp:latest") - cmd.append("/bids_dataset") - cmd.append("/outputs") - cmd.append("participant") - - cmd.append("--participant_label") - cmd.append("{}".format(participant_label)) - - cmd.append("--anat_pipeline_config") - cmd.append("/code/ref_anatomical_config.json") - - if ui_info.ui.context["object"].run_dmri_pipeline: - cmd.append("--dwi_pipeline_config") - cmd.append("/code/ref_diffusion_config.json") - - if ui_info.ui.context["object"].run_fmri_pipeline: - cmd.append("--func_pipeline_config") - cmd.append("/code/ref_fMRI_config.json") - - print_blue(" ".join(cmd)) - - log_filename = 
os.path.join( - ui_info.ui.context["object"].bids_root, - "derivatives/cmp", - "sub-{}_log-cmpbidsapp.txt".format(participant_label), - ) - - with open(log_filename, "w+") as log: - proc = Popen(cmd, stdout=log, stderr=log) - - return proc - - @classmethod - def manage_bidsapp_procs(self, proclist): - """Function that managed the parallelized BIDS App Popen process. - - Parameters - ---------- - proclist - List of ``Popen`` processes running the BIDS App on a single subject - """ - for proc in proclist: - if proc.poll() is not None: - proclist.remove(proc) - - def start_bids_app(self, ui_info): - """Main function that runs the BIDS App on a set or sub-set of participants. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with this handler - """ - print("[Start BIDS App]") - - maxprocs = multiprocessing.cpu_count() - processes = [] - - ui_info.ui.context["object"].docker_running = True - - for label in ui_info.ui.context["object"].list_of_subjects_to_be_processed: - while len(processes) == maxprocs: - self.manage_bidsapp_procs(processes) - - proc = self.start_bidsapp_process(ui_info, label=label) - processes.append(proc) - - while len(processes) > 0: - self.manage_bidsapp_procs(processes) - - print("Processing with BIDS App Finished") - - ui_info.ui.context["object"].docker_running = False - - return True - - @classmethod - def stop_bids_app(ui_info): - """Function that stops the BIDS execution. - - Parameters - ---------- - ui_info : QtView - TraitsUI QtView associated with this handler - """ - print("Stop BIDS App") - # self.docker_process.kill() - ui_info.ui.context["object"].docker_running = False - return True + Item( + "fmri_last_date_processed", + label="fMRI pipeline", + style="readonly", + resizable=True, + enabled_when="fmri_available", + ), + label="Last date processed", + ), + spring, + Group( + Item("number_of_cores", resizable=True), label="Processing configuration" + ), + "550", + spring, + springy=True, + ) + + traits_view = QtView(Include("dataset_view")) + + create_view = View( + Item("creation_mode", style="custom"), + Group( + Group( + Item("base_directory", label="BIDS Dataset"), + visible_when='creation_mode=="Load BIDS dataset"', + ), + Group( + Item("install_datalad_dataset_via_ssh"), + visible_when='creation_mode=="Install Datalad/BIDS dataset"', + ), + Group( + Item( + "ssh_remote", + label="Remote ssh server", + visible_when="install_datalad_dataset_via_ssh", + ), + Item( + "ssh_user", + label="Remote username", + visible_when="install_datalad_dataset_via_ssh", + ), + Item( + "ssh_pwd", + label="Remote password", + visible_when="install_datalad_dataset_via_ssh", + ), + Item( + "datalad_dataset_path", + label="Datalad/BIDS Dataset Path/URL to be installed", + ), + Item("base_directory", label="Installation directory"), + visible_when='creation_mode=="Install Datalad/BIDS dataset"', + ), + ), + kind="livemodal", + title="Data creation: BIDS dataset selection", + # style_sheet=style_sheet, + width=modal_width, + buttons=["OK", "Cancel"], + ) + + subject_view = View( + Group(Item("subject", label="Selected Subject")), + kind="modal", + title="Subject and session selection", + # style_sheet=style_sheet, + width=modal_width, + buttons=["OK", "Cancel"], + ) + + subject_session_view = View( + Group( + Item("subject", style="readonly", label="Selected Subject"), + Item("subject_session", label="Selected Session"), + ), + kind="modal", + title="Session selection", + # style_sheet=style_sheet, + width=modal_width, + buttons=["OK", "Cancel"], + ) + + 
dmri_bids_acq_view = View( + Group( + Item("dmri_bids_acq", label="Selected model"), + ), + kind="modal", + title="Selection of diffusion acquisition model", + # style_sheet=style_sheet, + width=modal_width, + buttons=["OK", "Cancel"], + ) + + anat_warning_view = View( + Group( + Item("anat_warning_msg", style="readonly", show_label=False), + ), + title="Warning : Anatomical T1w data", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + anat_config_error_view = View( + Group( + Item("anat_config_error_msg", style="readonly", show_label=False), + ), + title="Error", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + dmri_warning_view = View( + Group( + Item("dmri_warning_msg", style="readonly", show_label=False), + ), + title="Warning : Diffusion MRI data", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + dmri_config_error_view = View( + Group( + Item("dmri_config_error_msg", style="readonly", show_label=False), + ), + title="Error", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + fmri_warning_view = View( + Group( + Item("fmri_warning_msg", style="readonly", show_label=False), + ), + title="Warning : fMRI data", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + fmri_config_error_view = View( + Group( + Item("fmri_config_error_msg", style="readonly", show_label=False), + ), + title="Error", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + open_view = View( + Item("creation_mode", label="Mode"), + Group( + Item("install_datalad_dataset_via_ssh"), + Item( + "ssh_remote", + label="Remote ssh server", + visible_when="install_datalad_dataset_via_ssh", + ), + Item( + "ssh_user", + label="Remote username", + visible_when="install_datalad_dataset_via_ssh", + ), + Item( + "ssh_pwd", + label="Remote password", + visible_when="install_datalad_dataset_via_ssh", + ), + Item( + "datalad_dataset_path", + label="Datalad/BIDS Dataset Path/URL to be installed", + ), + Item("base_directory", label="Installation directory"), + visible_when='creation_mode=="Install Datalad BIDS dataset"', + ), + Group( + Item("base_directory", label="BIDS Dataset"), + visible_when='creation_mode=="Load BIDS dataset"', + ), + kind="livemodal", + title="BIDS Dataset Creation/Loading", + # style_sheet=style_sheet, + width=600, + height=250, + buttons=["OK", "Cancel"], + ) + + anat_select_config_to_load = View( + Group( + Item("anat_config_to_load_msg", style="readonly", show_label=False), + Item( + "anat_config_to_load", + style="custom", + editor=EnumEditor(name="anat_available_config"), + show_label=False, + ), + ), + title="Select configuration for anatomical pipeline", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + anat_custom_map_view = View( + Group( + Item( + "anat_custom_last_stage", + editor=EnumEditor(name="anat_stage_names"), + style="custom", + show_label=False, + ), + ), + title="Select until which stage to process the anatomical pipeline.", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + diffusion_imaging_model_select_view = View( + Group( + Item("diffusion_imaging_model", label="Diffusion MRI modality"), + ), + title="Please select diffusion MRI modality", + kind="modal", + width=modal_width, + 
buttons=["OK", "Cancel"], + ) + + dmri_select_config_to_load = View( + Group( + Item("dmri_config_to_load_msg", style="readonly", show_label=False), + ), + Item( + "dmri_config_to_load", + style="custom", + editor=EnumEditor(name="dmri_available_config"), + show_label=False, + ), + title="Select configuration for diffusion pipeline", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + dmri_custom_map_view = View( + Group( + Item( + "dmri_custom_last_stage", + editor=EnumEditor(name="dmri_stage_names"), + style="custom", + show_label=False, + ), + ), + title="Select until which stage to process the diffusion pipeline.", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + fmri_select_config_to_load = View( + Group( + Item("fmri_config_to_load_msg", style="readonly", show_label=False), + ), + Item( + "fmri_config_to_load", + style="custom", + editor=EnumEditor(name="fmri_available_config"), + show_label=False, + ), + title="Select configuration for fMRI pipeline", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + fmri_custom_map_view = View( + Group( + Item( + "fmri_custom_last_stage", + editor=EnumEditor(name="fmri_stage_names"), + style="custom", + show_label=False, + ), + ), + title="Select until which stage to process the fMRI pipeline.", + kind="modal", + width=modal_width, + # style_sheet=style_sheet, + buttons=["OK", "Cancel"], + ) + + def _summary_view_button_fired(self): + self.configure_traits(view="pipeline_processing_summary_view") \ No newline at end of file diff --git a/cmp/cli/cmpbidsappmanager.py b/cmp/cli/cmpbidsappmanager.py index d1449b0ff..10f5cbf99 100644 --- a/cmp/cli/cmpbidsappmanager.py +++ b/cmp/cli/cmpbidsappmanager.py @@ -5,23 +5,20 @@ # This software is distributed under the open-source license Modified BSD. 
"""This module defines the `cmpbidsappmanager` script that launches the Graphical User Interface.""" -from traits.etsconfig.api import ETSConfig +import sys # Setup Qt5/Pyside2 backend for traitsui -ETSConfig.toolkit = "qt" - +from traits.etsconfig.api import ETSConfig +ETSConfig.toolkit = "qt" # pylint: disable=E402 # noqa import os - -os.environ["ETS_TOOLKIT"] = "qt" +os.environ["ETS_TOOLKIT"] = "qt" # pylint: disable=E402 # noqa # os.environ['QT_API'] = 'pyqt5' -os.environ["QT_API"] = "pyside2" - -import sys +os.environ["QT_API"] = "pyside2" # pylint: disable=E402 # noqa # CMP imports -from cmp.bidsappmanager import gui from cmp.info import __version__, __copyright__ from cmtklib.util import print_warning +import cmp.bidsappmanager.gui.principal def info(): @@ -79,7 +76,7 @@ def main(): argc = len(sys.argv) if argc == 1: # no args, launch the GUI - mw = gui.CMP_MainWindow() + mw = cmp.bidsappmanager.gui.principal.MainWindow() _ = mw.configure_traits() exit_code = 0 else: diff --git a/cmp/cli/connectomemapper3.py b/cmp/cli/connectomemapper3.py index 1e73e464b..4d6626876 100644 --- a/cmp/cli/connectomemapper3.py +++ b/cmp/cli/connectomemapper3.py @@ -184,7 +184,7 @@ def main(): # Version and copyright message info() - project = cmp.project.CMP_Project_Info() + project = cmp.project.ProjectInfo() project.base_directory = os.path.abspath(args.bids_dir) project.output_directory = os.path.abspath(args.output_dir) project.subjects = ["{}".format(args.participant_label)] diff --git a/cmp/project.py b/cmp/project.py index 34e839ad2..6f65c37d4 100644 --- a/cmp/project.py +++ b/cmp/project.py @@ -46,7 +46,7 @@ ) -class CMP_Project_Info(HasTraits): +class ProjectInfo(HasTraits): """Class used to store all properties of a processing project. Attributes @@ -323,7 +323,7 @@ def init_dmri_project(project_info, bids_layout, is_new_project, gui=True, debug Parameters ---------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo Instance of ``cmp.project.CMP_Project_Info`` object bids_layout : bids.BIDSLayout @@ -434,7 +434,7 @@ def init_fmri_project(project_info, bids_layout, is_new_project, gui=True, debug Parameters ---------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo Instance of ``cmp.project.CMP_Project_Info`` object bids_layout : bids.BIDSLayout @@ -542,7 +542,7 @@ def init_anat_project(project_info, is_new_project, debug=False): Parameters ---------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo Instance of ``cmp.project.CMP_Project_Info`` object is_new_project : bool @@ -638,7 +638,7 @@ def update_anat_last_processed(project_info, pipeline): Parameters ---------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo Instance of `CMP_Project_Info` object pipeline : cmp.pipelines.anatomical.anatomical.AnatomicalPipeline @@ -698,7 +698,7 @@ def update_dmri_last_processed(project_info, pipeline): Parameters ---------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo Instance of `CMP_Project_Info` object pipeline : cmp.pipelines.diffusion.diffusion.DiffusionPipeline @@ -754,7 +754,7 @@ def update_fmri_last_processed(project_info, pipeline): Parameters ---------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo Instance of `CMP_Project_Info` object pipeline : cmp.pipelines.functional.fMRI.fMRIPipeline @@ -843,7 +843,7 @@ def run_individual( number_of_threads : int Number of 
threads used by programs relying on the OpenMP library """ - project = CMP_Project_Info() + project = ProjectInfo() project.base_directory = os.path.abspath(bids_dir) project.output_directory = os.path.abspath(output_dir) project.subjects = ["{}".format(participant_label)] diff --git a/run.py b/run.py index 8cc197c76..0bfb8e239 100644 --- a/run.py +++ b/run.py @@ -37,7 +37,7 @@ __freesurfer_directory__, __nipype_directory__ ) -from cmp.project import CMP_Project_Info, run_individual +from cmp.project import ProjectInfo, run_individual warnings.filterwarnings("ignore", message="numpy.dtype size changed") warnings.filterwarnings("ignore", message="numpy.ufunc size changed") @@ -84,7 +84,7 @@ def create_cmp_command(project, run_anat, run_dmri, run_fmri, number_of_threads= Parameters ---------- - project : cmp.project.CMP_Project_Info + project : cmp.project.ProjectInfo Instance of `cmp.project.CMP_Project_Info` run_anat : bool @@ -424,7 +424,7 @@ def run(command, env={}, log_filename={}): # find all T1s and skullstrip them for subject_label in subjects_to_analyze: - project = CMP_Project_Info() + project = ProjectInfo() project.base_directory = args.bids_dir project.output_directory = args.output_dir diff --git a/setup.py b/setup.py index 004b0ae5a..179faf382 100644 --- a/setup.py +++ b/setup.py @@ -48,6 +48,7 @@ def run(self): "cmp.pipelines.diffusion", "cmp.pipelines.functional", "cmp.bidsappmanager", + "cmp.bidsappmanager.gui", "cmp.bidsappmanager.stages", "cmp.bidsappmanager.stages.preprocessing", "cmp.bidsappmanager.stages.segmentation", @@ -130,7 +131,7 @@ def main(): """Main function of CMP3 ``setup.py``""" # Setup configuration setuptools.setup( - name="cmp", + name="connectomemapper", version=__version__, description="Connectome Mapper 3: A Flexible and Open-Source Pipeline Software for Multiscale Multimodal Human Connectome Mapping", long_description=long_description, diff --git a/setup_pypi.py b/setup_pypi.py index 70b1d54ba..64115d9b8 100644 --- a/setup_pypi.py +++ b/setup_pypi.py @@ -48,6 +48,7 @@ def run(self): "cmp.pipelines.diffusion", "cmp.pipelines.functional", "cmp.bidsappmanager", + "cmp.bidsappmanager.gui", "cmp.bidsappmanager.stages", "cmp.bidsappmanager.stages.preprocessing", "cmp.bidsappmanager.stages.segmentation", @@ -120,7 +121,7 @@ def main(): """Main function of CMP3 ``setup.py``""" # Setup configuration setuptools.setup( - name="cmp", + name="connectomemapper", version=__version__, description="Connectome Mapper 3: A Flexible and Open-Source Pipeline Software for Multiscale Multimodal Human Connectome Mapping", long_description=long_description, From 245e945972637cf093fb5d2d0935fa730af81e1b Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Mon, 3 Jan 2022 15:59:10 +0100 Subject: [PATCH 02/15] STY: Correct class names to be MixedCased --- .../stages/diffusion/reconstruction.py | 14 +++--- .../stages/diffusion/tracking.py | 14 +++--- cmp/pipelines/anatomical/anatomical.py | 15 ++----- cmp/pipelines/diffusion/diffusion.py | 6 +-- cmp/pipelines/functional/fMRI.py | 6 +-- cmp/project.py | 13 +++--- cmp/stages/connectome/connectome.py | 2 +- cmp/stages/connectome/fmri_connectome.py | 2 +- cmp/stages/diffusion/diffusion.py | 24 +++++----- cmp/stages/diffusion/reconstruction.py | 8 ++-- cmp/stages/diffusion/tracking.py | 8 ++-- cmp/stages/functional/functionalMRI.py | 4 +- .../preprocessing/fmri_preprocessing.py | 4 +- cmtklib/config.py | 18 ++++---- cmtklib/connectome.py | 28 ++++++------ cmtklib/diffusion.py | 26 +++++------ cmtklib/functionalMRI.py | 44 
+++++++++---------- cmtklib/process.py | 2 +- 18 files changed, 115 insertions(+), 123 deletions(-) diff --git a/cmp/bidsappmanager/stages/diffusion/reconstruction.py b/cmp/bidsappmanager/stages/diffusion/reconstruction.py index c48e3d1a9..805c4941c 100644 --- a/cmp/bidsappmanager/stages/diffusion/reconstruction.py +++ b/cmp/bidsappmanager/stages/diffusion/reconstruction.py @@ -15,12 +15,12 @@ from traitsui.api import * # Own imports -from cmp.stages.diffusion.reconstruction import Dipy_recon_config, MRtrix_recon_config +from cmp.stages.diffusion.reconstruction import DipyReconConfig, MRtrixReconConfig # Reconstruction configuration -class Dipy_recon_configUI(Dipy_recon_config): - """Class that extends the :class:`Dipy_recon_config` with graphical components. +class Dipy_recon_configUI(DipyReconConfig): + """Class that extends the :class:`DipyReconConfig` with graphical components. Attributes ---------- @@ -34,7 +34,7 @@ class Dipy_recon_configUI(Dipy_recon_config): See also --------- - cmp.stages.diffusion.reconstruction.Dipy_recon_config + cmp.stages.diffusion.reconstruction.DipyReconConfig """ flip_table_axis = List(editor=CheckListEditor(values=["x", "y", "z"], cols=3)) @@ -84,8 +84,8 @@ class Dipy_recon_configUI(Dipy_recon_config): ) -class MRtrix_recon_configUI(MRtrix_recon_config): - """Class that extends the :class:`MRtrix_recon_config` with graphical components. +class MRtrix_recon_configUI(MRtrixReconConfig): + """Class that extends the :class:`MRtrixReconConfig` with graphical components. Attributes ---------- @@ -99,7 +99,7 @@ class MRtrix_recon_configUI(MRtrix_recon_config): See also --------- - cmp.stages.diffusion.reconstruction.MRtrix_recon_config + cmp.stages.diffusion.reconstruction.MRtrixReconConfig """ flip_table_axis = List(editor=CheckListEditor(values=["x", "y", "z"], cols=3)) diff --git a/cmp/bidsappmanager/stages/diffusion/tracking.py b/cmp/bidsappmanager/stages/diffusion/tracking.py index 86912464a..5b583b491 100644 --- a/cmp/bidsappmanager/stages/diffusion/tracking.py +++ b/cmp/bidsappmanager/stages/diffusion/tracking.py @@ -9,11 +9,11 @@ from traits.api import * from traitsui.api import * -from cmp.stages.diffusion.tracking import Dipy_tracking_config, MRtrix_tracking_config +from cmp.stages.diffusion.tracking import DipyTrackingConfig, MRtrixTrackingConfig -class Dipy_tracking_configUI(Dipy_tracking_config): - """Class that extends the :class:`Dipy_tracking_config` with graphical components. +class Dipy_tracking_configUI(DipyTrackingConfig): + """Class that extends the :class:`DipyTrackingConfig` with graphical components. Attributes ---------- @@ -23,7 +23,7 @@ class Dipy_tracking_configUI(Dipy_tracking_config): See also --------- - cmp.stages.diffusion.tracking.Dipy_tracking_config + cmp.stages.diffusion.tracking.DipyTrackingConfig """ traits_view = View( @@ -52,8 +52,8 @@ class Dipy_tracking_configUI(Dipy_tracking_config): ) -class MRtrix_tracking_configUI(MRtrix_tracking_config): - """Class that extends the :class:`MRtrix_tracking_config` with graphical components. +class MRtrix_tracking_configUI(MRtrixTrackingConfig): + """Class that extends the :class:`MRtrixTrackingConfig` with graphical components. 
Attributes ---------- @@ -63,7 +63,7 @@ class MRtrix_tracking_configUI(MRtrix_tracking_config): See also --------- - cmp.stages.diffusion.reconstruction.MRtrix_tracking_config + cmp.stages.diffusion.reconstruction.MRtrixTrackingConfig """ traits_view = View( diff --git a/cmp/pipelines/anatomical/anatomical.py b/cmp/pipelines/anatomical/anatomical.py index d5c1f7eb9..bba4bdb82 100644 --- a/cmp/pipelines/anatomical/anatomical.py +++ b/cmp/pipelines/anatomical/anatomical.py @@ -28,7 +28,7 @@ from cmp.stages.parcellation.parcellation import ParcellationStage -class Global_Configuration(HasTraits): +class GlobalConfiguration(HasTraits): """Global pipeline configurations. Attributes @@ -52,15 +52,6 @@ class Global_Configuration(HasTraits): subject_session = Str -class Check_Input_Notification(HasTraits): - message = Str - diffusion_imaging_model_options = List(["DSI", "DTI", "HARDI"]) - diffusion_imaging_model = Str - diffusion_imaging_model_message = Str( - "\nMultiple diffusion inputs available. Please select desired diffusion modality." - ) - - class AnatomicalPipeline(cmp_common.Pipeline): """Class that extends a :class:`Pipeline` and represents the processing pipeline for structural MRI. @@ -84,7 +75,7 @@ class AnatomicalPipeline(cmp_common.Pipeline): derivatives_directory = Directory ordered_stage_list = ["Segmentation", "Parcellation"] custom_last_stage = Enum("Parcellation", ["Segmentation", "Parcellation"]) - global_conf = Global_Configuration() + global_conf = GlobalConfiguration() config_file = Str flow = Instance(pe.Workflow) @@ -93,7 +84,7 @@ def __init__(self, project_info): Parameters ---------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo Instance of `CMP_Project_Info` object. See Also diff --git a/cmp/pipelines/diffusion/diffusion.py b/cmp/pipelines/diffusion/diffusion.py index c78a4aa26..668ec52c3 100644 --- a/cmp/pipelines/diffusion/diffusion.py +++ b/cmp/pipelines/diffusion/diffusion.py @@ -24,7 +24,7 @@ from cmp.stages.registration.registration import RegistrationStage -class Global_Configuration(HasTraits): +class GlobalConfiguration(HasTraits): """Global pipeline configurations. Attributes @@ -90,7 +90,7 @@ class DiffusionPipeline(Pipeline): custom_atlas_name = Str custom_atlas_res = Str atlas_info = Dict() - global_conf = Global_Configuration() + global_conf = GlobalConfiguration() config_file = Str def __init__(self, project_info): @@ -98,7 +98,7 @@ def __init__(self, project_info): Parameters ---------- - project_info : cmp.project.CMP_Project_Info + project_info : cmp.project.ProjectInfo Instance of `CMP_Project_Info` object. See Also diff --git a/cmp/pipelines/functional/fMRI.py b/cmp/pipelines/functional/fMRI.py index d5e072607..4ff6b6fc5 100644 --- a/cmp/pipelines/functional/fMRI.py +++ b/cmp/pipelines/functional/fMRI.py @@ -21,7 +21,7 @@ from cmp.stages.registration.registration import RegistrationStage -class Global_Configuration(HasTraits): +class GlobalConfiguration(HasTraits): """Global pipeline configurations. Attributes @@ -74,7 +74,7 @@ class fMRIPipeline(Pipeline): "FunctionalMRI", "Connectome", ] - global_conf = Global_Configuration() + global_conf = GlobalConfiguration() config_file = Str parcellation_scheme = Str atlas_info = Dict() @@ -88,7 +88,7 @@ def __init__(self, project_info): Parameters ---------- - project_info: cmp.project.CMP_Project_Info + project_info: cmp.project.ProjectInfo Instance of `CMP_Project_Info` object. 
See Also diff --git a/cmp/project.py b/cmp/project.py index 6f65c37d4..8094c0122 100644 --- a/cmp/project.py +++ b/cmp/project.py @@ -19,15 +19,10 @@ from bids import BIDSLayout # Own imports -from cmtklib.bids.io import ( - __cmp_directory__, - __nipype_directory__, - __freesurfer_directory__ -) -from cmtklib.bids.utils import write_derivative_description from cmp.pipelines.anatomical import anatomical as Anatomical_pipeline from cmp.pipelines.diffusion import diffusion as Diffusion_pipeline from cmp.pipelines.functional import fMRI as FMRI_pipeline + from cmtklib.config import ( anat_load_config_json, anat_save_config, @@ -36,6 +31,12 @@ fmri_load_config_json, fmri_save_config, ) +from cmtklib.bids.io import ( + __cmp_directory__, + __nipype_directory__, + __freesurfer_directory__ +) +from cmtklib.bids.utils import write_derivative_description # Ignore some warnings warnings.filterwarnings( diff --git a/cmp/stages/connectome/connectome.py b/cmp/stages/connectome/connectome.py index 2f1a68643..3735c8560 100644 --- a/cmp/stages/connectome/connectome.py +++ b/cmp/stages/connectome/connectome.py @@ -135,7 +135,7 @@ def create_workflow(self, flow, inputnode, outputnode): Identity interface describing the outputs of the stage """ cmtk_cmat = pe.Node( - interface=cmtklib.connectome.CMTK_cmat(), name="compute_matrice" + interface=cmtklib.connectome.DmriCmat(), name="compute_matrice" ) cmtk_cmat.inputs.compute_curvature = self.config.compute_curvature cmtk_cmat.inputs.output_types = self.config.output_types diff --git a/cmp/stages/connectome/fmri_connectome.py b/cmp/stages/connectome/fmri_connectome.py index f8a707068..6d932f9f1 100644 --- a/cmp/stages/connectome/fmri_connectome.py +++ b/cmp/stages/connectome/fmri_connectome.py @@ -122,7 +122,7 @@ def create_workflow(self, flow, inputnode, outputnode): Identity interface describing the outputs of the stage """ cmtk_cmat = pe.Node( - interface=cmtklib.connectome.CMTK_rsfmri_cmat(), name="compute_matrice" + interface=cmtklib.connectome.RsfmriCmat(), name="compute_matrice" ) cmtk_cmat.inputs.output_types = self.config.output_types diff --git a/cmp/stages/diffusion/diffusion.py b/cmp/stages/diffusion/diffusion.py index bd591777e..c23e0a92b 100644 --- a/cmp/stages/diffusion/diffusion.py +++ b/cmp/stages/diffusion/diffusion.py @@ -82,10 +82,10 @@ class DiffusionConfig(HasTraits): See Also -------- - cmp.stages.diffusion.reconstruction.Dipy_recon_config - cmp.stages.diffusion.reconstruction.MRtrix_recon_config - cmp.stages.diffusion.tracking.Dipy_tracking_config - cmp.stages.diffusion.tracking.MRtrix_tracking_config + cmp.stages.diffusion.reconstruction.DipyReconConfig + cmp.stages.diffusion.reconstruction.MRtrixReconConfig + cmp.stages.diffusion.tracking.DipyTrackingConfig + cmp.stages.diffusion.tracking.MRtrixTrackingConfig cmp.stages.diffusion.diffusion.DiffusionStage """ @@ -111,20 +111,20 @@ class DiffusionConfig(HasTraits): def __init__(self): """Constructor of an :class:`cmp.stages.diffusion.diffusion.DiffusionConfig` object.""" - self.dipy_recon_config = Dipy_recon_config( + self.dipy_recon_config = DipyReconConfig( imaging_model=self.diffusion_imaging_model, recon_mode=self.diffusion_model, tracking_processing_tool=self.tracking_processing_tool, ) - self.mrtrix_recon_config = MRtrix_recon_config( + self.mrtrix_recon_config = MRtrixReconConfig( imaging_model=self.diffusion_imaging_model, recon_mode=self.diffusion_model ) - self.dipy_tracking_config = Dipy_tracking_config( + self.dipy_tracking_config = DipyTrackingConfig( 
imaging_model=self.diffusion_imaging_model, tracking_mode=self.diffusion_model, SD=self.mrtrix_recon_config.local_model, ) - self.mrtrix_tracking_config = MRtrix_tracking_config( + self.mrtrix_tracking_config = MRtrixTrackingConfig( tracking_mode=self.diffusion_model, SD=self.mrtrix_recon_config.local_model ) @@ -314,10 +314,10 @@ class DiffusionStage(Stage): -------- cmp.pipelines.diffusion.diffusion.DiffusionPipeline cmp.stages.diffusion.diffusion.DiffusionConfig - cmp.stages.diffusion.reconstruction.Dipy_recon_config - cmp.stages.diffusion.reconstruction.MRtrix_recon_config - cmp.stages.diffusion.tracking.Dipy_tracking_config - cmp.stages.diffusion.tracking.MRtrix_tracking_config + cmp.stages.diffusion.reconstruction.DipyReconConfig + cmp.stages.diffusion.reconstruction.MRtrixReconConfig + cmp.stages.diffusion.tracking.DipyTrackingConfig + cmp.stages.diffusion.tracking.MRtrixTrackingConfig cmp.stages.diffusion.reconstruction.create_dipy_recon_flow cmp.stages.diffusion.reconstruction.create_mrtrix_recon_flow cmp.stages.diffusion.tracking.create_dipy_tracking_flow diff --git a/cmp/stages/diffusion/reconstruction.py b/cmp/stages/diffusion/reconstruction.py index 28e92d14d..007255b7e 100644 --- a/cmp/stages/diffusion/reconstruction.py +++ b/cmp/stages/diffusion/reconstruction.py @@ -39,7 +39,7 @@ iflogger = logging.getLogger("nipype.interface") -class Dipy_recon_config(HasTraits): +class DipyReconConfig(HasTraits): """Class used to store Dipy diffusion reconstruction sub-workflow configuration parameters. Attributes @@ -234,7 +234,7 @@ def _recon_mode_changed(self, new): } -class MRtrix_recon_config(HasTraits): +class MRtrixReconConfig(HasTraits): """Class used to store Dipy diffusion reconstruction sub-workflow configuration parameters. Attributes @@ -310,7 +310,7 @@ def create_dipy_recon_flow(config): Parameters ---------- - config : Dipy_recon_config + config : DipyReconConfig Workflow configuration Returns @@ -526,7 +526,7 @@ def create_mrtrix_recon_flow(config): Parameters ---------- - config : Dipy_recon_config + config : DipyReconConfig Workflow configuration Returns diff --git a/cmp/stages/diffusion/tracking.py b/cmp/stages/diffusion/tracking.py index 3a3d7e043..542afe43d 100644 --- a/cmp/stages/diffusion/tracking.py +++ b/cmp/stages/diffusion/tracking.py @@ -27,7 +27,7 @@ iflogger = logging.getLogger("nipype.interface") -class Dipy_tracking_config(HasTraits): +class DipyTrackingConfig(HasTraits): """Class used to store Dipy diffusion reconstruction sub-workflow configuration parameters. Attributes @@ -160,7 +160,7 @@ def _use_act_changed(self, new): self.seed_from_gmwmi = False -class MRtrix_tracking_config(HasTraits): +class MRtrixTrackingConfig(HasTraits): """Class used to store Dipy diffusion reconstruction sub-workflow configuration parameters. 
Attributes @@ -303,7 +303,7 @@ def create_dipy_tracking_flow(config): Parameters ---------- - config : Dipy_tracking_config + config : DipyTrackingConfig Sub-workflow configuration object Returns @@ -470,7 +470,7 @@ def create_mrtrix_tracking_flow(config): Parameters ---------- - config : MRtrix_tracking_config + config : MRtrixTrackingConfig Sub-workflow configuration object Returns diff --git a/cmp/stages/functional/functionalMRI.py b/cmp/stages/functional/functionalMRI.py index 3b6416a72..39c83368f 100644 --- a/cmp/stages/functional/functionalMRI.py +++ b/cmp/stages/functional/functionalMRI.py @@ -20,7 +20,7 @@ # Own imports from cmp.stages.common import Stage -from cmtklib.functionalMRI import Scrubbing, Detrending, Nuisance_regression +from cmtklib.functionalMRI import Scrubbing, Detrending, NuisanceRegression class FunctionalMRIConfig(HasTraits): @@ -183,7 +183,7 @@ def create_workflow(self, flow, inputnode, outputnode): or self.config.motion ): nuisance = pe.Node( - interface=Nuisance_regression(), name="nuisance_regression" + interface=NuisanceRegression(), name="nuisance_regression" ) nuisance.inputs.global_nuisance = self.config.global_nuisance nuisance.inputs.csf_nuisance = self.config.csf diff --git a/cmp/stages/preprocessing/fmri_preprocessing.py b/cmp/stages/preprocessing/fmri_preprocessing.py index 9071a7a2e..b52c34866 100644 --- a/cmp/stages/preprocessing/fmri_preprocessing.py +++ b/cmp/stages/preprocessing/fmri_preprocessing.py @@ -20,7 +20,7 @@ # Own imports from cmtklib.interfaces.afni import Despike from cmp.stages.common import Stage -from cmtklib.functionalMRI import Discard_tp +from cmtklib.functionalMRI import DiscardTP class PreprocessingConfig(HasTraits): @@ -116,7 +116,7 @@ def create_workflow(self, flow, inputnode, outputnode): ) if self.config.discard_n_volumes > 0: discard = pe.Node( - interface=Discard_tp(n_discard=self.config.discard_n_volumes), + interface=DiscardTP(n_discard=self.config.discard_n_volumes), name="discard_volumes", ) # fmt:off diff --git a/cmtklib/config.py b/cmtklib/config.py index 38a5f57ba..d023e69a7 100644 --- a/cmtklib/config.py +++ b/cmtklib/config.py @@ -293,7 +293,7 @@ def create_subject_configuration_from_ref( Parameters ---------- - project : cmp.project.CMP_Project_Info + project : cmp.project.ProjectInfo Instance of `cmp.project.CMP_Project_Info` ref_conf_file : string @@ -372,8 +372,8 @@ def get_process_detail_json(project_info, section, detail): Parameters ---------- - project_info : Instance(cmp.project.CMP_Project_Info) - Instance of :class:`cmp.project.CMP_Project_Info` class + project_info : Instance(cmp.project.ProjectInfo) + Instance of :class:`cmp.project.ProjectInfo` class section : string Stage section name @@ -395,8 +395,8 @@ def get_anat_process_detail_json(project_info, section, detail): Parameters ---------- - project_info : Instance(cmp.project.CMP_Project_Info) - Instance of :class:`cmp.project.CMP_Project_Info` class + project_info : Instance(cmp.project.ProjectInfo) + Instance of :class:`cmp.project.ProjectInfo` class section : string Stage section name @@ -423,8 +423,8 @@ def get_dmri_process_detail_json(project_info, section, detail): Parameters ---------- - project_info : Instance(cmp.project.CMP_Project_Info) - Instance of :class:`cmp.project.CMP_Project_Info` class + project_info : Instance(cmp.project.ProjectInfo) + Instance of :class:`cmp.project.ProjectInfo` class section : string Stage section name @@ -446,8 +446,8 @@ def get_fmri_process_detail_json(project_info, section, detail): Parameters 
---------- - project_info : Instance(cmp.project.CMP_Project_Info) - Instance of :class:`cmp.project.CMP_Project_Info` class + project_info : Instance(cmp.project.ProjectInfo) + Instance of :class:`cmp.project.ProjectInfo` class section : string Stage section name diff --git a/cmtklib/connectome.py b/cmtklib/connectome.py index 7d314dd8a..99b62dc66 100644 --- a/cmtklib/connectome.py +++ b/cmtklib/connectome.py @@ -660,7 +660,7 @@ def cmat( print("========================") -class CMTK_cmatInputSpec(BaseInterfaceInputSpec): +class DmriCmatInputSpec(BaseInterfaceInputSpec): track_file = InputMultiPath( File(exists=True), desc="Tractography result", mandatory=True ) @@ -693,7 +693,7 @@ class CMTK_cmatInputSpec(BaseInterfaceInputSpec): ) -class CMTK_cmatOutputSpec(TraitedSpec): +class DmriCmatOutputSpec(TraitedSpec): endpoints_file = File(desc="Numpy files storing the list of fiber endpoint") endpoints_mm_file = File( @@ -717,13 +717,13 @@ class CMTK_cmatOutputSpec(TraitedSpec): connectivity_matrices = OutputMultiPath(File(), desc="Connectivity matrices") -class CMTK_cmat(BaseInterface): +class DmriCmat(BaseInterface): """Creates the structural connectivity matrices for a given parcellation scheme. Examples -------- - >>> from cmtklib.connectome import CMTK_cmat - >>> cmat = CMTK_cmat() + >>> from cmtklib.connectome import DmriCmat + >>> cmat = DmriCmat() >>> cmat.inputs.base_dir = '/my_directory' >>> cmat.inputs.track_file = '/path/to/sub-01_tractogram.trk' >>> cmat.inputs.roi_volumes = ['/path/to/sub-01_space-DWI_atlas-L2018_desc-scale1_dseg.nii.gz', @@ -742,8 +742,8 @@ class CMTK_cmat(BaseInterface): """ - input_spec = CMTK_cmatInputSpec - output_spec = CMTK_cmatOutputSpec + input_spec = DmriCmatInputSpec + output_spec = DmriCmatOutputSpec def _run_interface(self, runtime): if isdefined(self.inputs.additional_maps): @@ -787,7 +787,7 @@ def _list_outputs(self): return outputs -class CMTK_rsfmri_cmat_InputSpec(BaseInterfaceInputSpec): +class RsfmriCmatInputSpec(BaseInterfaceInputSpec): func_file = File(exists=True, mandatory=True, desc="fMRI volume") roi_volumes = InputMultiPath( @@ -821,7 +821,7 @@ class CMTK_rsfmri_cmat_InputSpec(BaseInterfaceInputSpec): output_types = traits.List(Str, desc="Output types of the connectivity matrices") -class CMTK_rsfmri_cmat_OutputSpec(TraitedSpec): +class RsfmriCmatOutputSpec(TraitedSpec): avg_timeseries = OutputMultiPath(File(exists=True), desc="ROI average timeseries") scrubbed_idx = File(exists=True, desc="Scrubbed indices") @@ -831,7 +831,7 @@ class CMTK_rsfmri_cmat_OutputSpec(TraitedSpec): ) -class CMTK_rsfmri_cmat(BaseInterface): +class RsfmriCmat(BaseInterface): """Creates the functional connectivity matrices for a given parcellation scheme. 
It applies scrubbing (if enabled), computes the average GM ROI time-series and computes @@ -839,8 +839,8 @@ class CMTK_rsfmri_cmat(BaseInterface): Examples -------- - >>> from cmtklib.connectome import CMTK_rsfmri_cmat - >>> cmat = CMTK_rsfmri_cmat() + >>> from cmtklib.connectome import RsfmriCmat + >>> cmat = RsfmriCmat() >>> cmat.inputs.base_dir = '/my_directory' >>> cmat.inputs.func_file = '/path/to/sub-01_task-rest_desc-preproc_bold.nii.gz' >>> cmat.inputs.roi_volumes = ['/path/to/sub-01_space-meanBOLD_atlas-L2018_desc-scale1_dseg.nii.gz', @@ -860,8 +860,8 @@ class CMTK_rsfmri_cmat(BaseInterface): """ - input_spec = CMTK_rsfmri_cmat_InputSpec - output_spec = CMTK_rsfmri_cmat_OutputSpec + input_spec = RsfmriCmatInputSpec + output_spec = RsfmriCmatOutputSpec def _run_interface(self, runtime): print("================================================") diff --git a/cmtklib/diffusion.py b/cmtklib/diffusion.py index 4bd380889..ad58b2472 100644 --- a/cmtklib/diffusion.py +++ b/cmtklib/diffusion.py @@ -663,7 +663,7 @@ def _list_outputs(self): return outputs -class Make_SeedsInputSpec(BaseInterfaceInputSpec): +class MakeSeedsInputSpec(BaseInterfaceInputSpec): ROI_files = InputMultiPath( File(exists=True), desc="ROI files registered to diffusion space" ) @@ -672,19 +672,19 @@ class Make_SeedsInputSpec(BaseInterfaceInputSpec): # DWI = File(mandatory=True,desc='Diffusion data file for probabilistic tractography') -class Make_SeedsOutputSpec(TraitedSpec): +class MakeSeedsOutputSpec(TraitedSpec): seed_files = OutputMultiPath( File(exists=True), desc="Seed files for probabilistic tractography" ) -class Make_Seeds(BaseInterface): +class MakeSeeds(BaseInterface): """Creates seeding ROIs by intersecting dilated ROIs with WM mask for `Dipy`. Examples -------- - >>> from cmtklib.diffusion import Make_Seeds - >>> make_dipy_seeds = Make_Seeds() + >>> from cmtklib.diffusion import MakeSeeds + >>> make_dipy_seeds = MakeSeeds() >>> make_dipy_seeds.inputs.ROI_files = ['sub-01_space-DWI_atlas-L2018_desc-scale1_dseg.nii.gz', >>> 'sub-01_space-DWI_atlas-L2018_desc-scale2_dseg.nii.gz', >>> 'sub-01_space-DWI_atlas-L2018_desc-scale3_dseg.nii.gz', @@ -695,8 +695,8 @@ class Make_Seeds(BaseInterface): """ - input_spec = Make_SeedsInputSpec - output_spec = Make_SeedsOutputSpec + input_spec = MakeSeedsInputSpec + output_spec = MakeSeedsOutputSpec ROI_idx = [] base_name = "" @@ -778,8 +778,8 @@ class Make_Mrtrix_Seeds(BaseInterface): """ - input_spec = Make_SeedsInputSpec - output_spec = Make_SeedsOutputSpec + input_spec = MakeSeedsInputSpec + output_spec = MakeSeedsOutputSpec ROI_idx = [] base_name = "" @@ -823,7 +823,7 @@ def _list_outputs(self): return outputs -class SplitDiffusion_InputSpec(BaseInterfaceInputSpec): +class SplitDiffusionInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, desc="Input diffusion MRI file") start = Int(0, desc="Volume index to start the split") @@ -831,7 +831,7 @@ class SplitDiffusion_InputSpec(BaseInterfaceInputSpec): end = Int(desc="Volume index to end the split") -class SplitDiffusion_OutputSpec(TraitedSpec): +class SplitDiffusionOutputSpec(TraitedSpec): data = File(exists=True, desc="Extracted volumes") padding1 = File(exists=False, desc="Extracted volumes with padding 1") @@ -853,8 +853,8 @@ class SplitDiffusion(BaseInterface): """ - input_spec = SplitDiffusion_InputSpec - output_spec = SplitDiffusion_OutputSpec + input_spec = SplitDiffusionInputSpec + output_spec = SplitDiffusionOutputSpec def _run_interface(self, runtime): diffusion_file = nib.load(self.inputs.in_file) diff 
--git a/cmtklib/functionalMRI.py b/cmtklib/functionalMRI.py index b1cc24150..70e5a17a1 100644 --- a/cmtklib/functionalMRI.py +++ b/cmtklib/functionalMRI.py @@ -20,23 +20,23 @@ ) -class Discard_tp_InputSpec(BaseInterfaceInputSpec): +class DiscardTPInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="Input 4D fMRI image") n_discard = Int(mandatory=True, desc="Number of n first frames to discard") -class Discard_tp_OutputSpec(TraitedSpec): +class DiscardTPOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Output 4D fMRI image with discarded frames") -class Discard_tp(BaseInterface): +class DiscardTP(BaseInterface): """Discards the n first time frame in functional MRI data. Examples -------- - >>> from cmtklib.functionalMRI import Discard_tp - >>> discard = Discard_tp() + >>> from cmtklib.functionalMRI import DiscardTP + >>> discard = DiscardTP() >>> discard.inputs.base_dir = '/my_directory' >>> discard.inputs.in_file = '/path/to/sub-01_task-rest_desc-preproc_bold.nii.gz' >>> discard.inputs.n_discard = 5 @@ -44,8 +44,8 @@ class Discard_tp(BaseInterface): """ - input_spec = Discard_tp_InputSpec - output_spec = Discard_tp_OutputSpec + input_spec = DiscardTPInputSpec + output_spec = DiscardTPOutputSpec def _run_interface(self, runtime): dataimg = nib.load(self.inputs.in_file) @@ -75,7 +75,7 @@ def _list_outputs(self): return outputs -class Nuisance_InputSpec(BaseInterfaceInputSpec): +class NuisanceRegressionInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, desc="Input fMRI volume") brainfile = File(desc="Eroded brain mask registered to fMRI space") @@ -105,7 +105,7 @@ class Nuisance_InputSpec(BaseInterfaceInputSpec): ) -class Nuisance_OutputSpec(TraitedSpec): +class NuisanceRegressionOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Output fMRI Volume") averageGlobal_npy = File(desc="Output of global regression in `.npy` format") @@ -121,13 +121,13 @@ class Nuisance_OutputSpec(TraitedSpec): averageWM_mat = File(desc="Output matrix of WM regression") -class Nuisance_regression(BaseInterface): +class NuisanceRegression(BaseInterface): """Regress out nuisance signals (WM, CSF, movements) through GLM. 
Examples -------- - >>> from cmtklib.functionalMRI import Nuisance_regression - >>> nuisance = Nuisance_regression() + >>> from cmtklib.functionalMRI import NuisanceRegression + >>> nuisance = NuisanceRegression() >>> nuisance.inputs.base_dir = '/my_directory' >>> nuisance.inputs.in_file = '/path/to/sub-01_task-rest_desc-preproc_bold.nii.gz' >>> nuisance.inputs.wm_file = '/path/to/sub-01_task-rest_desc-preproc_bold.nii.gz' @@ -148,8 +148,8 @@ class Nuisance_regression(BaseInterface): """ - input_spec = Nuisance_InputSpec - output_spec = Nuisance_OutputSpec + input_spec = NuisanceRegressionInputSpec + output_spec = NuisanceRegressionOutputSpec def _run_interface(self, runtime): # Output from previous preprocessing step @@ -321,7 +321,7 @@ def _list_outputs(self): return outputs -class Detrending_InputSpec(BaseInterfaceInputSpec): +class DetrendingInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="fMRI volume to detrend") gm_file = InputMultiPath( @@ -331,7 +331,7 @@ class Detrending_InputSpec(BaseInterfaceInputSpec): mode = Enum(["linear", "quadratic", "cubic"], desc="Detrending order") -class Detrending_OutputSpec(TraitedSpec): +class DetrendingOutputSpec(TraitedSpec): out_file = File(exists=True, desc="Detrended fMRI volume") @@ -354,8 +354,8 @@ class Detrending(BaseInterface): """ - input_spec = Detrending_InputSpec - output_spec = Detrending_OutputSpec + input_spec = DetrendingInputSpec + output_spec = DetrendingOutputSpec def _run_interface(self, runtime): print("Linear detrending") @@ -432,7 +432,7 @@ def _list_outputs(self): return outputs -class Scrubbing_InputSpec(BaseInterfaceInputSpec): +class ScrubbingInputSpec(BaseInterfaceInputSpec): in_file = File(exists=True, mandatory=True, desc="fMRI volume to scrubb") wm_mask = File(exists=True, desc="WM mask registered to fMRI space") @@ -446,7 +446,7 @@ class Scrubbing_InputSpec(BaseInterfaceInputSpec): ) -class Scrubbing_OutputSpec(TraitedSpec): +class ScrubbingOutputSpec(TraitedSpec): fd_mat = File(exists=True, desc="FD matrix for scrubbing") dvars_mat = File(exists=True, desc="DVARS matrix for scrubbing") @@ -477,8 +477,8 @@ class Scrubbing(BaseInterface): """ - input_spec = Scrubbing_InputSpec - output_spec = Scrubbing_OutputSpec + input_spec = ScrubbingInputSpec + output_spec = ScrubbingOutputSpec def _run_interface(self, runtime): print("Precompute FD and DVARS for scrubbing") diff --git a/cmtklib/process.py b/cmtklib/process.py index 5e4027184..08c19931b 100644 --- a/cmtklib/process.py +++ b/cmtklib/process.py @@ -11,7 +11,7 @@ def run(command, env=None, cwd=None): - """Function calls by `CMP_MainWindowHandler` to run datalad commands. + """Function calls by `MainWindowHandler` to run datalad commands. It runs the command specified as input via ``subprocess.run()``. 
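
Note for downstream users (not part of the patch series itself): the renames in PATCH 01/02 replace the public snake_case class names with MixedCase ones without keeping the old spellings. Below is a minimal, hypothetical compatibility sketch, assuming user scripts still import the pre-3.0.1 names; it only aliases renames that are visible in the hunks above, and the shim itself is an assumption, not something these commits ship.

# Hypothetical compatibility shim -- a sketch only, not provided by this
# patch series. Each alias maps an old snake_case name to the new MixedCase
# class introduced by PATCH 01/02, so existing user code keeps working.
from cmp.project import ProjectInfo as CMP_Project_Info
from cmp.stages.diffusion.reconstruction import (
    DipyReconConfig as Dipy_recon_config,
    MRtrixReconConfig as MRtrix_recon_config,
)
from cmp.stages.diffusion.tracking import (
    DipyTrackingConfig as Dipy_tracking_config,
    MRtrixTrackingConfig as MRtrix_tracking_config,
)
from cmtklib.connectome import (
    DmriCmat as CMTK_cmat,
    RsfmriCmat as CMTK_rsfmri_cmat,
)
from cmtklib.diffusion import MakeSeeds as Make_Seeds
from cmtklib.functionalMRI import (
    DiscardTP as Discard_tp,
    NuisanceRegression as Nuisance_regression,
)

In practice such aliases would live in the user's own code or in a separate deprecation layer, since after these commits the packages only expose the new names.
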
From c3d929805db5c83fde38b4ee1cbcea6aa9fe9df4 Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Mon, 3 Jan 2022 15:59:39 +0100 Subject: [PATCH 03/15] VERSION: Update version to 3.0.1 --- cmp/info.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmp/info.py b/cmp/info.py index 2bfb006ae..1c5019986 100644 --- a/cmp/info.py +++ b/cmp/info.py @@ -7,9 +7,9 @@ _version_major = 3 _version_minor = 0 -_version_micro = 0 +_version_micro = 1 _version_extra = "" -__release_date__ = "24.12.2021" +__release_date__ = "05.01.2022" __minor_version__ = "%s.%s" % (_version_major, _version_minor) From 5efe1e7493b613865c67120a87e7b1a5ff4d0d92 Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Mon, 3 Jan 2022 16:01:53 +0100 Subject: [PATCH 04/15] UPD: Update notebook with API refactoring change --- notebooks/start_bidsappmanager_from_notebook.ipynb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/notebooks/start_bidsappmanager_from_notebook.ipynb b/notebooks/start_bidsappmanager_from_notebook.ipynb index e6baf3eaf..5b65c6f92 100644 --- a/notebooks/start_bidsappmanager_from_notebook.ipynb +++ b/notebooks/start_bidsappmanager_from_notebook.ipynb @@ -74,7 +74,7 @@ "\n", "# launch the GUI\n", "#traitsui.wx.constants.WindowColor = wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNFACE)\n", - "mw = gui.CMP_MainWindow()\n", + "mw = gui.MainWindow()\n", "mw_res = mw.configure_traits()\n" ] } From ee193d74d76d6b07e3d593fd4b3951d27f0c44f7 Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Mon, 3 Jan 2022 16:09:03 +0100 Subject: [PATCH 05/15] CI: Update CMP version in list of test outputs --- .../ds-sample_test-01_outputs.txt | 80 ++++----- .../ds-sample_test-02_outputs.txt | 38 ++-- .../ds-sample_test-03_outputs.txt | 76 ++++---- .../ds-sample_test-04_outputs.txt | 152 ++++++++-------- .../ds-sample_test-05_outputs.txt | 156 ++++++++-------- .../ds-sample_test-06_outputs.txt | 156 ++++++++-------- .../ds-sample_test-07_outputs.txt | 168 +++++++++--------- .../ds-sample_test-08_outputs.txt | 160 ++++++++--------- .../ds-sample_test-09-simg_outputs.txt | 80 ++++----- 9 files changed, 533 insertions(+), 533 deletions(-) diff --git a/.circleci/tests/expected_outputs/ds-sample_test-01_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-01_outputs.txt index 7cd522044..051cbf034 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-01_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-01_outputs.txt @@ -1,43 +1,43 @@ -cmp-v3.0.0/dataset_description.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz 
-cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_anatomical_config.json +cmp-v3.0.1/dataset_description.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv 
+cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_anatomical_config.json freesurfer-6.0.1/dataset_description.json freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.a2009s.ctab freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.ctab diff --git a/.circleci/tests/expected_outputs/ds-sample_test-02_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-02_outputs.txt index be325594f..9205629e5 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-02_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-02_outputs.txt @@ -1,22 +1,22 @@ -cmp-v3.0.0/dataset_description.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-Desikan_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-Desikan_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-Desikan_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-Desikan_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-Desikan_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_anatomical_config.json +cmp-v3.0.1/dataset_description.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-Desikan_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-Desikan_dseg.nii.gz 
+cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-Desikan_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-Desikan_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-Desikan_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_anatomical_config.json freesurfer-6.0.1/dataset_description.json freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.a2009s.ctab freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.ctab diff --git a/.circleci/tests/expected_outputs/ds-sample_test-03_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-03_outputs.txt index 0b9ddb31b..a526c9c95 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-03_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-03_outputs.txt @@ -38,44 +38,44 @@ cmp-custom/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz cmp-custom/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz cmp-custom/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz cmp-custom/sub-01/ses-01/sub-01_ses-01_anatomical_config.json -cmp-v3.0.0/dataset_description.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.graphml 
-cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bval -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bvec -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.json -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-preproc_dwi.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_desc-DET_tractogram.trk -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_FOD.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_GFA.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_MSD.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_RTOP.nii.gz -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_anatomical_config.json -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_diffusion_config.json -cmp-v3.0.0/sub-01/ses-01/xfm/sub-01_ses-01_space-DWI_desc-head_T1w.mat +cmp-v3.0.1/dataset_description.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.graphml +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bval +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bvec +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.json +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-preproc_dwi.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_desc-DET_tractogram.trk +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_FOD.nii.gz 
+cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_GFA.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_MSD.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_RTOP.nii.gz +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_anatomical_config.json +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_diffusion_config.json +cmp-v3.0.1/sub-01/ses-01/xfm/sub-01_ses-01_space-DWI_desc-head_T1w.mat freesurfer-6.0.1/dataset_description.json freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.a2009s.ctab freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.ctab diff --git a/.circleci/tests/expected_outputs/ds-sample_test-04_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-04_outputs.txt index 6ba63b1f0..4cdbfc352 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-04_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-04_outputs.txt @@ -1,79 +1,79 @@ -cmp-v3.0.0/dataset_description.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz 
-cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_mask -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-5TT_probseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-GMWMI_probseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bval -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bvec -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.json -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-grad_dwi.txt -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-preproc_dwi.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_desc-PROB_tractogram.trk -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_FOD.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_GFA.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_MSD.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_RTOP.nii.gz -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_anatomical_config.json -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_diffusion_config.json -cmp-v3.0.0/sub-01/ses-01/xfm/final0GenericAffine.mat -cmp-v3.0.0/sub-01/ses-01/xfm/final1Warp.nii.gz +cmp-v3.0.1/dataset_description.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz 
+cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_mask +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-5TT_probseg.nii.gz 
+cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-GMWMI_probseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bval +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bvec +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.json +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-grad_dwi.txt +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-preproc_dwi.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_desc-PROB_tractogram.trk +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_FOD.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_GFA.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_MSD.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_RTOP.nii.gz +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_anatomical_config.json +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_diffusion_config.json +cmp-v3.0.1/sub-01/ses-01/xfm/final0GenericAffine.mat +cmp-v3.0.1/sub-01/ses-01/xfm/final1Warp.nii.gz freesurfer-6.0.1/dataset_description.json freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.a2009s.ctab freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.ctab diff --git a/.circleci/tests/expected_outputs/ds-sample_test-05_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-05_outputs.txt index a49ce08de..19e33c7ce 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-05_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-05_outputs.txt @@ -1,81 +1,81 @@ -cmp-v3.0.0/dataset_description.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv 
-cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_mask -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-5TT_probseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-CSF_probseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-GM_probseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_probseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.gpickle 
-cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bval -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bvec -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.json -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-grad_dwi.txt -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-preproc_dwi.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_desc-DET_tractogram.trk -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_FOD.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_GFA.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_MSD.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_RTOP.nii.gz -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_anatomical_config.json -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_diffusion_config.json -cmp-v3.0.0/sub-01/ses-01/xfm/final0GenericAffine.mat -cmp-v3.0.0/sub-01/ses-01/xfm/final1Warp.nii.gz +cmp-v3.0.1/dataset_description.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml 
+cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_mask +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-5TT_probseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-CSF_probseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-GM_probseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_probseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bval +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bvec 
+cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.json +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-grad_dwi.txt +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-preproc_dwi.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_desc-DET_tractogram.trk +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_FOD.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_GFA.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_MSD.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_RTOP.nii.gz +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_anatomical_config.json +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_diffusion_config.json +cmp-v3.0.1/sub-01/ses-01/xfm/final0GenericAffine.mat +cmp-v3.0.1/sub-01/ses-01/xfm/final1Warp.nii.gz freesurfer-6.0.1/dataset_description.json freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.a2009s.ctab freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.ctab diff --git a/.circleci/tests/expected_outputs/ds-sample_test-06_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-06_outputs.txt index 1de0a01cc..b93aa0755 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-06_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-06_outputs.txt @@ -1,81 +1,81 @@ -cmp-v3.0.0/dataset_description.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz 
-cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_mask -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-5TT_probseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-CSF_probseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-GM_probseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_probseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bval -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bvec -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.json -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-grad_dwi.txt -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_desc-preproc_dwi.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_desc-PROB_tractogram.trk -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_FOD.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_GFA.nii.gz 
-cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_MSD.nii.gz -cmp-v3.0.0/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_RTOP.nii.gz -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_anatomical_config.json -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_diffusion_config.json -cmp-v3.0.0/sub-01/ses-01/xfm/final0GenericAffine.mat -cmp-v3.0.0/sub-01/ses-01/xfm/final1Warp.nii.gz +cmp-v3.0.1/dataset_description.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale2_dseg.nii.gz 
+cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_mask +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-5TT_probseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-CSF_probseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-GM_probseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-DWI_label-WM_probseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bval +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.bvec +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.json +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-cmp_dwi.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-grad_dwi.txt +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_desc-preproc_dwi.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_desc-PROB_tractogram.trk +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_FOD.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_GFA.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_MSD.nii.gz +cmp-v3.0.1/sub-01/ses-01/dwi/sub-01_ses-01_model-SHORE_RTOP.nii.gz +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_anatomical_config.json +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_diffusion_config.json +cmp-v3.0.1/sub-01/ses-01/xfm/final0GenericAffine.mat +cmp-v3.0.1/sub-01/ses-01/xfm/final1Warp.nii.gz freesurfer-6.0.1/dataset_description.json freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.a2009s.ctab freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.ctab diff --git a/.circleci/tests/expected_outputs/ds-sample_test-07_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-07_outputs.txt index fab1aaf3b..bf06c9078 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-07_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-07_outputs.txt @@ -1,87 +1,87 @@ -cmp-v3.0.0/dataset_description.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml 
-cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_desc-eroded_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_desc-eroded_label-WM_dseg.nii.gz 
-cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.graphml -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_timeseries.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_timeseries.npy -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.graphml -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_timeseries.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_timeseries.npy -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.graphml -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_timeseries.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_timeseries.npy -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.graphml -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_timeseries.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_timeseries.npy -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.graphml -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_timeseries.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_timeseries.npy -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_meanBOLD.nii.gz -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_motion.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-bandpass_bold.nii.gz -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-cmp_bold.json -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-cmp_bold.nii.gz -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_anatomical_config.json -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_fMRI_config.json +cmp-v3.0.1/dataset_description.json 
+cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_desc-eroded_label-CSF_dseg.nii.gz 
+cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_desc-eroded_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.graphml +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_timeseries.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_timeseries.npy +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.graphml +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_timeseries.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_timeseries.npy +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.graphml +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_timeseries.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_timeseries.npy +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.graphml +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_timeseries.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_timeseries.npy +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.graphml +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_timeseries.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_timeseries.npy +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_meanBOLD.nii.gz +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_motion.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-bandpass_bold.nii.gz +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-cmp_bold.json +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-cmp_bold.nii.gz +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_anatomical_config.json +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_fMRI_config.json 
freesurfer-6.0.1/dataset_description.json freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.a2009s.ctab freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.ctab diff --git a/.circleci/tests/expected_outputs/ds-sample_test-08_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-08_outputs.txt index 0967c8d55..ab29103c1 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-08_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-08_outputs.txt @@ -1,83 +1,83 @@ -cmp-v3.0.0/dataset_description.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz 
-cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_desc-eroded_label-brain_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_desc-eroded_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_desc-eroded_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_timeseries.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_timeseries.npy -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_timeseries.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_timeseries.npy -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_timeseries.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_timeseries.npy -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_timeseries.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_timeseries.npy -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.gpickle -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_timeseries.mat -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_timeseries.npy -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_meanBOLD.nii.gz -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_motion.tsv -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-bandpass_bold.nii.gz -cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-cmp_bold.json 
-cmp-v3.0.0/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-cmp_bold.nii.gz -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_anatomical_config.json -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_fMRI_config.json +cmp-v3.0.1/dataset_description.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale1_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale4_dseg.nii.gz 
+cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_desc-eroded_label-brain_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_desc-eroded_label-CSF_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_desc-eroded_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_space-meanBOLD_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_timeseries.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale1_timeseries.npy +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_timeseries.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale2_timeseries.npy +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_timeseries.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale3_timeseries.npy +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_timeseries.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale4_timeseries.npy +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.gpickle +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_conndata-network_connectivity.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_timeseries.mat +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_atlas-L2018_res-scale5_timeseries.npy +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_meanBOLD.nii.gz +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_motion.tsv +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-bandpass_bold.nii.gz +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-cmp_bold.json +cmp-v3.0.1/sub-01/ses-01/func/sub-01_ses-01_task-rest_desc-cmp_bold.nii.gz +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_anatomical_config.json +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_fMRI_config.json freesurfer-6.0.1/dataset_description.json freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.a2009s.ctab freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.ctab diff --git a/.circleci/tests/expected_outputs/ds-sample_test-09-simg_outputs.txt 
b/.circleci/tests/expected_outputs/ds-sample_test-09-simg_outputs.txt index 7cd522044..051cbf034 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-09-simg_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-09-simg_outputs.txt @@ -1,43 +1,43 @@ -cmp-v3.0.0/dataset_description.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz -cmp-v3.0.0/sub-01/ses-01/sub-01_ses-01_anatomical_config.json +cmp-v3.0.1/dataset_description.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.nii.gz 
+cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale1_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale2_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale3_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale4_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.graphml +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_dseg.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_FreeSurferColorLUT.txt +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_atlas-L2018_res-scale5_stats.tsv +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aparcaseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-aseg_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_mask.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-brain_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.json +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-cmp_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_desc-head_T1w.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-brain_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-CSF_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-GM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_desc-eroded_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/anat/sub-01_ses-01_label-WM_dseg.nii.gz +cmp-v3.0.1/sub-01/ses-01/sub-01_ses-01_anatomical_config.json freesurfer-6.0.1/dataset_description.json freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.a2009s.ctab freesurfer-6.0.1/sub-01_ses-01/label/aparc.annot.ctab From 35ae4979b2bbc108f426774b9645ac815256d3d8 Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Mon, 3 Jan 2022 16:09:28 +0100 Subject: [PATCH 06/15] DOC: Update docs/changes --- docs/changes.rst | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/docs/changes.rst b/docs/changes.rst index 414f510d4..cf56737f3 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -2,6 +2,36 @@ Changes ======== +**************************** +Version 3.0.1 +**************************** + +Date: Jan 05, 2022 + +This version is
mostly a bug fix release that allows the python packages of Connectome Mapper 3 to be available on PyPI. +It incorporates `Pull Request #132 `_ which includes the following changes. + +*Bug fix* + +* Rename the project name in `setup.py` and `setup_pypi.py` from `"cmp"` to `"connectomemapper"`. + Such a `"cmp"` project name was already existing on PyPI, that caused continuous integration on CircleCI to fail during the last `v3.0.0` release, while uploading the python packages of CMP3 to PyPI. + +*Code refactoring* + +* Make `cmp.bidsappmanager.gui.py` more lightweight by splitting the classes defined there in different files. + (See `Issue #129 `_ for more discussion details) + +*Code style* + +* Correct a number of code style issues with class names + +*Contributors* + +* `Sebastien Tourbier `_ + +Please check the `main pull request 132 page `_ for more details. + + **************************** Version 3.0.0 **************************** From 1466a149d228570a05aa36d1ce995f8f14f5cf65 Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Mon, 3 Jan 2022 19:46:39 +0100 Subject: [PATCH 07/15] REF: Add create_ants_workflow() to RegistrationStage (#95) --- cmp/stages/registration/registration.py | 1078 ++++++++++++----------- 1 file changed, 554 insertions(+), 524 deletions(-) diff --git a/cmp/stages/registration/registration.py b/cmp/stages/registration/registration.py index 0a7d4c04e..1d9c21274 100644 --- a/cmp/stages/registration/registration.py +++ b/cmp/stages/registration/registration.py @@ -410,530 +410,7 @@ def create_workflow(self, flow, inputnode, outputnode): # fmt:on if self.config.registration_mode == "ANTs": - # [SUB-STEP 1] Linear register "T1" onto"Target_FA_resampled" - # [1.1] Convert diffusion data to mrtrix format using rotated bvecs - mr_convert = pe.Node( - interface=MRConvert( - out_filename="diffusion.mif", stride=[+1, +2, +3, +4] - ), - name="mr_convert", - ) - mr_convert.inputs.quiet = True - mr_convert.inputs.force_writing = True - - concatnode = pe.Node(interface=util.Merge(2), name="concatnode") - - # fmt:off - flow.connect( - [ - (inputnode, concatnode, [("bvecs", "in1")]), - (inputnode, concatnode, [("bvals", "in2")]), - (concatnode, mr_convert, [(("out", convert_list_to_tuple), "grad_fsl")],), - (inputnode, mr_convert, [("target", "in_file")]), - ] - ) - # fmt:on - - grad_mrtrix = pe.Node( - ExtractMRTrixGrad(out_grad_mrtrix="grad.txt"), name="extract_grad" - ) - - # fmt:off - flow.connect( - [ - (mr_convert, grad_mrtrix, [("converted", "in_file")]), - (grad_mrtrix, outputnode, [("out_grad_mrtrix", "grad")]), - ] - ) - # fmt:on - - mr_convert_b0 = pe.Node( - interface=MRConvert(out_filename="b0.nii.gz", stride=[+1, +2, +3]), - name="mr_convert_b0", - ) - mr_convert_b0.inputs.extract_at_axis = 3 - mr_convert_b0.inputs.extract_at_coordinate = [0] - - flow.connect([(inputnode, mr_convert_b0, [("target", "in_file")])]) - - dwi2tensor = pe.Node( - interface=DWI2Tensor(out_filename="dt_corrected.mif"), name="dwi2tensor" - ) - dwi2tensor_unmasked = pe.Node( - interface=DWI2Tensor(out_filename="dt_corrected_unmasked.mif"), - name="dwi2tensor_unmasked", - ) - - tensor2FA = pe.Node( - interface=TensorMetrics(out_fa="fa_corrected.mif"), name="tensor2FA" - ) - tensor2FA_unmasked = pe.Node( - interface=TensorMetrics(out_fa="fa_corrected_unmasked.mif"), - name="tensor2FA_unmasked", - ) - - mr_convert_FA = pe.Node( - interface=MRConvert( - out_filename="fa_corrected.nii.gz", stride=[+1, +2, +3] - ), - name="mr_convert_FA", - ) - mr_convert_FA_unmasked = pe.Node( - 
interface=MRConvert( - out_filename="fa_corrected_unmasked.nii.gz", stride=[+1, +2, +3] - ), - name="mr_convert_FA_unmasked", - ) - - FA_noNaN = pe.Node( - interface=cmp_fsl.MathsCommand( - out_file="fa_corrected_nonan.nii.gz", nan2zeros=True - ), - name="FA_noNaN", - ) - FA_noNaN_unmasked = pe.Node( - interface=cmp_fsl.MathsCommand( - out_file="fa_corrected_unmasked_nonan.nii.gz", nan2zeros=True - ), - name="FA_noNaN_unmasked", - ) - - # fmt:off - flow.connect( - [ - (mr_convert, dwi2tensor, [("converted", "in_file")]), - (inputnode, dwi2tensor, [("target_mask", "in_mask_file")]), - (dwi2tensor, tensor2FA, [("tensor", "in_file")]), - (inputnode, tensor2FA, [("target_mask", "in_mask")]), - (tensor2FA, mr_convert_FA, [("out_fa", "in_file")]), - (mr_convert_FA, FA_noNaN, [("converted", "in_file")]), - (mr_convert, dwi2tensor_unmasked, [("converted", "in_file")]), - (dwi2tensor_unmasked, tensor2FA_unmasked, [("tensor", "in_file")]), - (tensor2FA_unmasked, mr_convert_FA_unmasked, [("out_fa", "in_file")],), - (mr_convert_FA_unmasked, FA_noNaN_unmasked, [("converted", "in_file")],), - ] - ) - # fmt:on - - b0_masking = pe.Node( - interface=ApplyMask(out_file="b0_masked.nii.gz"), name="b0_masking" - ) - - # fmt:off - flow.connect( - [ - (mr_convert_b0, b0_masking, [("converted", "in_file")]), - (inputnode, b0_masking, [("target_mask", "mask_file")]), - ] - ) - # fmt:on - - # [1.2] Linear registration of the DW data to the T1 data - affine_registration = pe.Node( - interface=ants.Registration(), name="linear_registration" - ) - affine_registration.inputs.collapse_output_transforms = True - affine_registration.inputs.initial_moving_transform_com = True - affine_registration.inputs.output_transform_prefix = "initial" - affine_registration.inputs.num_threads = 8 - affine_registration.inputs.output_inverse_warped_image = True - affine_registration.inputs.output_warped_image = ( - "linear_warped_image.nii.gz" - ) - affine_registration.inputs.sigma_units = ["vox"] * 2 - affine_registration.inputs.transforms = ["Rigid", "Affine"] - - affine_registration.inputs.interpolation = self.config.ants_interpolation - if self.config.ants_interpolation == "BSpline": - affine_registration.inputs.interpolation_parameters = ( - self.config.ants_bspline_interpolation_parameters - ) # Default: (3,) - elif self.config.ants_interpolation == "Gaussian": - affine_registration.inputs.interpolation_parameters = ( - self.config.ants_gauss_interpolation_parameters - ) # Default: (5,5,) - elif self.config.ants_interpolation == "MultiLabel": - affine_registration.inputs.interpolation_parameters = ( - self.config.ants_multilab_interpolation_parameters - ) # Default: (5,5,) - - affine_registration.inputs.winsorize_lower_quantile = ( - self.config.ants_lower_quantile - ) # Default: 0.005 - affine_registration.inputs.winsorize_upper_quantile = ( - self.config.ants_upper_quantile - ) # Default: 0.995 - affine_registration.inputs.convergence_threshold = [ - self.config.ants_convergence_thresh - ] * 2 # Default: [1e-06]*2 - affine_registration.inputs.convergence_window_size = [ - self.config.ants_convergence_winsize - ] * 2 # Default: [10]*2 - affine_registration.inputs.metric = [ - self.config.ants_linear_cost, - self.config.ants_linear_cost, - ] # Default: ['MI','MI'] - affine_registration.inputs.metric_weight = [1.0] * 2 - affine_registration.inputs.number_of_iterations = [ - [1000, 500, 250, 100], - [1000, 500, 250, 100], - ] - affine_registration.inputs.radius_or_number_of_bins = [32, 32] - 
affine_registration.inputs.sampling_percentage = [ - self.config.ants_linear_sampling_perc, - self.config.ants_linear_sampling_perc, - ] # Default: [0.25, 0.25] - affine_registration.inputs.sampling_strategy = [ - self.config.ants_linear_sampling_strategy, - self.config.ants_linear_sampling_strategy, - ] # Default: ['Regular','Regular'] - affine_registration.inputs.shrink_factors = [[8, 4, 2, 1]] * 2 - affine_registration.inputs.smoothing_sigmas = [[3, 2, 1, 0]] * 2 - affine_registration.inputs.transform_parameters = [ - (self.config.ants_linear_gradient_step,), - (self.config.ants_linear_gradient_step,), - ] # Default: [(0.1,),(0.1,)] - affine_registration.inputs.use_histogram_matching = True - if self.config.ants_perform_syn: - affine_registration.inputs.write_composite_transform = True - affine_registration.inputs.verbose = True - - affine_registration.inputs.float = self.config.use_float_precision - - # fmt:off - flow.connect( - [ - (b0_masking, affine_registration, [("out_file", "fixed_image")]), - (inputnode, affine_registration, [("brain", "moving_image")]) - ] - ) - # fmt:on - - SyN_registration = pe.Node( - interface=ants.Registration(), name="SyN_registration" - ) - - if self.config.ants_perform_syn: - - SyN_registration.inputs.collapse_output_transforms = True - SyN_registration.inputs.write_composite_transform = False - SyN_registration.inputs.output_transform_prefix = "final" - SyN_registration.inputs.num_threads = 8 - SyN_registration.inputs.output_inverse_warped_image = True - SyN_registration.inputs.output_warped_image = "Syn_warped_image.nii.gz" - SyN_registration.inputs.sigma_units = ["vox"] * 1 - SyN_registration.inputs.transforms = ["SyN"] - SyN_registration.inputs.restrict_deformation = [[0, 1, 0]] - - SyN_registration.inputs.interpolation = ( - self.config.ants_interpolation - ) # Default: 'BSpline' - if self.config.ants_interpolation == "BSpline": - SyN_registration.inputs.interpolation_parameters = ( - self.config.ants_bspline_interpolation_parameters - ) # Default: (3,) - elif self.config.ants_interpolation == "Gaussian": - SyN_registration.inputs.interpolation_parameters = ( - self.config.ants_gauss_interpolation_parameters - ) # Default: (5,5,) - elif self.config.ants_interpolation == "MultiLabel": - SyN_registration.inputs.interpolation_parameters = ( - self.config.ants_multilab_interpolation_parameters - ) # Default: (5,5,) - - SyN_registration.inputs.winsorize_lower_quantile = ( - self.config.ants_lower_quantile - ) # Default: 0.005 - SyN_registration.inputs.winsorize_upper_quantile = ( - self.config.ants_upper_quantile - ) # Default: 0.995 - SyN_registration.inputs.convergence_threshold = [ - self.config.ants_convergence_thresh - ] * 1 # Default: [1e-06]*1 - SyN_registration.inputs.convergence_window_size = [ - self.config.ants_convergence_winsize - ] * 1 # Default: [10]*1 - SyN_registration.inputs.metric = [ - self.config.ants_nonlinear_cost - ] # Default: ['CC'] - SyN_registration.inputs.metric_weight = [1.0] * 1 - SyN_registration.inputs.number_of_iterations = [[20]] - SyN_registration.inputs.radius_or_number_of_bins = [4] - SyN_registration.inputs.sampling_percentage = [1] - SyN_registration.inputs.sampling_strategy = ["None"] - SyN_registration.inputs.shrink_factors = [[1]] * 1 - SyN_registration.inputs.smoothing_sigmas = [[0]] * 1 - SyN_registration.inputs.transform_parameters = [ - ( - self.config.ants_nonlinear_gradient_step, - self.config.ants_nonlinear_update_field_variance, - self.config.ants_nonlinear_total_field_variance, - ) - ] # Default: 
[(0.1, 3.0, 0.0)] - SyN_registration.inputs.use_histogram_matching = True - SyN_registration.inputs.verbose = True - - SyN_registration.inputs.float = self.config.use_float_precision - - # fmt:off - flow.connect( - [ - ( - affine_registration, - SyN_registration, - [("composite_transform", "initial_moving_transform")], - ), - (b0_masking, SyN_registration, [("out_file", "fixed_image")]), - (inputnode, SyN_registration, [("brain", "moving_image")]) - ] - ) - # fmt:on - - ants_applywarp_T1 = pe.Node( - interface=ants.ApplyTransforms( - default_value=0, interpolation="Gaussian", out_postfix="_warped" - ), - name="apply_warp_T1", - ) - ants_applywarp_brain = pe.Node( - interface=ants.ApplyTransforms( - default_value=0, interpolation="Gaussian", out_postfix="_warped" - ), - name="apply_warp_brain", - ) - ants_applywarp_brainmask = pe.Node( - interface=ants.ApplyTransforms( - default_value=0, - interpolation="NearestNeighbor", - out_postfix="_warped", - ), - name="apply_warp_brainmask", - ) - ants_applywarp_wm = pe.Node( - interface=ants.ApplyTransforms( - default_value=0, - interpolation="NearestNeighbor", - out_postfix="_warped", - ), - name="apply_warp_wm", - ) - ants_applywarp_rois = pe.Node( - interface=MultipleANTsApplyTransforms( - interpolation="NearestNeighbor", - default_value=0, - out_postfix="_warped", - ), - name="apply_warp_roivs", - ) - - if self.config.act_tracking: - ants_applywarp_5tt = pe.Node( - interface=ants.ApplyTransforms( - default_value=0, interpolation="Gaussian", out_postfix="_warped" - ), - name="apply_warp_5tt", - ) - ants_applywarp_5tt.inputs.dimension = 3 - ants_applywarp_5tt.inputs.input_image_type = 3 - ants_applywarp_5tt.inputs.float = True - - if self.config.tracking_tool == "Dipy": - ants_applywarp_pves = pe.Node( - interface=MultipleANTsApplyTransforms( - interpolation="Gaussian", default_value=0, out_postfix="_warped" - ), - name="apply_warp_pves", - ) - - if self.config.gmwmi_seeding: - ants_applywarp_gmwmi = pe.Node( - interface=ants.ApplyTransforms( - default_value=0, interpolation="Gaussian", out_postfix="_warped" - ), - name="apply_warp_gmwmi", - ) - ants_applywarp_gmwmi.inputs.dimension = 3 - ants_applywarp_gmwmi.inputs.input_image_type = 3 - ants_applywarp_gmwmi.inputs.float = True - - def reverse_order_transforms(transforms): - """Reverse the order of the transformations estimated by linear and SyN registration. - - Parameters - ---------- - transforms : list of File - List of transformation files - - Returns - ------- - out_transforms : list of File - Reversed list of transformation files - (``transforms[::-1]``) - """ - out_transforms = transforms[::-1] - return out_transforms - - def extract_affine_transform(transforms): - """Extract affine transformation file from a list a transformation files generated by linear and SyN registration. - - Parameters - ---------- - transforms : list of File - List of transformation files - - Returns - ------- - t : File - Affine transformation file - """ - for t in transforms: - if "Affine" in t: - return t - - def extract_warp_field(transforms): - """Extract the warpfield file from a list a transformation files generated by linear and SyN registration. 
- - Parameters - ---------- - transforms : list of File - List of transformation files - - Returns - ------- - t : File - Warp field (Non-linear transformation) file - """ - for t in transforms: - if "Warp" in t: - return t - - if self.config.ants_perform_syn: - # fmt:off - flow.connect( - [ - (SyN_registration, ants_applywarp_T1, [(("forward_transforms", reverse_order_transforms), "transforms")]), - (SyN_registration, ants_applywarp_brain, [(("forward_transforms", reverse_order_transforms), "transforms")]), - (SyN_registration, ants_applywarp_brainmask, [(("forward_transforms", reverse_order_transforms), "transforms")]), - (SyN_registration, ants_applywarp_wm, [(("forward_transforms", reverse_order_transforms), "transforms")]), - (SyN_registration, ants_applywarp_rois, [(("forward_transforms", reverse_order_transforms), "transforms")]), - (SyN_registration, outputnode, [(("forward_transforms", extract_affine_transform), "affine_transform")]), - (SyN_registration, outputnode, [(("forward_transforms", extract_warp_field), "warp_field")]), - ] - ) - # fmt:on - if self.config.act_tracking: - # fmt:off - flow.connect( - [ - (SyN_registration, ants_applywarp_5tt, [(("forward_transforms", reverse_order_transforms), "transforms")]), - ] - ) - # fmt:on - if self.config.tracking_tool == "Dipy": - # fmt:off - flow.connect( - [ - (SyN_registration, ants_applywarp_pves, [(("forward_transforms", reverse_order_transforms), "transforms")]), - ] - ) - # fmt:on - if self.config.gmwmi_seeding: - # fmt:off - flow.connect( - [ - (SyN_registration, ants_applywarp_gmwmi, [(("forward_transforms", reverse_order_transforms), "transforms")]), - ] - ) - # fmt:on - else: - # fmt:off - flow.connect( - [ - (affine_registration, ants_applywarp_T1, [(("forward_transforms", reverse_order_transforms), "transforms")]), - (affine_registration, ants_applywarp_brain, [(("forward_transforms", reverse_order_transforms), "transforms")]), - (affine_registration, ants_applywarp_brainmask, [(("forward_transforms", reverse_order_transforms), "transforms")]), - (affine_registration, ants_applywarp_wm, [(("forward_transforms", reverse_order_transforms), "transforms")]), - (affine_registration, ants_applywarp_rois, [(("forward_transforms", reverse_order_transforms), "transforms")]), - (affine_registration, outputnode, [(("forward_transforms", extract_affine_transform), "affine_transform")]), - ] - ) - # fmt:on - if self.config.act_tracking: - # fmt:off - flow.connect( - [ - (affine_registration, ants_applywarp_5tt, [(("forward_transforms", reverse_order_transforms), "transforms")]) - ] - ) - # fmt:on - if self.config.tracking_tool == "Dipy": - # fmt:off - flow.connect( - [ - (affine_registration, ants_applywarp_pves, [(("forward_transforms", reverse_order_transforms), "transforms")]) - ] - ) - # fmt:on - if self.config.gmwmi_seeding: - # fmt:off - flow.connect( - [ - (affine_registration, ants_applywarp_gmwmi, [(("forward_transforms", reverse_order_transforms), "transforms")]) - ] - ) - # fmt:on - - # fmt:off - flow.connect( - [ - (inputnode, ants_applywarp_T1, [("T1", "input_image")]), - (mr_convert_b0, ants_applywarp_T1, [("converted", "reference_image")],), - (ants_applywarp_T1, outputnode, [("output_image", "T1_registered_crop")],), - (inputnode, ants_applywarp_brain, [("brain", "input_image")]), - (mr_convert_b0, ants_applywarp_brain, [("converted", "reference_image")],), - (ants_applywarp_brain, outputnode, [("output_image", "brain_registered_crop")],), - (inputnode, ants_applywarp_brainmask, [("brain_mask", "input_image")],), - 
(mr_convert_b0, ants_applywarp_brainmask, [("converted", "reference_image")],), - (ants_applywarp_brainmask, outputnode, [("output_image", "brain_mask_registered_crop")],), - (inputnode, ants_applywarp_wm, [("wm_mask", "input_image")]), - (mr_convert_b0, ants_applywarp_wm, [("converted", "reference_image")],), - (ants_applywarp_wm, outputnode, [("output_image", "wm_mask_registered_crop")],), - (inputnode, ants_applywarp_rois, [("roi_volumes", "input_images")]), - (mr_convert_b0, ants_applywarp_rois, [("converted", "reference_image")],), - (ants_applywarp_rois, outputnode, [("output_images", "roi_volumes_registered_crop")],), - ] - ) - # fmt:on - if self.config.act_tracking: - # fmt:off - flow.connect( - [ - (inputnode, ants_applywarp_5tt, [("act_5TT", "input_image")]), - (mr_convert_b0, ants_applywarp_5tt, [("converted", "reference_image")]), - (ants_applywarp_5tt, outputnode, [("output_image", "act_5tt_registered_crop")]), - ] - ) - # fmt:on - if self.config.tracking_tool == "Dipy": - # fmt:off - flow.connect( - [ - (inputnode, ants_applywarp_pves, [("partial_volume_files", "input_images")]), - (mr_convert_b0, ants_applywarp_pves, [("converted", "reference_image")]), - (ants_applywarp_pves, outputnode, [("output_images", "partial_volumes_registered_crop")]) - ] - ) - # fmt:on - if self.config.gmwmi_seeding: - # fmt:off - flow.connect( - [ - (inputnode, ants_applywarp_gmwmi, [("gmwmi", "input_image")]), - (mr_convert_b0, ants_applywarp_gmwmi, [("converted", "reference_image")]), - (ants_applywarp_gmwmi, outputnode, [("output_image", "gmwmi_registered_crop")]), - ] - ) - # fmt:on + flow = self.create_ants_workflow(flow, inputnode, outputnode) if self.config.registration_mode == "FSL (Linear)": fsl_flirt = pe.Node( @@ -1531,6 +1008,559 @@ def extract_warp_field(transforms): ) # fmt:on + def create_ants_workflow(self, flow, inputnode, outputnode): + """Create the registration workflow using ANTS. 
+ + Parameters + ---------- + flow : nipype.pipeline.engine.Workflow + The nipype.pipeline.engine.Workflow instance of the + Diffusion pipeline + + inputnode : nipype.interfaces.utility.IdentityInterface + Identity interface describing the inputs of the stage + + outputnode : nipype.interfaces.utility.IdentityInterface + Identity interface describing the outputs of the stage + """ + # [SUB-STEP 1] Linear register "T1" onto"Target_FA_resampled" + # [1.1] Convert diffusion data to mrtrix format using rotated bvecs + mr_convert = pe.Node( + interface=MRConvert( + out_filename="diffusion.mif", stride=[+1, +2, +3, +4] + ), + name="mr_convert", + ) + mr_convert.inputs.quiet = True + mr_convert.inputs.force_writing = True + + concatnode = pe.Node(interface=util.Merge(2), name="concatnode") + + # fmt:off + flow.connect( + [ + (inputnode, concatnode, [("bvecs", "in1")]), + (inputnode, concatnode, [("bvals", "in2")]), + (concatnode, mr_convert, [(("out", convert_list_to_tuple), "grad_fsl")],), + (inputnode, mr_convert, [("target", "in_file")]), + ] + ) + # fmt:on + + grad_mrtrix = pe.Node( + ExtractMRTrixGrad(out_grad_mrtrix="grad.txt"), name="extract_grad" + ) + + # fmt:off + flow.connect( + [ + (mr_convert, grad_mrtrix, [("converted", "in_file")]), + (grad_mrtrix, outputnode, [("out_grad_mrtrix", "grad")]), + ] + ) + # fmt:on + + mr_convert_b0 = pe.Node( + interface=MRConvert(out_filename="b0.nii.gz", stride=[+1, +2, +3]), + name="mr_convert_b0", + ) + mr_convert_b0.inputs.extract_at_axis = 3 + mr_convert_b0.inputs.extract_at_coordinate = [0] + + flow.connect([(inputnode, mr_convert_b0, [("target", "in_file")])]) + + dwi2tensor = pe.Node( + interface=DWI2Tensor(out_filename="dt_corrected.mif"), name="dwi2tensor" + ) + dwi2tensor_unmasked = pe.Node( + interface=DWI2Tensor(out_filename="dt_corrected_unmasked.mif"), + name="dwi2tensor_unmasked", + ) + + tensor2FA = pe.Node( + interface=TensorMetrics(out_fa="fa_corrected.mif"), name="tensor2FA" + ) + tensor2FA_unmasked = pe.Node( + interface=TensorMetrics(out_fa="fa_corrected_unmasked.mif"), + name="tensor2FA_unmasked", + ) + + mr_convert_FA = pe.Node( + interface=MRConvert( + out_filename="fa_corrected.nii.gz", stride=[+1, +2, +3] + ), + name="mr_convert_FA", + ) + mr_convert_FA_unmasked = pe.Node( + interface=MRConvert( + out_filename="fa_corrected_unmasked.nii.gz", stride=[+1, +2, +3] + ), + name="mr_convert_FA_unmasked", + ) + + FA_noNaN = pe.Node( + interface=cmp_fsl.MathsCommand( + out_file="fa_corrected_nonan.nii.gz", nan2zeros=True + ), + name="FA_noNaN", + ) + FA_noNaN_unmasked = pe.Node( + interface=cmp_fsl.MathsCommand( + out_file="fa_corrected_unmasked_nonan.nii.gz", nan2zeros=True + ), + name="FA_noNaN_unmasked", + ) + + # fmt:off + flow.connect( + [ + (mr_convert, dwi2tensor, [("converted", "in_file")]), + (inputnode, dwi2tensor, [("target_mask", "in_mask_file")]), + (dwi2tensor, tensor2FA, [("tensor", "in_file")]), + (inputnode, tensor2FA, [("target_mask", "in_mask")]), + (tensor2FA, mr_convert_FA, [("out_fa", "in_file")]), + (mr_convert_FA, FA_noNaN, [("converted", "in_file")]), + (mr_convert, dwi2tensor_unmasked, [("converted", "in_file")]), + (dwi2tensor_unmasked, tensor2FA_unmasked, [("tensor", "in_file")]), + (tensor2FA_unmasked, mr_convert_FA_unmasked, [("out_fa", "in_file")],), + (mr_convert_FA_unmasked, FA_noNaN_unmasked, [("converted", "in_file")],), + ] + ) + # fmt:on + + b0_masking = pe.Node( + interface=ApplyMask(out_file="b0_masked.nii.gz"), name="b0_masking" + ) + + # fmt:off + flow.connect( + [ + (mr_convert_b0, 
b0_masking, [("converted", "in_file")]), + (inputnode, b0_masking, [("target_mask", "mask_file")]), + ] + ) + # fmt:on + + # [1.2] Linear registration of the DW data to the T1 data + affine_registration = pe.Node( + interface=ants.Registration(), name="linear_registration" + ) + affine_registration.inputs.collapse_output_transforms = True + affine_registration.inputs.initial_moving_transform_com = True + affine_registration.inputs.output_transform_prefix = "initial" + affine_registration.inputs.num_threads = 8 + affine_registration.inputs.output_inverse_warped_image = True + affine_registration.inputs.output_warped_image = ( + "linear_warped_image.nii.gz" + ) + affine_registration.inputs.sigma_units = ["vox"] * 2 + affine_registration.inputs.transforms = ["Rigid", "Affine"] + + affine_registration.inputs.interpolation = self.config.ants_interpolation + if self.config.ants_interpolation == "BSpline": + affine_registration.inputs.interpolation_parameters = ( + self.config.ants_bspline_interpolation_parameters + ) # Default: (3,) + elif self.config.ants_interpolation == "Gaussian": + affine_registration.inputs.interpolation_parameters = ( + self.config.ants_gauss_interpolation_parameters + ) # Default: (5,5,) + elif self.config.ants_interpolation == "MultiLabel": + affine_registration.inputs.interpolation_parameters = ( + self.config.ants_multilab_interpolation_parameters + ) # Default: (5,5,) + + affine_registration.inputs.winsorize_lower_quantile = (self.config.ants_lower_quantile) # Default: 0.005 + affine_registration.inputs.winsorize_upper_quantile = (self.config.ants_upper_quantile) # Default: 0.995 + affine_registration.inputs.convergence_threshold = [self.config.ants_convergence_thresh] * 2 # Default: [1e-06]*2 + affine_registration.inputs.convergence_window_size = [self.config.ants_convergence_winsize] * 2 # Default: [10]*2 + affine_registration.inputs.metric = [ + self.config.ants_linear_cost, + self.config.ants_linear_cost, + ] # Default: ['MI','MI'] + affine_registration.inputs.metric_weight = [1.0] * 2 + affine_registration.inputs.number_of_iterations = [ + [1000, 500, 250, 100], + [1000, 500, 250, 100], + ] + affine_registration.inputs.radius_or_number_of_bins = [32, 32] + affine_registration.inputs.sampling_percentage = [ + self.config.ants_linear_sampling_perc, + self.config.ants_linear_sampling_perc, + ] # Default: [0.25, 0.25] + affine_registration.inputs.sampling_strategy = [ + self.config.ants_linear_sampling_strategy, + self.config.ants_linear_sampling_strategy, + ] # Default: ['Regular','Regular'] + affine_registration.inputs.shrink_factors = [[8, 4, 2, 1]] * 2 + affine_registration.inputs.smoothing_sigmas = [[3, 2, 1, 0]] * 2 + affine_registration.inputs.transform_parameters = [ + (self.config.ants_linear_gradient_step,), + (self.config.ants_linear_gradient_step,), + ] # Default: [(0.1,),(0.1,)] + affine_registration.inputs.use_histogram_matching = True + if self.config.ants_perform_syn: + affine_registration.inputs.write_composite_transform = True + affine_registration.inputs.verbose = True + + affine_registration.inputs.float = self.config.use_float_precision + + # fmt:off + flow.connect( + [ + (b0_masking, affine_registration, [("out_file", "fixed_image")]), + (inputnode, affine_registration, [("brain", "moving_image")]) + ] + ) + # fmt:on + + SyN_registration = pe.Node( + interface=ants.Registration(), name="SyN_registration" + ) + + if self.config.ants_perform_syn: + + SyN_registration.inputs.collapse_output_transforms = True + 
SyN_registration.inputs.write_composite_transform = False + SyN_registration.inputs.output_transform_prefix = "final" + SyN_registration.inputs.num_threads = 8 + SyN_registration.inputs.output_inverse_warped_image = True + SyN_registration.inputs.output_warped_image = "Syn_warped_image.nii.gz" + SyN_registration.inputs.sigma_units = ["vox"] * 1 + SyN_registration.inputs.transforms = ["SyN"] + SyN_registration.inputs.restrict_deformation = [[0, 1, 0]] + + SyN_registration.inputs.interpolation = ( + self.config.ants_interpolation + ) # Default: 'BSpline' + if self.config.ants_interpolation == "BSpline": + SyN_registration.inputs.interpolation_parameters = ( + self.config.ants_bspline_interpolation_parameters + ) # Default: (3,) + elif self.config.ants_interpolation == "Gaussian": + SyN_registration.inputs.interpolation_parameters = ( + self.config.ants_gauss_interpolation_parameters + ) # Default: (5,5,) + elif self.config.ants_interpolation == "MultiLabel": + SyN_registration.inputs.interpolation_parameters = ( + self.config.ants_multilab_interpolation_parameters + ) # Default: (5,5,) + + SyN_registration.inputs.winsorize_lower_quantile = (self.config.ants_lower_quantile) # Default: 0.005 + SyN_registration.inputs.winsorize_upper_quantile = (self.config.ants_upper_quantile) # Default: 0.995 + SyN_registration.inputs.convergence_threshold = [self.config.ants_convergence_thresh] * 1 # Default: [1e-06]*1 + SyN_registration.inputs.convergence_window_size = [self.config.ants_convergence_winsize] * 1 # Default: [10]*1 + SyN_registration.inputs.metric = [self.config.ants_nonlinear_cost] # Default: ['CC'] + SyN_registration.inputs.metric_weight = [1.0] * 1 + SyN_registration.inputs.number_of_iterations = [[20]] + SyN_registration.inputs.radius_or_number_of_bins = [4] + SyN_registration.inputs.sampling_percentage = [1] + SyN_registration.inputs.sampling_strategy = ["None"] + SyN_registration.inputs.shrink_factors = [[1]] * 1 + SyN_registration.inputs.smoothing_sigmas = [[0]] * 1 + SyN_registration.inputs.transform_parameters = [ + ( + self.config.ants_nonlinear_gradient_step, + self.config.ants_nonlinear_update_field_variance, + self.config.ants_nonlinear_total_field_variance, + ) + ] # Default: [(0.1, 3.0, 0.0)] + SyN_registration.inputs.use_histogram_matching = True + SyN_registration.inputs.verbose = True + + SyN_registration.inputs.float = self.config.use_float_precision + + # fmt:off + flow.connect( + [ + (affine_registration, SyN_registration, [("composite_transform", "initial_moving_transform")]), + (b0_masking, SyN_registration, [("out_file", "fixed_image")]), + (inputnode, SyN_registration, [("brain", "moving_image")]) + ] + ) + # fmt:on + + ants_applywarp_T1 = pe.Node( + interface=ants.ApplyTransforms( + default_value=0, interpolation="Gaussian", out_postfix="_warped" + ), + name="apply_warp_T1", + ) + ants_applywarp_brain = pe.Node( + interface=ants.ApplyTransforms( + default_value=0, interpolation="Gaussian", out_postfix="_warped" + ), + name="apply_warp_brain", + ) + ants_applywarp_brainmask = pe.Node( + interface=ants.ApplyTransforms( + default_value=0, + interpolation="NearestNeighbor", + out_postfix="_warped", + ), + name="apply_warp_brainmask", + ) + ants_applywarp_wm = pe.Node( + interface=ants.ApplyTransforms( + default_value=0, + interpolation="NearestNeighbor", + out_postfix="_warped", + ), + name="apply_warp_wm", + ) + ants_applywarp_rois = pe.Node( + interface=MultipleANTsApplyTransforms( + interpolation="NearestNeighbor", + default_value=0, + out_postfix="_warped", + ), + 
name="apply_warp_roivs", + ) + + if self.config.act_tracking: + ants_applywarp_5tt = pe.Node( + interface=ants.ApplyTransforms( + default_value=0, interpolation="Gaussian", out_postfix="_warped" + ), + name="apply_warp_5tt", + ) + ants_applywarp_5tt.inputs.dimension = 3 + ants_applywarp_5tt.inputs.input_image_type = 3 + ants_applywarp_5tt.inputs.float = True + + if self.config.tracking_tool == "Dipy": + ants_applywarp_pves = pe.Node( + interface=MultipleANTsApplyTransforms( + interpolation="Gaussian", default_value=0, out_postfix="_warped" + ), + name="apply_warp_pves", + ) + + if self.config.gmwmi_seeding: + ants_applywarp_gmwmi = pe.Node( + interface=ants.ApplyTransforms( + default_value=0, interpolation="Gaussian", out_postfix="_warped" + ), + name="apply_warp_gmwmi", + ) + ants_applywarp_gmwmi.inputs.dimension = 3 + ants_applywarp_gmwmi.inputs.input_image_type = 3 + ants_applywarp_gmwmi.inputs.float = True + + def reverse_order_transforms(transforms): + """Reverse the order of the transformations estimated by linear and SyN registration. + + Parameters + ---------- + transforms : list of File + List of transformation files + + Returns + ------- + out_transforms : list of File + Reversed list of transformation files + (``transforms[::-1]``) + """ + out_transforms = transforms[::-1] + return out_transforms + + def extract_affine_transform(transforms): + """Extract affine transformation file from a list a transformation files generated by linear and SyN registration. + + Parameters + ---------- + transforms : list of File + List of transformation files + + Returns + ------- + t : File + Affine transformation file + """ + for t in transforms: + if "Affine" in t: + return t + + def extract_warp_field(transforms): + """Extract the warpfield file from a list a transformation files generated by linear and SyN registration. 
+ + Parameters + ---------- + transforms : list of File + List of transformation files + + Returns + ------- + t : File + Warp field (Non-linear transformation) file + """ + for t in transforms: + if "Warp" in t: + return t + + if self.config.ants_perform_syn: + # fmt:off + flow.connect( + [ + (SyN_registration, ants_applywarp_T1, [(("forward_transforms", reverse_order_transforms), "transforms")]), + (SyN_registration, ants_applywarp_brain, [(("forward_transforms", reverse_order_transforms), "transforms")]), + (SyN_registration, ants_applywarp_brainmask, [(("forward_transforms", reverse_order_transforms), "transforms")]), + (SyN_registration, ants_applywarp_wm, [(("forward_transforms", reverse_order_transforms), "transforms")]), + (SyN_registration, ants_applywarp_rois, [(("forward_transforms", reverse_order_transforms), "transforms")]), + (SyN_registration, outputnode, [(("forward_transforms", extract_affine_transform), "affine_transform")]), + (SyN_registration, outputnode, [(("forward_transforms", extract_warp_field), "warp_field")]), + ] + ) + # fmt:on + if self.config.act_tracking: + # fmt:off + flow.connect( + [ + (SyN_registration, ants_applywarp_5tt, [(("forward_transforms", reverse_order_transforms), "transforms")]), + ] + ) + # fmt:on + if self.config.tracking_tool == "Dipy": + # fmt:off + flow.connect( + [ + (SyN_registration, ants_applywarp_pves, [(("forward_transforms", reverse_order_transforms), "transforms")]), + ] + ) + # fmt:on + if self.config.gmwmi_seeding: + # fmt:off + flow.connect( + [ + (SyN_registration, ants_applywarp_gmwmi, [(("forward_transforms", reverse_order_transforms), "transforms")]), + ] + ) + # fmt:on + else: + # fmt:off + flow.connect( + [ + (affine_registration, ants_applywarp_T1, [(("forward_transforms", reverse_order_transforms), "transforms")]), + (affine_registration, ants_applywarp_brain, [(("forward_transforms", reverse_order_transforms), "transforms")]), + (affine_registration, ants_applywarp_brainmask, [(("forward_transforms", reverse_order_transforms), "transforms")]), + (affine_registration, ants_applywarp_wm, [(("forward_transforms", reverse_order_transforms), "transforms")]), + (affine_registration, ants_applywarp_rois, [(("forward_transforms", reverse_order_transforms), "transforms")]), + (affine_registration, outputnode, [(("forward_transforms", extract_affine_transform), "affine_transform")]), + ] + ) + # fmt:on + if self.config.act_tracking: + # fmt:off + flow.connect( + [ + (affine_registration, ants_applywarp_5tt, [(("forward_transforms", reverse_order_transforms), "transforms")]) + ] + ) + # fmt:on + if self.config.tracking_tool == "Dipy": + # fmt:off + flow.connect( + [ + (affine_registration, ants_applywarp_pves, [(("forward_transforms", reverse_order_transforms), "transforms")]) + ] + ) + # fmt:on + if self.config.gmwmi_seeding: + # fmt:off + flow.connect( + [ + (affine_registration, ants_applywarp_gmwmi, [(("forward_transforms", reverse_order_transforms), "transforms")]) + ] + ) + # fmt:on + + # fmt:off + flow.connect( + [ + (inputnode, ants_applywarp_T1, [("T1", "input_image")]), + (mr_convert_b0, ants_applywarp_T1, [("converted", "reference_image")],), + (ants_applywarp_T1, outputnode, [("output_image", "T1_registered_crop")],), + (inputnode, ants_applywarp_brain, [("brain", "input_image")]), + (mr_convert_b0, ants_applywarp_brain, [("converted", "reference_image")],), + (ants_applywarp_brain, outputnode, [("output_image", "brain_registered_crop")],), + (inputnode, ants_applywarp_brainmask, [("brain_mask", "input_image")],), + 
(mr_convert_b0, ants_applywarp_brainmask, [("converted", "reference_image")],), + (ants_applywarp_brainmask, outputnode, [("output_image", "brain_mask_registered_crop")],), + (inputnode, ants_applywarp_wm, [("wm_mask", "input_image")]), + (mr_convert_b0, ants_applywarp_wm, [("converted", "reference_image")],), + (ants_applywarp_wm, outputnode, [("output_image", "wm_mask_registered_crop")],), + (inputnode, ants_applywarp_rois, [("roi_volumes", "input_images")]), + (mr_convert_b0, ants_applywarp_rois, [("converted", "reference_image")],), + (ants_applywarp_rois, outputnode, [("output_images", "roi_volumes_registered_crop")],), + ] + ) + # fmt:on + if self.config.act_tracking: + # fmt:off + flow.connect( + [ + (inputnode, ants_applywarp_5tt, [("act_5TT", "input_image")]), + (mr_convert_b0, ants_applywarp_5tt, [("converted", "reference_image")]), + (ants_applywarp_5tt, outputnode, [("output_image", "act_5tt_registered_crop")]), + ] + ) + # fmt:on + if self.config.tracking_tool == "Dipy": + # fmt:off + flow.connect( + [ + (inputnode, ants_applywarp_pves, [("partial_volume_files", "input_images")]), + (mr_convert_b0, ants_applywarp_pves, [("converted", "reference_image")]), + (ants_applywarp_pves, outputnode, [("output_images", "partial_volumes_registered_crop")]) + ] + ) + # fmt:on + if self.config.gmwmi_seeding: + # fmt:off + flow.connect( + [ + (inputnode, ants_applywarp_gmwmi, [("gmwmi", "input_image")]), + (mr_convert_b0, ants_applywarp_gmwmi, [("converted", "reference_image")]), + (ants_applywarp_gmwmi, outputnode, [("output_image", "gmwmi_registered_crop")]), + ] + ) + # fmt:on + return flow + + # def create_flirt_workflow(self, flow, inputnode, outputnode): + # """Create the workflow of the registration stage using FSL FLIRT. + # + # Parameters + # ---------- + # flow : nipype.pipeline.engine.Workflow + # The nipype.pipeline.engine.Workflow instance of either the + # Diffusion pipeline or the fMRI pipeline + # + # inputnode : nipype.interfaces.utility.IdentityInterface + # Identity interface describing the inputs of the stage + # + # outputnode : nipype.interfaces.utility.IdentityInterface + # Identity interface describing the outputs of the stage + # """ + # return NotImplementedError + # + # def create_bbregister_workflow(self, flow, inputnode, outputnode): + # """Create the workflow of the registration stage using FreeSurfer BBRegister. + # + # Parameters + # ---------- + # flow : nipype.pipeline.engine.Workflow + # The nipype.pipeline.engine.Workflow instance of the + # fMRI pipeline + # + # inputnode : nipype.interfaces.utility.IdentityInterface + # Identity interface describing the inputs of the stage + # + # outputnode : nipype.interfaces.utility.IdentityInterface + # Identity interface describing the outputs of the stage + # """ + # return NotImplementedError + def define_inspect_outputs(self): """Update the `inspect_outputs` class attribute. 
From 19ae414038fa498d19a677acf5d2866fc47c5abc Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Tue, 4 Jan 2022 09:16:35 +0100 Subject: [PATCH 08/15] REF: Add create_flirt_workflow() and create_bbregister_workflow() to RegistrationStage (#95) --- cmp/stages/registration/registration.py | 1049 ++++++++--------------- 1 file changed, 377 insertions(+), 672 deletions(-) diff --git a/cmp/stages/registration/registration.py b/cmp/stages/registration/registration.py index 1d9c21274..2f5d4fb3c 100644 --- a/cmp/stages/registration/registration.py +++ b/cmp/stages/registration/registration.py @@ -372,644 +372,19 @@ def create_workflow(self, flow, inputnode, outputnode): outputnode : nipype.interfaces.utility.IdentityInterface Identity interface describing the outputs of the stage """ - # Extract first volume and resample it to 1x1x1mm3 - if self.config.pipeline == "Diffusion": - extract_first = pe.Node( - interface=fsl.ExtractROI(t_min=0, t_size=1, roi_file="first.nii.gz"), - name="extract_first", - ) - flow.connect([(inputnode, extract_first, [("target", "in_file")])]) - fs_mriconvert = pe.Node( - interface=fs.MRIConvert( - out_file="target_first.nii.gz" - ), - name="target_resample", - ) - flow.connect([(extract_first, fs_mriconvert, [("roi_file", "in_file")])]) - - # fmt:off - flow.connect( - [ - (inputnode, outputnode, [("target", "target_epicorrected")]), - (inputnode, outputnode, [("bvals", "bvals"), - ("bvecs", "bvecs")]), - ] - ) - # fmt:on - - elif self.config.pipeline == "fMRI": - fmri_bet = pe.Node(interface=fsl.BET(), name="fMRI_skullstrip") - T1_bet = pe.Node(interface=fsl.BET(), name="T1_skullstrip") - # fmt:off - flow.connect( - [ - (inputnode, fmri_bet, [("target", "in_file")]), - (inputnode, T1_bet, [("T1", "in_file")]), - ] - ) - # fmt:on - - if self.config.registration_mode == "ANTs": - flow = self.create_ants_workflow(flow, inputnode, outputnode) - - if self.config.registration_mode == "FSL (Linear)": - fsl_flirt = pe.Node( - interface=fsl.FLIRT( - out_file="T1-TO-TARGET.nii.gz", out_matrix_file="T1-TO-TARGET.mat" - ), - name="linear_registration", - ) - fsl_flirt.inputs.uses_qform = self.config.uses_qform - fsl_flirt.inputs.dof = self.config.dof - fsl_flirt.inputs.cost = self.config.fsl_cost - fsl_flirt.inputs.no_search = self.config.no_search - fsl_flirt.inputs.args = self.config.flirt_args - - fsl_applyxfm_wm = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - interp="nearestneighbour", - out_file="wm_mask_registered.nii.gz", - ), - name="apply_registration_wm", - ) - fsl_applyxfm_rois = pe.Node( - interface=ApplymultipleXfm(interp='nearestneighbour'), - name="apply_registration_roivs" - ) - - # TODO apply xfm to gmwmi / 5tt and pves - # fmt:off - flow.connect( - [ - (inputnode, fsl_applyxfm_wm, [("wm_mask", "in_file")]), - (fsl_flirt, outputnode, [("out_file", "T1_registered_crop")]), - (fsl_flirt, fsl_applyxfm_wm, [("out_matrix_file", "in_matrix_file")],), - (fsl_applyxfm_wm, outputnode, [("out_file", "wm_mask_registered_crop")],), - (inputnode, fsl_applyxfm_rois, [("roi_volumes", "in_files")]), - (fsl_flirt, fsl_applyxfm_rois, [("out_matrix_file", "xfm_file")]), - (fsl_flirt, outputnode, [("out_matrix_file", "affine_transform")]), - (fsl_applyxfm_rois, outputnode, [("out_files", "roi_volumes_registered_crop")],), - ] - ) - # fmt:on - - if self.config.pipeline == "fMRI": - # fmt:off - flow.connect( - [ - (T1_bet, fsl_flirt, [("out_file", "in_file")]), - (fmri_bet, fsl_flirt, [("out_file", "reference")]), - (fmri_bet, fsl_applyxfm_wm, [("out_file", "reference")]), 
- (fmri_bet, fsl_applyxfm_rois, [("out_file", "reference")]), - ] - ) - # fmt:on - - fsl_applyxfm_eroded_wm = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - interp="nearestneighbour", - out_file="eroded_wm_registered.nii.gz", - ), - name="apply_registration_wm_eroded", - ) - if self.config.apply_to_eroded_csf: - fsl_applyxfm_eroded_csf = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - interp="nearestneighbour", - out_file="eroded_csf_registered.nii.gz", - ), - name="apply_registration_csf_eroded", - ) - # fmt:off - flow.connect( - [ - (inputnode, fsl_applyxfm_eroded_csf, [("eroded_csf", "in_file")],), - (fmri_bet, fsl_applyxfm_eroded_csf, [("out_file", "reference")],), - (fsl_flirt, fsl_applyxfm_eroded_csf, [("out_matrix_file", "in_matrix_file")],), - (fsl_applyxfm_eroded_csf, outputnode, [("out_file", "eroded_csf_registered_crop")],), - ] - ) - # fmt:on - if self.config.apply_to_eroded_brain: - fsl_applyxfm_eroded_brain = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - interp="nearestneighbour", - out_file="eroded_brain_registered.nii.gz", - ), - name="apply_registration_brain_eroded", - ) - # fmt:off - flow.connect( - [ - (inputnode, fsl_applyxfm_eroded_brain, [("eroded_brain", "in_file")],), - (fmri_bet, fsl_applyxfm_eroded_brain, [("out_file", "reference")],), - (fsl_flirt, fsl_applyxfm_eroded_brain, [("out_matrix_file", "in_matrix_file")],), - (fsl_applyxfm_eroded_brain, outputnode, [("out_file", "eroded_brain_registered_crop")],), - ] - ) - # fmt:on - # fmt:off - flow.connect( - [ - (inputnode, fsl_applyxfm_eroded_wm, [("eroded_wm", "in_file")]), - (fmri_bet, fsl_applyxfm_eroded_wm, [("out_file", "reference")]), - (fsl_flirt, fsl_applyxfm_eroded_wm, [("out_matrix_file", "in_matrix_file")],), - (fsl_applyxfm_eroded_wm, outputnode, [("out_file", "eroded_wm_registered_crop")],), - ] - ) - # fmt:on - else: - # fmt:off - flow.connect( - [ - (inputnode, fsl_flirt, [("T1", "in_file")]), - (fs_mriconvert, fsl_flirt, [("out_file", "reference")]), - (fs_mriconvert, fsl_applyxfm_wm, [("out_file", "reference")]), - (fs_mriconvert, fsl_applyxfm_rois, [("out_file", "reference")]), - ] - ) - # fmt:on - - if (self.config.pipeline == "fMRI") and (self.config.registration_mode == "BBregister (FS)"): - - fs_bbregister = pe.Node( - interface=cmp_fs.BBRegister(out_fsl_file="target-TO-orig.mat"), - name="bbregister", - ) - fs_bbregister.inputs.init = self.config.init - fs_bbregister.inputs.contrast_type = self.config.contrast_type - fs_bbregister.inputs.subjects_dir = self.fs_subjects_dir - fs_bbregister.inputs.subject_id = self.fs_subject_id - - fsl_invertxfm = pe.Node( - interface=fsl.ConvertXFM(invert_xfm=True), name="fsl_invertxfm" - ) - - fs_source = pe.Node( - interface=fs.preprocess.FreeSurferSource(), name="get_fs_files" - ) - fs_source.inputs.subjects_dir = self.fs_subjects_dir - fs_source.inputs.subject_id = self.fs_subject_id - - fs_tkregister2 = pe.Node( - interface=cmp_fs.Tkregister2(regheader=True, noedit=True), - name="fs_tkregister2", - ) - fs_tkregister2.inputs.reg_out = "T1-TO-orig.dat" - fs_tkregister2.inputs.fslreg_out = "T1-TO-orig.mat" - fs_tkregister2.inputs.subjects_dir = self.fs_subjects_dir - fs_tkregister2.inputs.subject_id = self.fs_subject_id - - fsl_concatxfm = pe.Node( - interface=fsl.ConvertXFM(concat_xfm=True), name="fsl_concatxfm" - ) - - fsl_applyxfm = pe.Node( - interface=fsl.ApplyXFM(apply_xfm=True, out_file="T1-TO-TARGET.nii.gz"), - name="linear_registration", - ) - fsl_applyxfm_wm = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - 
interp="nearestneighbour", - out_file="wm_mask_registered.nii.gz", - ), - name="apply_registration_wm", - ) - fsl_applyxfm_rois = pe.Node( - interface=ApplymultipleXfm(), name="apply_registration_roivs" - ) - - # fmt:off - flow.connect( - [ - (fs_bbregister, fsl_invertxfm, [("out_fsl_file", "in_file")]), - (fsl_invertxfm, fsl_concatxfm, [("out_file", "in_file2")]), - (fs_source, fs_tkregister2, [("orig", "target_file"), ("rawavg", "in_file")],), - (fs_tkregister2, fsl_concatxfm, [("fslregout_file", "in_file")]), - (T1_bet, fsl_applyxfm, [("out_file", "in_file")]), - (fsl_concatxfm, fsl_applyxfm, [("out_file", "in_matrix_file")]), - (fsl_applyxfm, outputnode, [("out_file", "T1_registered_crop")]), - (inputnode, fsl_applyxfm_wm, [("wm_mask", "in_file")]), - (fsl_concatxfm, fsl_applyxfm_wm, [("out_file", "in_matrix_file")]), - (fsl_applyxfm_wm, outputnode, [("out_file", "wm_mask_registered_crop")],), - (inputnode, fsl_applyxfm_rois, [("roi_volumes", "in_files")]), - (fsl_concatxfm, fsl_applyxfm_rois, [("out_file", "xfm_file")]), - (fsl_applyxfm_rois, outputnode, [("out_files", "roi_volumes_registered_crop")],), - (fmri_bet, fs_bbregister, [("out_file", "source_file")]), - (inputnode, fsl_applyxfm, [("target", "reference")]), - (inputnode, fsl_applyxfm_wm, [("target", "reference")]), - (inputnode, fsl_applyxfm_rois, [("target", "reference")]), - ] - ) - # fmt:on - - fsl_applyxfm_eroded_wm = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - interp="nearestneighbour", - out_file="eroded_wm_registered.nii.gz", - ), - name="apply_registration_wm_eroded", - ) - if self.config.apply_to_eroded_csf: - fsl_applyxfm_eroded_csf = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - interp="nearestneighbour", - out_file="eroded_csf_registered.nii.gz", - ), - name="apply_registration_csf_eroded", - ) - - # fmt:off - flow.connect( - [ - (inputnode, fsl_applyxfm_eroded_csf, [("eroded_csf", "in_file")],), - (inputnode, fsl_applyxfm_eroded_csf, [("target", "reference")]), - (fsl_concatxfm, fsl_applyxfm_eroded_csf, [("out_file", "in_matrix_file")],), - (fsl_applyxfm_eroded_csf, outputnode, [("out_file", "eroded_csf_registered_crop")],), - ] - ) - # fmt:on - - if self.config.apply_to_eroded_brain: - fsl_applyxfm_eroded_brain = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - interp="nearestneighbour", - out_file="eroded_brain_registered.nii.gz", - ), - name="apply_registration_brain_eroded", - ) - - # fmt:off - flow.connect( - [ - (inputnode, fsl_applyxfm_eroded_brain, [("eroded_brain", "in_file")],), - (inputnode, fsl_applyxfm_eroded_brain, [("target", "reference")],), - (fsl_concatxfm, fsl_applyxfm_eroded_brain, [("out_file", "in_matrix_file")],), - (fsl_applyxfm_eroded_brain, outputnode, [("out_file", "eroded_brain_registered_crop")],), - ] - ) - # fmt:on - - # fmt:off - flow.connect( - [ - (inputnode, fsl_applyxfm_eroded_wm, [("eroded_wm", "in_file")]), - (inputnode, fsl_applyxfm_eroded_wm, [("target", "reference")]), - (fsl_concatxfm, fsl_applyxfm_eroded_wm, [("out_file", "in_matrix_file")],), - (fsl_applyxfm_eroded_wm, outputnode, [("out_file", "eroded_wm_registered_crop")],), - ] - ) - # fmt:on - - if self.config.registration_mode == "FSL": - # [SUB-STEP 1] Linear register "T1" onto"Target_FA_resampled" - # [1.1] Convert diffusion data to mrtrix format using rotated bvecs - mr_convert = pe.Node( - interface=MRConvert( - out_filename="diffusion.mif", stride=[+1, +2, +3, +4] - ), - name="mr_convert", - ) - mr_convert.inputs.quiet = True - mr_convert.inputs.force_writing = True - - concatnode = 
pe.Node(interface=util.Merge(2), name="concatnode") - - # fmt:off - flow.connect( - [ - (inputnode, concatnode, [("bvecs", "in1")]), - (inputnode, concatnode, [("bvals", "in2")]), - (concatnode, mr_convert, [(("out", convert_list_to_tuple), "grad_fsl")],), - (inputnode, mr_convert, [("target", "in_file")]), - ] - ) - # fmt:on - - grad_mrtrix = pe.Node( - ExtractMRTrixGrad(out_grad_mrtrix="grad.txt"), name="extract_grad" - ) - - # fmt:off - flow.connect( - [ - (mr_convert, grad_mrtrix, [("converted", "in_file")]), - (grad_mrtrix, outputnode, [("out_grad_mrtrix", "grad")]) - ] - ) - # fmt:on - - mr_convert_b0 = pe.Node( - interface=MRConvert(out_filename="b0.nii.gz", stride=[+1, +2, +3]), - name="mr_convert_b0", - ) - mr_convert_b0.inputs.extract_at_axis = 3 - mr_convert_b0.inputs.extract_at_coordinate = [0] - - # fmt:off - flow.connect( - [ - (inputnode, mr_convert_b0, [("target", "in_file")]) - ] - ) - # fmt:on - - dwi2tensor = pe.Node( - interface=DWI2Tensor(out_filename="dt_corrected.mif"), name="dwi2tensor" - ) - dwi2tensor_unmasked = pe.Node( - interface=DWI2Tensor(out_filename="dt_corrected_unmasked.mif"), - name="dwi2tensor_unmasked", - ) - - tensor2FA = pe.Node( - interface=TensorMetrics(out_fa="fa_corrected.mif"), name="tensor2FA" - ) - tensor2FA_unmasked = pe.Node( - interface=TensorMetrics(out_fa="fa_corrected_unmasked.mif"), - name="tensor2FA_unmasked", - ) - - mr_convert_FA = pe.Node( - interface=MRConvert( - out_filename="fa_corrected.nii.gz", stride=[+1, +2, +3] - ), - name="mr_convert_FA", - ) - mr_convert_FA_unmasked = pe.Node( - interface=MRConvert( - out_filename="fa_corrected_unmasked.nii.gz", stride=[+1, +2, +3] - ), - name="mr_convert_FA_unmasked", - ) - - FA_noNaN = pe.Node( - interface=cmp_fsl.MathsCommand( - out_file="fa_corrected_nonan.nii.gz", nan2zeros=True - ), - name="FA_noNaN", - ) - FA_noNaN_unmasked = pe.Node( - interface=cmp_fsl.MathsCommand( - out_file="fa_corrected_unmasked_nonan.nii.gz", nan2zeros=True - ), - name="FA_noNaN_unmasked", - ) - - # fmt:off - flow.connect( - [ - (mr_convert, dwi2tensor, [("converted", "in_file")]), - (inputnode, dwi2tensor, [("target_mask", "in_mask_file")]), - (dwi2tensor, tensor2FA, [("tensor", "in_file")]), - (inputnode, tensor2FA, [("target_mask", "in_mask")]), - (tensor2FA, mr_convert_FA, [("out_fa", "in_file")]), - (mr_convert_FA, FA_noNaN, [("converted", "in_file")]), - (mr_convert, dwi2tensor_unmasked, [("converted", "in_file")]), - (dwi2tensor_unmasked, tensor2FA_unmasked, [("tensor", "in_file")]), - (tensor2FA_unmasked, mr_convert_FA_unmasked, [("out_fa", "in_file")],), - (mr_convert_FA_unmasked, FA_noNaN_unmasked, [("converted", "in_file")],), - ] - ) - # fmt:on - - # [1.2] Linear registration of the DW data to the T1 data - fsl_flirt = pe.Node( - interface=fsl.FLIRT( - out_file="T1-TO-B0.nii.gz", out_matrix_file="T12DWIaff.mat" - ), - name="linear_registration", - ) - fsl_flirt.inputs.dof = self.config.dof - fsl_flirt.inputs.cost = self.config.fsl_cost - fsl_flirt.inputs.cost_func = self.config.fsl_cost - fsl_flirt.inputs.no_search = self.config.no_search - fsl_flirt.inputs.verbose = False - - # fmt:off - flow.connect( - [ - (inputnode, fsl_flirt, [("brain", "in_file")]), - (mr_convert_b0, fsl_flirt, [("converted", "reference")]), - ] - ) - # fmt:on - - # [1.3] Transforming T1-space images to avoid rotation of bvecs - T12DWIaff = pe.Node( - interface=fsl.ConvertXFM(invert_xfm=False), name="T12DWIaff" - ) - - # fmt:off - flow.connect( - [ - (fsl_flirt, T12DWIaff, [("out_matrix_file", "in_file")]), - (T12DWIaff, 
outputnode, [("out_file", "affine_transform")]), - ] - ) - # fmt:on - - fsl_applyxfm_wm = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - interp="nearestneighbour", - out_file="wm_mask_registered.nii.gz", - ), - name="apply_registration_wm", - ) - fsl_applyxfm_rois = pe.Node( - interface=ApplymultipleXfm(), name="apply_registration_roivs" - ) - fsl_applyxfm_brain_mask = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - interp="spline", - out_file="brain_mask_registered_temp.nii.gz", - ), - name="apply_registration_brain_mask", - ) - fsl_applyxfm_brain_mask_full = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, - interp="spline", - out_file="brain_mask_full_registered_temp.nii.gz", - ), - name="apply_registration_brain_mask_full", - ) - fsl_applyxfm_brain = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, interp="spline", out_file="brain_registered.nii.gz" - ), - name="apply_registration_brain", - ) - fsl_applyxfm_T1 = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, interp="spline", out_file="T1_registered.nii.gz" - ), - name="apply_registration_T1", - ) - if self.config.act_tracking: - fsl_applyxfm_5tt = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, interp="spline", out_file="5tt_registered.nii.gz" - ), - name="apply_registration_5tt", - ) - if self.config.gmwmi_seeding: - fsl_applyxfm_gmwmi = pe.Node( - interface=fsl.ApplyXFM( - apply_xfm=True, interp="spline", out_file="gmwmi_registered.nii.gz" - ), - name="apply_registration_gmwmi", - ) - - # fmt:off - flow.connect( - [ - (inputnode, fsl_applyxfm_wm, [("wm_mask", "in_file")]), - (T12DWIaff, fsl_applyxfm_wm, [("out_file", "in_matrix_file")]), - (mr_convert_b0, fsl_applyxfm_wm, [("converted", "reference")]), - (inputnode, fsl_applyxfm_rois, [("roi_volumes", "in_files")]), - (T12DWIaff, fsl_applyxfm_rois, [("out_file", "xfm_file")]), - (mr_convert_b0, fsl_applyxfm_rois, [("converted", "reference")]), - (inputnode, fsl_applyxfm_brain_mask, [("brain_mask", "in_file")]), - (T12DWIaff, fsl_applyxfm_brain_mask, [("out_file", "in_matrix_file")],), - (mr_convert_b0, fsl_applyxfm_brain_mask, [("converted", "reference")],), - (inputnode, fsl_applyxfm_brain_mask_full, [("brain_mask_full", "in_file")],), - (T12DWIaff, fsl_applyxfm_brain_mask_full, [("out_file", "in_matrix_file")],), - (mr_convert_b0, fsl_applyxfm_brain_mask_full, [("converted", "reference")],), - (inputnode, fsl_applyxfm_brain, [("brain", "in_file")]), - (T12DWIaff, fsl_applyxfm_brain, [("out_file", "in_matrix_file")]), - (mr_convert_b0, fsl_applyxfm_brain, [("converted", "reference")]), - (inputnode, fsl_applyxfm_T1, [("T1", "in_file")]), - (T12DWIaff, fsl_applyxfm_T1, [("out_file", "in_matrix_file")]), - (mr_convert_b0, fsl_applyxfm_T1, [("converted", "reference")]), - (fsl_applyxfm_brain_mask, outputnode, [("out_file", "brain_mask_registered_crop")],), - ] - ) - # fmt:on - if self.config.act_tracking: - # fmt:off - flow.connect( - [ - (inputnode, fsl_applyxfm_5tt, [("act_5TT", "in_file")]), - (T12DWIaff, fsl_applyxfm_5tt, [("out_file", "in_matrix_file")]), - (mr_convert_b0, fsl_applyxfm_5tt, [("converted", "reference")]) - ] - ) - # fmt:on - if self.config.gmwmi_seeding: - # fmt:off - flow.connect( - [ - (inputnode, fsl_applyxfm_gmwmi, [("gmwmi", "in_file")]), - (T12DWIaff, fsl_applyxfm_gmwmi, [("out_file", "in_matrix_file")]), - (mr_convert_b0, fsl_applyxfm_gmwmi, [("converted", "reference")]) - ] - ) - # fmt:on - - fsl_fnirt_crop = pe.Node( - interface=fsl.FNIRT(fieldcoeff_file=True), name="fsl_fnirt_crop" - ) - - # fmt:off - flow.connect( - [ - 
(mr_convert_b0, fsl_fnirt_crop, [("converted", "ref_file")]), - (inputnode, fsl_fnirt_crop, [("brain", "in_file")]), - (fsl_flirt, fsl_fnirt_crop, [("out_matrix_file", "affine_file")]), - (fsl_fnirt_crop, outputnode, [("fieldcoeff_file", "warp_field")]), - # (inputnode, fsl_fnirt_crop, [('target_mask','refmask_file')]) - ] - ) - # fmt:on - - fsl_applywarp_T1 = pe.Node( - interface=fsl.ApplyWarp(interp="spline", out_file="T1_warped.nii.gz"), - name="apply_warp_T1", - ) - if self.config.act_tracking: - fsl_applywarp_5tt = pe.Node( - interface=fsl.ApplyWarp(interp="spline", out_file="act_5tt_resampled_warped.nii.gz"), - name="apply_warp_5tt", - ) - if self.config.gmwmi_seeding: - fsl_applywarp_gmwmi = pe.Node( - interface=fsl.ApplyWarp( - interp="spline", out_file="gmwmi_resampled_warped.nii.gz" - ), - name="apply_warp_gmwmi", - ) - fsl_applywarp_brain = pe.Node( - interface=fsl.ApplyWarp( - interp="spline", out_file="brain_warped.nii.gz" - ), - name="apply_warp_brain", - ) - fsl_applywarp_wm = pe.Node( - interface=fsl.ApplyWarp(interp="nn", out_file="wm_mask_warped.nii.gz"), - name="apply_warp_wm", - ) - fsl_applywarp_rois = pe.Node( - interface=ApplymultipleWarp(interp="nn"), name="apply_warp_roivs" - ) - - # fmt:off - flow.connect( - [ - (inputnode, fsl_applywarp_T1, [("T1", "in_file")]), - (inputnode, fsl_applywarp_T1, [("target", "ref_file")]), - (fsl_fnirt_crop, fsl_applywarp_T1, [("fieldcoeff_file", "field_file")],), - (fsl_applywarp_T1, outputnode, [("out_file", "T1_registered_crop")],), - (inputnode, fsl_applywarp_brain, [("brain", "in_file")]), - (inputnode, fsl_applywarp_brain, [("target", "ref_file")]), - (fsl_fnirt_crop, fsl_applywarp_brain, [("fieldcoeff_file", "field_file")],), - (fsl_applywarp_brain, outputnode, [("out_file", "brain_registered_crop")],), - (inputnode, fsl_applywarp_wm, [("wm_mask", "in_file")]), - (inputnode, fsl_applywarp_wm, [("target", "ref_file")]), - (fsl_fnirt_crop, fsl_applywarp_wm, [("fieldcoeff_file", "field_file")],), - (fsl_applywarp_wm, outputnode, [("out_file", "wm_mask_registered_crop")],), - (inputnode, fsl_applywarp_rois, [("roi_volumes", "in_files")]), - (inputnode, fsl_applywarp_rois, [("target", "ref_file")]), - (fsl_fnirt_crop, fsl_applywarp_rois, [("fieldcoeff_file", "field_file")],), - (fsl_applywarp_rois, outputnode, [("out_files", "roi_volumes_registered_crop")],) - ] - ) - # fmt:on - if self.config.act_tracking: - # fmt:off - flow.connect( - [ - (inputnode, fsl_applywarp_5tt, [("act_5TT", "in_file")]), - (inputnode, fsl_applywarp_5tt, [("target", "ref_file")]), - (fsl_fnirt_crop, fsl_applywarp_5tt, [("fieldcoeff_file", "field_file")]), - (fsl_applywarp_5tt, outputnode, [("out_file", "act_5tt_registered_crop")]) - ] - ) - # fmt:on - if self.config.gmwmi_seeding: - # fmt:off - flow.connect( - [ - (inputnode, fsl_applywarp_gmwmi, [("gmwmi", "in_file")]), - (inputnode, fsl_applywarp_gmwmi, [("target", "ref_file")]), - (fsl_fnirt_crop, fsl_applywarp_gmwmi, [("fieldcoeff_file", "field_file")]), - (fsl_applywarp_gmwmi, outputnode, [("out_file", "gmwmi_registered_crop")]) - ] - ) - # fmt:on + if self.config.registration_mode == "ANTs": + flow = self.create_ants_workflow(flow, inputnode, outputnode) + + if self.config.registration_mode == "FSL (Linear)": + flow = self.create_flirt_workflow(flow, inputnode, outputnode) + + if (self.config.pipeline == "fMRI") and (self.config.registration_mode == "BBregister (FS)"): + flow = self.create_bbregister_workflow(flow, inputnode, outputnode) + + return flow def create_ants_workflow(self, flow, 
inputnode, outputnode): - """Create the registration workflow using ANTS. + """Create the registration workflow using `ANTs `_. Parameters ---------- @@ -1023,7 +398,7 @@ def create_ants_workflow(self, flow, inputnode, outputnode): outputnode : nipype.interfaces.utility.IdentityInterface Identity interface describing the outputs of the stage """ - # [SUB-STEP 1] Linear register "T1" onto"Target_FA_resampled" + # [SUB-STEP 1] Linear register "T1" onto "Target_B0_resampled" # [1.1] Convert diffusion data to mrtrix format using rotated bvecs mr_convert = pe.Node( interface=MRConvert( @@ -1141,7 +516,7 @@ def create_ants_workflow(self, flow, inputnode, outputnode): ) # fmt:on - # [1.2] Linear registration of the DW data to the T1 data + # [1.2] Linear registration of the B0 volume to the T1 data affine_registration = pe.Node( interface=ants.Registration(), name="linear_registration" ) @@ -1215,9 +590,10 @@ def create_ants_workflow(self, flow, inputnode, outputnode): # fmt:on SyN_registration = pe.Node( - interface=ants.Registration(), name="SyN_registration" + interface=ants.Registration(), name="SyN_registration" ) + # [SUB-STEP 2] Non-linear registration of the B0 volume to the T1 data if self.config.ants_perform_syn: SyN_registration.inputs.collapse_output_transforms = True @@ -1527,39 +903,368 @@ def extract_warp_field(transforms): # fmt:on return flow - # def create_flirt_workflow(self, flow, inputnode, outputnode): - # """Create the workflow of the registration stage using FSL FLIRT. - # - # Parameters - # ---------- - # flow : nipype.pipeline.engine.Workflow - # The nipype.pipeline.engine.Workflow instance of either the - # Diffusion pipeline or the fMRI pipeline - # - # inputnode : nipype.interfaces.utility.IdentityInterface - # Identity interface describing the inputs of the stage - # - # outputnode : nipype.interfaces.utility.IdentityInterface - # Identity interface describing the outputs of the stage - # """ - # return NotImplementedError - # - # def create_bbregister_workflow(self, flow, inputnode, outputnode): - # """Create the workflow of the registration stage using FreeSurfer BBRegister. - # - # Parameters - # ---------- - # flow : nipype.pipeline.engine.Workflow - # The nipype.pipeline.engine.Workflow instance of the - # fMRI pipeline - # - # inputnode : nipype.interfaces.utility.IdentityInterface - # Identity interface describing the inputs of the stage - # - # outputnode : nipype.interfaces.utility.IdentityInterface - # Identity interface describing the outputs of the stage - # """ - # return NotImplementedError + def create_flirt_workflow(self, flow, inputnode, outputnode): + """Create the workflow of the registration stage using `FSL FLIRT `_. 
+ + Parameters + ---------- + flow : nipype.pipeline.engine.Workflow + The nipype.pipeline.engine.Workflow instance of either the + Diffusion pipeline or the fMRI pipeline + + inputnode : nipype.interfaces.utility.IdentityInterface + Identity interface describing the inputs of the stage + + outputnode : nipype.interfaces.utility.IdentityInterface + Identity interface describing the outputs of the stage + """ + # Extract first volume and resample it to 1x1x1mm3 + if self.config.pipeline == "Diffusion": + extract_first = pe.Node( + interface=fsl.ExtractROI(t_min=0, t_size=1, roi_file="first.nii.gz"), + name="extract_first", + ) + flow.connect([(inputnode, extract_first, [("target", "in_file")])]) + fs_mriconvert = pe.Node( + interface=fs.MRIConvert( + out_file="target_first.nii.gz" + ), + name="target_resample", + ) + flow.connect([(extract_first, fs_mriconvert, [("roi_file", "in_file")])]) + + # fmt:off + flow.connect( + [ + (inputnode, outputnode, [("target", "target_epicorrected")]), + (inputnode, outputnode, [("bvals", "bvals"), + ("bvecs", "bvecs")]), + ] + ) + # fmt:on + + elif self.config.pipeline == "fMRI": + fmri_bet = pe.Node(interface=fsl.BET(), name="fMRI_skullstrip") + T1_bet = pe.Node(interface=fsl.BET(), name="T1_skullstrip") + # fmt:off + flow.connect( + [ + (inputnode, fmri_bet, [("target", "in_file")]), + (inputnode, T1_bet, [("T1", "in_file")]), + ] + ) + # fmt:on + + fsl_flirt = pe.Node( + interface=fsl.FLIRT( + out_file="T1-TO-TARGET.nii.gz", + out_matrix_file="T1-TO-TARGET.mat" + ), + name="linear_registration", + ) + fsl_flirt.inputs.uses_qform = self.config.uses_qform + fsl_flirt.inputs.dof = self.config.dof + fsl_flirt.inputs.cost = self.config.fsl_cost + fsl_flirt.inputs.no_search = self.config.no_search + fsl_flirt.inputs.args = self.config.flirt_args + + fsl_applyxfm_wm = pe.Node( + interface=fsl.ApplyXFM( + apply_xfm=True, + interp="nearestneighbour", + out_file="wm_mask_registered.nii.gz", + ), + name="apply_registration_wm", + ) + fsl_applyxfm_rois = pe.Node( + interface=ApplymultipleXfm(interp='nearestneighbour'), + name="apply_registration_roivs" + ) + + # TODO apply xfm to gmwmi / 5tt and pves + # fmt:off + flow.connect( + [ + (inputnode, fsl_applyxfm_wm, [("wm_mask", "in_file")]), + (fsl_flirt, outputnode, [("out_file", "T1_registered_crop")]), + (fsl_flirt, fsl_applyxfm_wm, [("out_matrix_file", "in_matrix_file")],), + (fsl_applyxfm_wm, outputnode, [("out_file", "wm_mask_registered_crop")],), + (inputnode, fsl_applyxfm_rois, [("roi_volumes", "in_files")]), + (fsl_flirt, fsl_applyxfm_rois, [("out_matrix_file", "xfm_file")]), + (fsl_flirt, outputnode, [("out_matrix_file", "affine_transform")]), + (fsl_applyxfm_rois, outputnode, [("out_files", "roi_volumes_registered_crop")],), + ] + ) + # fmt:on + + if self.config.pipeline == "fMRI": + # fmt:off + flow.connect( + [ + (T1_bet, fsl_flirt, [("out_file", "in_file")]), + (fmri_bet, fsl_flirt, [("out_file", "reference")]), + (fmri_bet, fsl_applyxfm_wm, [("out_file", "reference")]), + (fmri_bet, fsl_applyxfm_rois, [("out_file", "reference")]), + ] + ) + # fmt:on + + fsl_applyxfm_eroded_wm = pe.Node( + interface=fsl.ApplyXFM( + apply_xfm=True, + interp="nearestneighbour", + out_file="eroded_wm_registered.nii.gz", + ), + name="apply_registration_wm_eroded", + ) + if self.config.apply_to_eroded_csf: + fsl_applyxfm_eroded_csf = pe.Node( + interface=fsl.ApplyXFM( + apply_xfm=True, + interp="nearestneighbour", + out_file="eroded_csf_registered.nii.gz", + ), + name="apply_registration_csf_eroded", + ) + # fmt:off + 
flow.connect( + [ + (inputnode, fsl_applyxfm_eroded_csf, [("eroded_csf", "in_file")],), + (fmri_bet, fsl_applyxfm_eroded_csf, [("out_file", "reference")],), + (fsl_flirt, fsl_applyxfm_eroded_csf, [("out_matrix_file", "in_matrix_file")],), + (fsl_applyxfm_eroded_csf, outputnode, [("out_file", "eroded_csf_registered_crop")],), + ] + ) + # fmt:on + if self.config.apply_to_eroded_brain: + fsl_applyxfm_eroded_brain = pe.Node( + interface=fsl.ApplyXFM( + apply_xfm=True, + interp="nearestneighbour", + out_file="eroded_brain_registered.nii.gz", + ), + name="apply_registration_brain_eroded", + ) + # fmt:off + flow.connect( + [ + (inputnode, fsl_applyxfm_eroded_brain, [("eroded_brain", "in_file")],), + (fmri_bet, fsl_applyxfm_eroded_brain, [("out_file", "reference")],), + (fsl_flirt, fsl_applyxfm_eroded_brain, [("out_matrix_file", "in_matrix_file")],), + (fsl_applyxfm_eroded_brain, outputnode, [("out_file", "eroded_brain_registered_crop")],), + ] + ) + # fmt:on + # fmt:off + flow.connect( + [ + (inputnode, fsl_applyxfm_eroded_wm, [("eroded_wm", "in_file")]), + (fmri_bet, fsl_applyxfm_eroded_wm, [("out_file", "reference")]), + (fsl_flirt, fsl_applyxfm_eroded_wm, [("out_matrix_file", "in_matrix_file")],), + (fsl_applyxfm_eroded_wm, outputnode, [("out_file", "eroded_wm_registered_crop")],), + ] + ) + # fmt:on + else: + # fmt:off + flow.connect( + [ + (inputnode, fsl_flirt, [("T1", "in_file")]), + (fs_mriconvert, fsl_flirt, [("out_file", "reference")]), + (fs_mriconvert, fsl_applyxfm_wm, [("out_file", "reference")]), + (fs_mriconvert, fsl_applyxfm_rois, [("out_file", "reference")]), + ] + ) + # fmt:on + + return flow + + def create_bbregister_workflow(self, flow, inputnode, outputnode): + """Create the workflow of the registration stage using `FreeSurfer BBRegister `_. 
+ + Parameters + ---------- + flow : nipype.pipeline.engine.Workflow + The nipype.pipeline.engine.Workflow instance of the + fMRI pipeline + + inputnode : nipype.interfaces.utility.IdentityInterface + Identity interface describing the inputs of the stage + + outputnode : nipype.interfaces.utility.IdentityInterface + Identity interface describing the outputs of the stage + """ + # Extract first volume and resample it to 1x1x1mm3 + if self.config.pipeline == "Diffusion": + extract_first = pe.Node( + interface=fsl.ExtractROI(t_min=0, t_size=1, roi_file="first.nii.gz"), + name="extract_first", + ) + flow.connect([(inputnode, extract_first, [("target", "in_file")])]) + fs_mriconvert = pe.Node( + interface=fs.MRIConvert( + out_file="target_first.nii.gz" + ), + name="target_resample", + ) + flow.connect([(extract_first, fs_mriconvert, [("roi_file", "in_file")])]) + + # fmt:off + flow.connect( + [ + (inputnode, outputnode, [("target", "target_epicorrected")]), + (inputnode, outputnode, [("bvals", "bvals"), + ("bvecs", "bvecs")]), + ] + ) + # fmt:on + + elif self.config.pipeline == "fMRI": + fmri_bet = pe.Node(interface=fsl.BET(), name="fMRI_skullstrip") + T1_bet = pe.Node(interface=fsl.BET(), name="T1_skullstrip") + # fmt:off + flow.connect( + [ + (inputnode, fmri_bet, [("target", "in_file")]), + (inputnode, T1_bet, [("T1", "in_file")]), + ] + ) + # fmt:on + + fs_bbregister = pe.Node( + interface=cmp_fs.BBRegister(out_fsl_file="target-TO-orig.mat"), + name="bbregister", + ) + fs_bbregister.inputs.init = self.config.init + fs_bbregister.inputs.contrast_type = self.config.contrast_type + fs_bbregister.inputs.subjects_dir = self.fs_subjects_dir + fs_bbregister.inputs.subject_id = self.fs_subject_id + + fsl_invertxfm = pe.Node( + interface=fsl.ConvertXFM(invert_xfm=True), name="fsl_invertxfm" + ) + + fs_source = pe.Node( + interface=fs.preprocess.FreeSurferSource(), name="get_fs_files" + ) + fs_source.inputs.subjects_dir = self.fs_subjects_dir + fs_source.inputs.subject_id = self.fs_subject_id + + fs_tkregister2 = pe.Node( + interface=cmp_fs.Tkregister2(regheader=True, noedit=True), + name="fs_tkregister2", + ) + fs_tkregister2.inputs.reg_out = "T1-TO-orig.dat" + fs_tkregister2.inputs.fslreg_out = "T1-TO-orig.mat" + fs_tkregister2.inputs.subjects_dir = self.fs_subjects_dir + fs_tkregister2.inputs.subject_id = self.fs_subject_id + + fsl_concatxfm = pe.Node( + interface=fsl.ConvertXFM(concat_xfm=True), name="fsl_concatxfm" + ) + + fsl_applyxfm = pe.Node( + interface=fsl.ApplyXFM(apply_xfm=True, out_file="T1-TO-TARGET.nii.gz"), + name="linear_registration", + ) + fsl_applyxfm_wm = pe.Node( + interface=fsl.ApplyXFM( + apply_xfm=True, + interp="nearestneighbour", + out_file="wm_mask_registered.nii.gz", + ), + name="apply_registration_wm", + ) + fsl_applyxfm_rois = pe.Node( + interface=ApplymultipleXfm(), name="apply_registration_roivs" + ) + + # fmt:off + flow.connect( + [ + (fs_bbregister, fsl_invertxfm, [("out_fsl_file", "in_file")]), + (fsl_invertxfm, fsl_concatxfm, [("out_file", "in_file2")]), + (fs_source, fs_tkregister2, [("orig", "target_file"), ("rawavg", "in_file")],), + (fs_tkregister2, fsl_concatxfm, [("fslregout_file", "in_file")]), + (T1_bet, fsl_applyxfm, [("out_file", "in_file")]), + (fsl_concatxfm, fsl_applyxfm, [("out_file", "in_matrix_file")]), + (fsl_applyxfm, outputnode, [("out_file", "T1_registered_crop")]), + (inputnode, fsl_applyxfm_wm, [("wm_mask", "in_file")]), + (fsl_concatxfm, fsl_applyxfm_wm, [("out_file", "in_matrix_file")]), + (fsl_applyxfm_wm, outputnode, [("out_file", 
"wm_mask_registered_crop")],), + (inputnode, fsl_applyxfm_rois, [("roi_volumes", "in_files")]), + (fsl_concatxfm, fsl_applyxfm_rois, [("out_file", "xfm_file")]), + (fsl_applyxfm_rois, outputnode, [("out_files", "roi_volumes_registered_crop")],), + (fmri_bet, fs_bbregister, [("out_file", "source_file")]), + (inputnode, fsl_applyxfm, [("target", "reference")]), + (inputnode, fsl_applyxfm_wm, [("target", "reference")]), + (inputnode, fsl_applyxfm_rois, [("target", "reference")]), + ] + ) + # fmt:on + + fsl_applyxfm_eroded_wm = pe.Node( + interface=fsl.ApplyXFM( + apply_xfm=True, + interp="nearestneighbour", + out_file="eroded_wm_registered.nii.gz", + ), + name="apply_registration_wm_eroded", + ) + if self.config.apply_to_eroded_csf: + fsl_applyxfm_eroded_csf = pe.Node( + interface=fsl.ApplyXFM( + apply_xfm=True, + interp="nearestneighbour", + out_file="eroded_csf_registered.nii.gz", + ), + name="apply_registration_csf_eroded", + ) + + # fmt:off + flow.connect( + [ + (inputnode, fsl_applyxfm_eroded_csf, [("eroded_csf", "in_file")],), + (inputnode, fsl_applyxfm_eroded_csf, [("target", "reference")]), + (fsl_concatxfm, fsl_applyxfm_eroded_csf, [("out_file", "in_matrix_file")],), + (fsl_applyxfm_eroded_csf, outputnode, [("out_file", "eroded_csf_registered_crop")],), + ] + ) + # fmt:on + + if self.config.apply_to_eroded_brain: + fsl_applyxfm_eroded_brain = pe.Node( + interface=fsl.ApplyXFM( + apply_xfm=True, + interp="nearestneighbour", + out_file="eroded_brain_registered.nii.gz", + ), + name="apply_registration_brain_eroded", + ) + + # fmt:off + flow.connect( + [ + (inputnode, fsl_applyxfm_eroded_brain, [("eroded_brain", "in_file")],), + (inputnode, fsl_applyxfm_eroded_brain, [("target", "reference")],), + (fsl_concatxfm, fsl_applyxfm_eroded_brain, [("out_file", "in_matrix_file")],), + (fsl_applyxfm_eroded_brain, outputnode, [("out_file", "eroded_brain_registered_crop")],), + ] + ) + # fmt:on + + # fmt:off + flow.connect( + [ + (inputnode, fsl_applyxfm_eroded_wm, [("eroded_wm", "in_file")]), + (inputnode, fsl_applyxfm_eroded_wm, [("target", "reference")]), + (fsl_concatxfm, fsl_applyxfm_eroded_wm, [("out_file", "in_matrix_file")],), + (fsl_applyxfm_eroded_wm, outputnode, [("out_file", "eroded_wm_registered_crop")],), + ] + ) + # fmt:on + + return flow def define_inspect_outputs(self): """Update the `inspect_outputs` class attribute. 
From 007e1e350bd0a0db2a5e08b64431a5d336c54940 Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Tue, 4 Jan 2022 13:53:06 +0100 Subject: [PATCH 09/15] FIX: Add missing input/output connections in create_ants_workflow() of RegistrationStage --- cmp/stages/registration/registration.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/cmp/stages/registration/registration.py b/cmp/stages/registration/registration.py index 2f5d4fb3c..c4d7f2d08 100644 --- a/cmp/stages/registration/registration.py +++ b/cmp/stages/registration/registration.py @@ -372,14 +372,12 @@ def create_workflow(self, flow, inputnode, outputnode): outputnode : nipype.interfaces.utility.IdentityInterface Identity interface describing the outputs of the stage """ - if self.config.registration_mode == "ANTs": - flow = self.create_ants_workflow(flow, inputnode, outputnode) - - if self.config.registration_mode == "FSL (Linear)": - flow = self.create_flirt_workflow(flow, inputnode, outputnode) - if (self.config.pipeline == "fMRI") and (self.config.registration_mode == "BBregister (FS)"): flow = self.create_bbregister_workflow(flow, inputnode, outputnode) + elif self.config.registration_mode == "ANTs": + flow = self.create_ants_workflow(flow, inputnode, outputnode) + elif self.config.registration_mode == "FSL (Linear)": + flow = self.create_flirt_workflow(flow, inputnode, outputnode) return flow @@ -901,6 +899,16 @@ def extract_warp_field(transforms): ] ) # fmt:on + + # fmt:off + flow.connect( + [ + (inputnode, outputnode, [("target", "target_epicorrected")]), + (inputnode, outputnode, [("bvals", "bvals"), + ("bvecs", "bvecs")]), + ] + ) + # fmt:on return flow def create_flirt_workflow(self, flow, inputnode, outputnode): From cd07258576c1f136c5c38eaf3391147e215b5ed2 Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Tue, 4 Jan 2022 15:01:58 +0100 Subject: [PATCH 10/15] DOC: Update copyright year [skip ci] --- COPYRIGHT | 2 +- README.md | 2 +- cmp/__init__.py | 2 +- cmp/bidsappmanager/gui/__init__.py | 2 +- cmp/bidsappmanager/gui/bidsapp.py | 2 +- cmp/bidsappmanager/gui/config.py | 2 +- cmp/bidsappmanager/gui/globals.py | 2 +- cmp/bidsappmanager/gui/principal.py | 2 +- cmp/bidsappmanager/gui/qc.py | 2 +- cmp/bidsappmanager/gui/traits.py | 2 +- cmp/bidsappmanager/pipelines/anatomical/anatomical.py | 2 +- cmp/bidsappmanager/pipelines/diffusion/diffusion.py | 2 +- cmp/bidsappmanager/pipelines/functional/eeg.py | 2 +- cmp/bidsappmanager/pipelines/functional/fMRI.py | 2 +- cmp/bidsappmanager/project.py | 2 +- cmp/bidsappmanager/stages/connectome/connectome.py | 2 +- cmp/bidsappmanager/stages/connectome/fmri_connectome.py | 2 +- cmp/bidsappmanager/stages/diffusion/diffusion.py | 2 +- cmp/bidsappmanager/stages/diffusion/reconstruction.py | 2 +- cmp/bidsappmanager/stages/diffusion/tracking.py | 2 +- cmp/bidsappmanager/stages/functional/functionalMRI.py | 2 +- cmp/bidsappmanager/stages/parcellation/parcellation.py | 2 +- cmp/bidsappmanager/stages/preprocessing/fmri_preprocessing.py | 2 +- cmp/bidsappmanager/stages/preprocessing/preprocessing.py | 2 +- cmp/bidsappmanager/stages/registration/registration.py | 2 +- cmp/bidsappmanager/stages/segmentation/segmentation.py | 2 +- cmp/cli/cmpbidsappmanager.py | 2 +- cmp/cli/connectomemapper3.py | 2 +- cmp/cli/connectomemapper3_docker.py | 2 +- cmp/cli/connectomemapper3_singularity.py | 2 +- cmp/cli/showmatrix_gpickle.py | 2 +- cmp/info.py | 2 +- cmp/parser.py | 2 +- cmp/pipelines/anatomical/anatomical.py | 2 +- cmp/pipelines/common.py | 2 +- 
cmp/pipelines/diffusion/diffusion.py | 2 +- cmp/pipelines/functional/eeg.py | 2 +- cmp/pipelines/functional/fMRI.py | 2 +- cmp/project.py | 2 +- cmp/stages/common.py | 2 +- cmp/stages/connectome/connectome.py | 2 +- cmp/stages/connectome/fmri_connectome.py | 2 +- cmp/stages/diffusion/diffusion.py | 2 +- cmp/stages/diffusion/reconstruction.py | 2 +- cmp/stages/diffusion/tracking.py | 2 +- cmp/stages/functional/functionalMRI.py | 2 +- cmp/stages/parcellation/parcellation.py | 2 +- cmp/stages/preprocessing/fmri_preprocessing.py | 2 +- cmp/stages/preprocessing/preprocessing.py | 2 +- cmp/stages/registration/registration.py | 2 +- cmp/stages/segmentation/segmentation.py | 2 +- cmp/viz/reports.py | 2 +- cmtklib/bids/io.py | 2 +- cmtklib/bids/utils.py | 2 +- cmtklib/config.py | 2 +- cmtklib/connectome.py | 2 +- cmtklib/diffusion.py | 2 +- cmtklib/functionalMRI.py | 2 +- cmtklib/interfaces/ants.py | 2 +- cmtklib/interfaces/camino.py | 2 +- cmtklib/interfaces/camino2trackvis.py | 2 +- cmtklib/interfaces/diffusion_toolkit.py | 2 +- cmtklib/interfaces/dipy.py | 2 +- cmtklib/interfaces/freesurfer.py | 2 +- cmtklib/interfaces/fsl.py | 2 +- cmtklib/interfaces/misc.py | 2 +- cmtklib/interfaces/mrtrix3.py | 2 +- cmtklib/parcellation.py | 2 +- cmtklib/process.py | 2 +- cmtklib/util.py | 2 +- docs/LICENSE.rst | 2 +- run.py | 2 +- scripts/utils/build_bidsapp.sh | 2 +- scripts/utils/build_docs.sh | 2 +- 74 files changed, 74 insertions(+), 74 deletions(-) diff --git a/COPYRIGHT b/COPYRIGHT index 185da9060..afc149cef 100644 --- a/COPYRIGHT +++ b/COPYRIGHT @@ -1,4 +1,4 @@ -Copyright (C) 2009-2021, Ecole Polytechnique Fédérale de Lausanne (EPFL) and +Copyright (C) 2009-2022, Ecole Polytechnique Fédérale de Lausanne (EPFL) and Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland & Contributors, All rights reserved. diff --git a/README.md b/README.md index 6168f1e66..98af98e29 100644 --- a/README.md +++ b/README.md @@ -176,4 +176,4 @@ This software is distributed under the open-source license Modified BSD. See [li All trademarks referenced herein are property of their respective holders. -Copyright (C) 2009-2021, Hospital Center and University of Lausanne (UNIL-CHUV), Ecole Polytechnique Fédérale de Lausanne (EPFL), Switzerland & Contributors. +Copyright (C) 2009-2022, Hospital Center and University of Lausanne (UNIL-CHUV), Ecole Polytechnique Fédérale de Lausanne (EPFL), Switzerland & Contributors. diff --git a/cmp/__init__.py b/cmp/__init__.py index ded5cc72a..d1e67fc5f 100644 --- a/cmp/__init__.py +++ b/cmp/__init__.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/gui/__init__.py b/cmp/bidsappmanager/gui/__init__.py index ff9173558..88b297b59 100644 --- a/cmp/bidsappmanager/gui/__init__.py +++ b/cmp/bidsappmanager/gui/__init__.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/cmp/bidsappmanager/gui/bidsapp.py b/cmp/bidsappmanager/gui/bidsapp.py index 03c95f79b..d856887ee 100644 --- a/cmp/bidsappmanager/gui/bidsapp.py +++ b/cmp/bidsappmanager/gui/bidsapp.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/gui/config.py b/cmp/bidsappmanager/gui/config.py index 7e8accbbf..e059379ce 100644 --- a/cmp/bidsappmanager/gui/config.py +++ b/cmp/bidsappmanager/gui/config.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/gui/globals.py b/cmp/bidsappmanager/gui/globals.py index 53f781039..a5b66ea67 100644 --- a/cmp/bidsappmanager/gui/globals.py +++ b/cmp/bidsappmanager/gui/globals.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/gui/principal.py b/cmp/bidsappmanager/gui/principal.py index d45f3cc89..0e18d2309 100644 --- a/cmp/bidsappmanager/gui/principal.py +++ b/cmp/bidsappmanager/gui/principal.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/gui/qc.py b/cmp/bidsappmanager/gui/qc.py index 83aab6fe2..375f8a2b6 100644 --- a/cmp/bidsappmanager/gui/qc.py +++ b/cmp/bidsappmanager/gui/qc.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/gui/traits.py b/cmp/bidsappmanager/gui/traits.py index 524e2fc5e..5059bd2b7 100644 --- a/cmp/bidsappmanager/gui/traits.py +++ b/cmp/bidsappmanager/gui/traits.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/pipelines/anatomical/anatomical.py b/cmp/bidsappmanager/pipelines/anatomical/anatomical.py index fc8f12f51..16a5dcf83 100644 --- a/cmp/bidsappmanager/pipelines/anatomical/anatomical.py +++ b/cmp/bidsappmanager/pipelines/anatomical/anatomical.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/cmp/bidsappmanager/pipelines/diffusion/diffusion.py b/cmp/bidsappmanager/pipelines/diffusion/diffusion.py index 5ff957884..b53016625 100644 --- a/cmp/bidsappmanager/pipelines/diffusion/diffusion.py +++ b/cmp/bidsappmanager/pipelines/diffusion/diffusion.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/pipelines/functional/eeg.py b/cmp/bidsappmanager/pipelines/functional/eeg.py index 4d9cfbafa..d0e694cd3 100644 --- a/cmp/bidsappmanager/pipelines/functional/eeg.py +++ b/cmp/bidsappmanager/pipelines/functional/eeg.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/pipelines/functional/fMRI.py b/cmp/bidsappmanager/pipelines/functional/fMRI.py index 8d59cb145..0064e5c21 100644 --- a/cmp/bidsappmanager/pipelines/functional/fMRI.py +++ b/cmp/bidsappmanager/pipelines/functional/fMRI.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/project.py b/cmp/bidsappmanager/project.py index 31991e3a4..a2d0e7839 100644 --- a/cmp/bidsappmanager/project.py +++ b/cmp/bidsappmanager/project.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/stages/connectome/connectome.py b/cmp/bidsappmanager/stages/connectome/connectome.py index 37779d29d..c567fe0da 100644 --- a/cmp/bidsappmanager/stages/connectome/connectome.py +++ b/cmp/bidsappmanager/stages/connectome/connectome.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/stages/connectome/fmri_connectome.py b/cmp/bidsappmanager/stages/connectome/fmri_connectome.py index 890371289..c12900dc9 100644 --- a/cmp/bidsappmanager/stages/connectome/fmri_connectome.py +++ b/cmp/bidsappmanager/stages/connectome/fmri_connectome.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/cmp/bidsappmanager/stages/diffusion/diffusion.py b/cmp/bidsappmanager/stages/diffusion/diffusion.py index 5201f5299..baa9c2d47 100644 --- a/cmp/bidsappmanager/stages/diffusion/diffusion.py +++ b/cmp/bidsappmanager/stages/diffusion/diffusion.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/stages/diffusion/reconstruction.py b/cmp/bidsappmanager/stages/diffusion/reconstruction.py index 805c4941c..dce15c5fa 100644 --- a/cmp/bidsappmanager/stages/diffusion/reconstruction.py +++ b/cmp/bidsappmanager/stages/diffusion/reconstruction.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/stages/diffusion/tracking.py b/cmp/bidsappmanager/stages/diffusion/tracking.py index 5b583b491..9f045f720 100644 --- a/cmp/bidsappmanager/stages/diffusion/tracking.py +++ b/cmp/bidsappmanager/stages/diffusion/tracking.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/stages/functional/functionalMRI.py b/cmp/bidsappmanager/stages/functional/functionalMRI.py index 98bf7da7a..96a3a10fa 100644 --- a/cmp/bidsappmanager/stages/functional/functionalMRI.py +++ b/cmp/bidsappmanager/stages/functional/functionalMRI.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/stages/parcellation/parcellation.py b/cmp/bidsappmanager/stages/parcellation/parcellation.py index 5070ea0f7..1f904f8d6 100644 --- a/cmp/bidsappmanager/stages/parcellation/parcellation.py +++ b/cmp/bidsappmanager/stages/parcellation/parcellation.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/stages/preprocessing/fmri_preprocessing.py b/cmp/bidsappmanager/stages/preprocessing/fmri_preprocessing.py index b2cd03f8f..9226feae9 100644 --- a/cmp/bidsappmanager/stages/preprocessing/fmri_preprocessing.py +++ b/cmp/bidsappmanager/stages/preprocessing/fmri_preprocessing.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/cmp/bidsappmanager/stages/preprocessing/preprocessing.py b/cmp/bidsappmanager/stages/preprocessing/preprocessing.py index 1433f2501..a85ce7ea8 100644 --- a/cmp/bidsappmanager/stages/preprocessing/preprocessing.py +++ b/cmp/bidsappmanager/stages/preprocessing/preprocessing.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/stages/registration/registration.py b/cmp/bidsappmanager/stages/registration/registration.py index 66a0281b1..b2909d39b 100644 --- a/cmp/bidsappmanager/stages/registration/registration.py +++ b/cmp/bidsappmanager/stages/registration/registration.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/bidsappmanager/stages/segmentation/segmentation.py b/cmp/bidsappmanager/stages/segmentation/segmentation.py index 1949871a6..4edaa1ba8 100644 --- a/cmp/bidsappmanager/stages/segmentation/segmentation.py +++ b/cmp/bidsappmanager/stages/segmentation/segmentation.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/cli/cmpbidsappmanager.py b/cmp/cli/cmpbidsappmanager.py index 10f5cbf99..4c90a7c68 100644 --- a/cmp/cli/cmpbidsappmanager.py +++ b/cmp/cli/cmpbidsappmanager.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/cli/connectomemapper3.py b/cmp/cli/connectomemapper3.py index 4d6626876..afc478eec 100644 --- a/cmp/cli/connectomemapper3.py +++ b/cmp/cli/connectomemapper3.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/cli/connectomemapper3_docker.py b/cmp/cli/connectomemapper3_docker.py index 874b779a2..741570b68 100644 --- a/cmp/cli/connectomemapper3_docker.py +++ b/cmp/cli/connectomemapper3_docker.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/cmp/cli/connectomemapper3_singularity.py b/cmp/cli/connectomemapper3_singularity.py index 81d2787b0..75412630b 100644 --- a/cmp/cli/connectomemapper3_singularity.py +++ b/cmp/cli/connectomemapper3_singularity.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/cli/showmatrix_gpickle.py b/cmp/cli/showmatrix_gpickle.py index 48fd686d3..d0a4908d5 100644 --- a/cmp/cli/showmatrix_gpickle.py +++ b/cmp/cli/showmatrix_gpickle.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/info.py b/cmp/info.py index 1c5019986..a81846489 100644 --- a/cmp/info.py +++ b/cmp/info.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/parser.py b/cmp/parser.py index eb2b174be..6351ece4f 100644 --- a/cmp/parser.py +++ b/cmp/parser.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/pipelines/anatomical/anatomical.py b/cmp/pipelines/anatomical/anatomical.py index bba4bdb82..4b8237c67 100644 --- a/cmp/pipelines/anatomical/anatomical.py +++ b/cmp/pipelines/anatomical/anatomical.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/pipelines/common.py b/cmp/pipelines/common.py index e66ec68e7..a86042d8b 100644 --- a/cmp/pipelines/common.py +++ b/cmp/pipelines/common.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/pipelines/diffusion/diffusion.py b/cmp/pipelines/diffusion/diffusion.py index 668ec52c3..68c85bb53 100644 --- a/cmp/pipelines/diffusion/diffusion.py +++ b/cmp/pipelines/diffusion/diffusion.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/cmp/pipelines/functional/eeg.py b/cmp/pipelines/functional/eeg.py index 4d9cfbafa..d0e694cd3 100644 --- a/cmp/pipelines/functional/eeg.py +++ b/cmp/pipelines/functional/eeg.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/pipelines/functional/fMRI.py b/cmp/pipelines/functional/fMRI.py index 4ff6b6fc5..51470563f 100644 --- a/cmp/pipelines/functional/fMRI.py +++ b/cmp/pipelines/functional/fMRI.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/project.py b/cmp/project.py index 8094c0122..f136a52c6 100644 --- a/cmp/project.py +++ b/cmp/project.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/stages/common.py b/cmp/stages/common.py index 71d26f82f..7452c12b3 100644 --- a/cmp/stages/common.py +++ b/cmp/stages/common.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/stages/connectome/connectome.py b/cmp/stages/connectome/connectome.py index 3735c8560..b77c90132 100644 --- a/cmp/stages/connectome/connectome.py +++ b/cmp/stages/connectome/connectome.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/stages/connectome/fmri_connectome.py b/cmp/stages/connectome/fmri_connectome.py index 6d932f9f1..f9541fb34 100644 --- a/cmp/stages/connectome/fmri_connectome.py +++ b/cmp/stages/connectome/fmri_connectome.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/stages/diffusion/diffusion.py b/cmp/stages/diffusion/diffusion.py index c23e0a92b..401fd5b8f 100644 --- a/cmp/stages/diffusion/diffusion.py +++ b/cmp/stages/diffusion/diffusion.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/cmp/stages/diffusion/reconstruction.py b/cmp/stages/diffusion/reconstruction.py index 007255b7e..e573ecc20 100644 --- a/cmp/stages/diffusion/reconstruction.py +++ b/cmp/stages/diffusion/reconstruction.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/stages/diffusion/tracking.py b/cmp/stages/diffusion/tracking.py index 542afe43d..f57a881cf 100644 --- a/cmp/stages/diffusion/tracking.py +++ b/cmp/stages/diffusion/tracking.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/stages/functional/functionalMRI.py b/cmp/stages/functional/functionalMRI.py index 39c83368f..f8a1e2493 100644 --- a/cmp/stages/functional/functionalMRI.py +++ b/cmp/stages/functional/functionalMRI.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/stages/parcellation/parcellation.py b/cmp/stages/parcellation/parcellation.py index 604690ddf..6064567af 100644 --- a/cmp/stages/parcellation/parcellation.py +++ b/cmp/stages/parcellation/parcellation.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/stages/preprocessing/fmri_preprocessing.py b/cmp/stages/preprocessing/fmri_preprocessing.py index b52c34866..56d8771af 100644 --- a/cmp/stages/preprocessing/fmri_preprocessing.py +++ b/cmp/stages/preprocessing/fmri_preprocessing.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/stages/preprocessing/preprocessing.py b/cmp/stages/preprocessing/preprocessing.py index 903a24f4d..e3c827a16 100644 --- a/cmp/stages/preprocessing/preprocessing.py +++ b/cmp/stages/preprocessing/preprocessing.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/cmp/stages/registration/registration.py b/cmp/stages/registration/registration.py index c4d7f2d08..4d219e893 100644 --- a/cmp/stages/registration/registration.py +++ b/cmp/stages/registration/registration.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/stages/segmentation/segmentation.py b/cmp/stages/segmentation/segmentation.py index cf32de9d6..c6272d77a 100644 --- a/cmp/stages/segmentation/segmentation.py +++ b/cmp/stages/segmentation/segmentation.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmp/viz/reports.py b/cmp/viz/reports.py index 68ae6e5e1..df77a52e0 100644 --- a/cmp/viz/reports.py +++ b/cmp/viz/reports.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/bids/io.py b/cmtklib/bids/io.py index 7d37c09ac..e5f57f4ee 100644 --- a/cmtklib/bids/io.py +++ b/cmtklib/bids/io.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/bids/utils.py b/cmtklib/bids/utils.py index 9e9a4ee54..cb0c2da46 100644 --- a/cmtklib/bids/utils.py +++ b/cmtklib/bids/utils.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/config.py b/cmtklib/config.py index d023e69a7..13fe5c6b0 100644 --- a/cmtklib/config.py +++ b/cmtklib/config.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/connectome.py b/cmtklib/connectome.py index 99b62dc66..99913ace9 100644 --- a/cmtklib/connectome.py +++ b/cmtklib/connectome.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/cmtklib/diffusion.py b/cmtklib/diffusion.py index ad58b2472..02ea88cd6 100644 --- a/cmtklib/diffusion.py +++ b/cmtklib/diffusion.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/functionalMRI.py b/cmtklib/functionalMRI.py index 70e5a17a1..1b29eedc5 100644 --- a/cmtklib/functionalMRI.py +++ b/cmtklib/functionalMRI.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/interfaces/ants.py b/cmtklib/interfaces/ants.py index e1e7f6ca9..583491b7c 100644 --- a/cmtklib/interfaces/ants.py +++ b/cmtklib/interfaces/ants.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/interfaces/camino.py b/cmtklib/interfaces/camino.py index f977e98f8..06728a0ed 100644 --- a/cmtklib/interfaces/camino.py +++ b/cmtklib/interfaces/camino.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/interfaces/camino2trackvis.py b/cmtklib/interfaces/camino2trackvis.py index b9b422840..a7046f1df 100644 --- a/cmtklib/interfaces/camino2trackvis.py +++ b/cmtklib/interfaces/camino2trackvis.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/interfaces/diffusion_toolkit.py b/cmtklib/interfaces/diffusion_toolkit.py index 77f8b6663..ba1cf2dce 100644 --- a/cmtklib/interfaces/diffusion_toolkit.py +++ b/cmtklib/interfaces/diffusion_toolkit.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/interfaces/dipy.py b/cmtklib/interfaces/dipy.py index a5bb379e2..cf6df8fc3 100644 --- a/cmtklib/interfaces/dipy.py +++ b/cmtklib/interfaces/dipy.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/cmtklib/interfaces/freesurfer.py b/cmtklib/interfaces/freesurfer.py index c1dbcea12..adc55531b 100644 --- a/cmtklib/interfaces/freesurfer.py +++ b/cmtklib/interfaces/freesurfer.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/interfaces/fsl.py b/cmtklib/interfaces/fsl.py index 3ac35d658..32c46397a 100644 --- a/cmtklib/interfaces/fsl.py +++ b/cmtklib/interfaces/fsl.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/interfaces/misc.py b/cmtklib/interfaces/misc.py index 79e9fa2a9..5ab2c0ea5 100644 --- a/cmtklib/interfaces/misc.py +++ b/cmtklib/interfaces/misc.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/interfaces/mrtrix3.py b/cmtklib/interfaces/mrtrix3.py index 8d2ae8c5b..9bba12a02 100644 --- a/cmtklib/interfaces/mrtrix3.py +++ b/cmtklib/interfaces/mrtrix3.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/parcellation.py b/cmtklib/parcellation.py index f2d89c2ff..2887047c4 100644 --- a/cmtklib/parcellation.py +++ b/cmtklib/parcellation.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/process.py b/cmtklib/process.py index 08c19931b..d48417925 100644 --- a/cmtklib/process.py +++ b/cmtklib/process.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/cmtklib/util.py b/cmtklib/util.py index e9c76fcf5..c0c57dee4 100644 --- a/cmtklib/util.py +++ b/cmtklib/util.py @@ -1,4 +1,4 @@ -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. 
# diff --git a/docs/LICENSE.rst b/docs/LICENSE.rst index 6044541da..38c589c8f 100644 --- a/docs/LICENSE.rst +++ b/docs/LICENSE.rst @@ -3,7 +3,7 @@ BSD 3-Clause License ======================================================= -Copyright (C) 2009-2021, Ecole Polytechnique Fédérale de Lausanne (EPFL) and +Copyright (C) 2009-2022, Ecole Polytechnique Fédérale de Lausanne (EPFL) and Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, & Contributors, All rights reserved. diff --git a/run.py b/run.py index 0bfb8e239..a3a63968b 100644 --- a/run.py +++ b/run.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*-coding:Latin-1 -* -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland # All rights reserved. # diff --git a/scripts/utils/build_bidsapp.sh b/scripts/utils/build_bidsapp.sh index c72a2b12a..cb78bf932 100755 --- a/scripts/utils/build_bidsapp.sh +++ b/scripts/utils/build_bidsapp.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # diff --git a/scripts/utils/build_docs.sh b/scripts/utils/build_docs.sh index 5f475075f..fd0e2ff6a 100644 --- a/scripts/utils/build_docs.sh +++ b/scripts/utils/build_docs.sh @@ -1,5 +1,5 @@ #!/bin/sh -# Copyright (C) 2009-2021, Ecole Polytechnique Federale de Lausanne (EPFL) and +# Copyright (C) 2009-2022, Ecole Polytechnique Federale de Lausanne (EPFL) and # Hospital Center and University of Lausanne (UNIL-CHUV), Switzerland, and CMP3 contributors # All rights reserved. # From 137a98589f1eab288d01fc4522e2c673fc044395 Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Tue, 4 Jan 2022 15:02:59 +0100 Subject: [PATCH 11/15] DOC: Update README and index.rst [skip ci] --- README.md | 26 ++++++++++++++++++++------ docs/index.rst | 37 +++++++++++++++++-------------------- 2 files changed, 37 insertions(+), 26 deletions(-) diff --git a/README.md b/README.md index 98af98e29..6097161a8 100644 --- a/README.md +++ b/README.md @@ -12,14 +12,28 @@ Connectome Mapper 3 is an open-source Python3 image processing pipeline software Connectome Mapper 3 pipelines use a combination of tools from well-known software packages, including [FSL](https://fsl.fmrib.ox.ac.uk/fsl/fslwiki), [FreeSurfer](https://surfer.nmr.mgh.harvard.edu/fswiki/FreeSurferWiki), [ANTs](http://stnava.github.io/ANTs/), [MRtrix3](http://www.mrtrix.org/), [Dipy](https://nipy.org/dipy/) and [AFNI](https://afni.nimh.nih.gov/), orchestrated by the [Nipype](https://nipype.readthedocs.io/en/latest/) dataflow library. These pipelines were designed to provide the best software implementation for each state of processing at the time conceptualization, and can be updated as newer and better neuroimaging software become available. -Reproducibility and replicatibility is achieved through the distribution of a BIDSApp, a software container image which takes BIDS datasets as inputs and which provides a frozen environment where versions of all external softwares and libraries are fixed. Accessibility has been improved to a greater extend by providing an interactive GUI which supports the user in a the steps involved in the configuration and execution of the containerized pipelines. 
+To enhance reproducibility and replicability, the processing pipelines with all dependencies are encapsulated in a Docker image container, which handles datasets organized following the BIDS standard and is distributed as a `BIDS App` @ Docker Hub. For execution on high-performance computing clusters, a Singularity image is also made freely available @ Sylabs Cloud. -This tool allows you to easily do the following: +To enhance accessibility and reduce the risk of misconfiguration, Connectome Mapper 3 comes with an interactive GUI, aka `cmpbidsappmanager`, which supports the user in all the steps involved in the configuration of the pipelines, the configuration and execution of the BIDS App, and the control of the output quality. In addition, to facilitate the use by users not familiar with Docker and Singularity containers, Connectome Mapper 3 provides two Python command-line wrappers (`connectomemapper3_docker` and `connectomemapper3_singularity`) that will generate and run the appropriate command. - * Take T1 / Diffusion / resting-state MRI data from raw to multi-resolution connection matrices. - * Implement tools from different software packages. - * Achieve optimal data processing quality by using the best tools available - * Automate and parallelize processing steps, providing a significant speed-up from typical linear, manual processing. +### How to install the python wrappers and the GUI? + +You need to have either Docker or Singularity engine installed and miniconda installed. We refer to the [dedicated documentation page](https://connectome-mapper-3.readthedocs.io/en/latest/installation.html). + +Then, download the appropriate [environment.yml](https://github.com/connectomicslab/connectomemapper3/raw/master/conda/environment.yml) / [environment_macosx.yml](https://github.com/connectomicslab/connectomemapper3/raw/master/conda/environment_macosx.yml) and create a conda environment `py37cmp-gui` with the following command: + +```bash +$ conda env create -f /path/to/environment[_macosx].yml +``` + +Once the environment is created, activate it and install Connectome Mapper 3 from `PyPI` as follows: + +```bash +$ conda activate py37cmp-gui +(py37cmp-gui)$ pip install connectomemapper +``` + +You are ready to use Connectome Mapper 3! ### Resources diff --git a/docs/index.rst b/docs/index.rst index 98eb62e3c..63932bff2 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -62,25 +62,23 @@ These pipelines are designed to provide the best software implementation for each state of processing at the time of conception, and can be easily updated as newer and better neuroimaging software become available. -Portability, reproducibility and replicatibility are achieved through the -distribution of a BIDSApp, a software container image which (1) takes datasets -organized following the Brain Imaging Data Structure (BIDS) standard, and which -(2) provides a frozen computing environment where versions of all external -softwares and libraries are fixed. Accessibility has been improved to a greater extend -by providing an interactive GUI which supports the user in a the steps involved -in the configuration and execution of the containerized pipelines. - -This tool allows you to easily do the following: - - * Handle T1 / Diffusion / resting-state MRI data, organized following the - Brain Imaging Data Structure (BIDS) standard, from raw to multi-resolution - connection matrices. - * Implement tools from different software packages.
- * Achieve optimal data processing quality by using the best tools available - * Automate and parallelize processing steps with Nipype, which provides a significant - speed-up from typical linear, manual processing. - * Easily configure the pipelines and control the execution and outputs of the processing - with a GUI. +To enhance reproducibility and replicability, the processing pipelines +with all dependencies are encapsulated in a Docker image container, which handles datasets +organized following the BIDS standard and is distributed as a BIDS App @ +`Docker Hub `_. For execution on high-performance computing clusters, a +Singularity image is also made freely available @ +`Sylabs Cloud `_. + +To enhance accessibility and reduce the risk of misconfiguration, +Connectome Mapper 3 comes with an interactive GUI, aka `cmpbidsappmanager`, +which supports the user in all the steps involved in the configuration of +the pipelines, the configuration and execution of the BIDS App, and +the control of the output quality. In addition, to facilitate the use +by users not familiar with Docker and Singularity containers, +Connectome Mapper 3 provides two Python command-line wrappers +(`connectomemapper3_docker` and `connectomemapper3_singularity`) that will +generate and run the appropriate command. + .. _FSL: https://fsl.fmrib.ox.ac.uk/fsl/fslwiki @@ -96,7 +94,6 @@ This tool allows you to easily do the following: .. _Nipype: https://nipype.readthedocs.io/en/latest/ - ******************* License information ******************* From c650e90ceae6ee5afbb3f4360aff6f3ce2410c6d Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Tue, 4 Jan 2022 15:03:49 +0100 Subject: [PATCH 12/15] DOC: Add refactoring of create_workflow() of RegistrationStage in docs/changes.rst [skip ci] --- docs/changes.rst | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/changes.rst b/docs/changes.rst index cf56737f3..3efd23b31 100644 --- a/docs/changes.rst +++ b/docs/changes.rst @@ -21,6 +21,9 @@ It incorporates `Pull Request #132 `_ for more discussion details) +* Split the `create_workflow()` method of the `RegistrationStage` into the `create_ants_workflow()`, `create_flirt_workflow()`, and `create_bbregister_workflow()` methods. + (See `Issue #95 `_ for more discussion details) + *Code style* * Correct a number of code style issues with class names From 8a61c12ad7d54a195537ac0b9b42aff7e6c7f9b2 Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Tue, 4 Jan 2022 15:09:36 +0100 Subject: [PATCH 13/15] DOC: Improve installation instruction in README [skip ci] --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6097161a8..f7fb77910 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ To enhance accessibility and reduce the risk of misconfiguration, Connectome Ma ### How to install the python wrappers and the GUI? -You need to have either Docker or Singularity engine installed and miniconda installed. We refer to the [dedicated documentation page](https://connectome-mapper-3.readthedocs.io/en/latest/installation.html). +You first need to have either the Docker or Singularity engine and miniconda installed. We refer to the [dedicated documentation page](https://connectome-mapper-3.readthedocs.io/en/latest/installation.html) for more detailed instructions.
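The installation steps added in [PATCH 11/15] and refined in [PATCH 13/15] above, together with the `connectomemapper3_docker` wrapper whose usage output is updated in the next patch, combine into a short end-to-end sequence. The sketch below is illustrative only: the environment-file path, BIDS dataset and output paths, participant label, configuration-file name, and FreeSurfer license path are placeholders rather than values taken from these patches.

```bash
# Create the py37cmp-gui conda environment from the repository's environment file
# (environment.yml, or environment_macosx.yml on macOS); the path is a placeholder.
$ conda env create -f /path/to/environment.yml
$ conda activate py37cmp-gui

# Install the Connectome Mapper 3 GUI and command-line wrappers from PyPI.
(py37cmp-gui)$ pip install connectomemapper

# Run the Docker BIDS App wrapper on a single participant; the dataset path,
# output path, participant label, config file, and license path are placeholders.
(py37cmp-gui)$ connectomemapper3_docker \
    /path/to/bids_dataset /path/to/bids_dataset/derivatives participant \
    --participant_label 01 \
    --anat_pipeline_config /path/to/anatomical_config.json \
    --fs_license /path/to/freesurfer/license.txt
```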
Then, download the appropriate [environment.yml](https://github.com/connectomicslab/connectomemapper3/raw/master/conda/environment.yml) / [environment_macosx.yml](https://github.com/connectomicslab/connectomemapper3/raw/master/conda/environment_macosx.yml) and create a conda environment `py37cmp-gui` with the following command: From 7f4e0766f598110a74364854152255b299497769 Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Tue, 4 Jan 2022 15:20:09 +0100 Subject: [PATCH 14/15] DOC: Update usage example in README [skip ci] --- README.md | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index f7fb77910..95ba002f4 100644 --- a/README.md +++ b/README.md @@ -44,11 +44,11 @@ You are ready to use Connectome Mapper 3! ### Usage -This BIDS App has the following command line arguments: +Having the `py37cmp-gui` conda environment previously installed activated, the BIDS App can easily be run using `connectomemapper3_docker`, the python wrapper for Docker, as follows: - $ docker run -it sebastientourbier/connectomemapper-bidsapp -h + (py37cmp-gui)$ connectomemapper3_docker -h - usage: run.py [-h] + usage: connectomemapper3_docker [-h] [--participant_label PARTICIPANT_LABEL [PARTICIPANT_LABEL ...]] [--session_label SESSION_LABEL [SESSION_LABEL ...]] [--anat_pipeline_config ANAT_PIPELINE_CONFIG] @@ -62,7 +62,7 @@ This BIDS App has the following command line arguments: [--fs_license FS_LICENSE] [--coverage] [--notrack] [-v] bids_dir output_dir {participant,group} - Entrypoint script of the BIDS-App Connectome Mapper version v3.0.0-RC3 + Entrypoint script of the BIDS-App Connectome Mapper version v3.0.1 via Docker positional arguments: bids_dir The directory with the input dataset formatted @@ -154,8 +154,7 @@ Thanks goes to these wonderful people ([emoji key](https://allcontributors.org/d -Thanks also goes to all these wonderful people that contributed to the two first versions -of Connectome Mapper: +Thanks also goes to all these wonderful people that contributed to Connectome Mapper 1 and 2: * Collaborators from Signal Processing Laboratory (LTS5), EPFL, Lausanne: From ee3f45caca774291bf2352b2769600537922fd0b Mon Sep 17 00:00:00 2001 From: Sebastien Tourbier Date: Tue, 4 Jan 2022 16:34:48 +0100 Subject: [PATCH 15/15] CI: Update list of outputs for tests 4, 5, and 6 --- .../expected_outputs/ds-sample_test-04_outputs.txt | 12 ------------ .../expected_outputs/ds-sample_test-05_outputs.txt | 12 ------------ .../expected_outputs/ds-sample_test-06_outputs.txt | 12 ------------ 3 files changed, 36 deletions(-) diff --git a/.circleci/tests/expected_outputs/ds-sample_test-04_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-04_outputs.txt index 4cdbfc352..9f8206dcc 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-04_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-04_outputs.txt @@ -1099,12 +1099,6 @@ nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unma nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unmasked/_node.pklz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unmasked/_report/report.rst nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unmasked/result_dwi2tensor_unmasked.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/command.txt -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/first.nii.gz 
-nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/_inputs.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/_node.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/_report/report.rst -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/result_extract_first.pklz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_grad/command.txt nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_grad/grad.txt nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_grad/_inputs.pklz @@ -1166,12 +1160,6 @@ nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registratio nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registration/_report/report.rst nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registration/result_SyN_registration.pklz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registration/Syn_warped_image.nii.gz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/command.txt -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/_inputs.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/_node.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/_report/report.rst -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/result_target_resample.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/target_first.nii.gz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/tensor2FA/command.txt nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/tensor2FA/fa_corrected.mif nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/tensor2FA/_inputs.pklz diff --git a/.circleci/tests/expected_outputs/ds-sample_test-05_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-05_outputs.txt index 19e33c7ce..1481edeca 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-05_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-05_outputs.txt @@ -1101,12 +1101,6 @@ nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unma nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unmasked/_node.pklz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unmasked/_report/report.rst nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unmasked/result_dwi2tensor_unmasked.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/command.txt -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/first.nii.gz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/_inputs.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/_node.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/_report/report.rst -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/result_extract_first.pklz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_grad/command.txt nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_grad/grad.txt nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_grad/_inputs.pklz @@ -1168,12 +1162,6 @@ 
nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registratio nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registration/_report/report.rst nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registration/result_SyN_registration.pklz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registration/Syn_warped_image.nii.gz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/command.txt -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/_inputs.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/_node.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/_report/report.rst -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/result_target_resample.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/target_first.nii.gz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/tensor2FA/command.txt nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/tensor2FA/fa_corrected.mif nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/tensor2FA/_inputs.pklz diff --git a/.circleci/tests/expected_outputs/ds-sample_test-06_outputs.txt b/.circleci/tests/expected_outputs/ds-sample_test-06_outputs.txt index b93aa0755..dc2fcb1f3 100644 --- a/.circleci/tests/expected_outputs/ds-sample_test-06_outputs.txt +++ b/.circleci/tests/expected_outputs/ds-sample_test-06_outputs.txt @@ -1100,12 +1100,6 @@ nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unma nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unmasked/_node.pklz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unmasked/_report/report.rst nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/dwi2tensor_unmasked/result_dwi2tensor_unmasked.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/command.txt -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/first.nii.gz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/_inputs.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/_node.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/_report/report.rst -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_first/result_extract_first.pklz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_grad/command.txt nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_grad/grad.txt nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/extract_grad/_inputs.pklz @@ -1167,12 +1161,6 @@ nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registratio nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registration/_report/report.rst nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registration/result_SyN_registration.pklz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/SyN_registration/Syn_warped_image.nii.gz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/command.txt -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/_inputs.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/_node.pklz 
-nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/_report/report.rst -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/result_target_resample.pklz -nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/target_resample/target_first.nii.gz nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/tensor2FA/command.txt nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/tensor2FA/fa_corrected.mif nipype-1.7.0/sub-01/ses-01/diffusion_pipeline/registration_stage/tensor2FA/_inputs.pklz