diff --git a/.github/workflows/python-publish.yml b/.github/workflows/python-publish.yml new file mode 100644 index 0000000..bdaab28 --- /dev/null +++ b/.github/workflows/python-publish.yml @@ -0,0 +1,39 @@ +# This workflow will upload a Python Package using Twine when a release is created +# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries + +# This workflow uses actions that are not certified by GitHub. +# They are provided by a third-party and are governed by +# separate terms of service, privacy policy, and support +# documentation. + +name: Upload Python Package + +on: + release: + types: [published] + +permissions: + contents: read + +jobs: + deploy: + + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + - name: Set up Python + uses: actions/setup-python@v3 + with: + python-version: '3.x' + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + - name: Build package + run: python -m build + - name: Publish package + uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 + with: + user: __token__ + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..4119736 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,24 @@ +# Required +version: 2 + +# Set the OS, Python version and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.12" + jobs: + pre_create_environment: + - asdf plugin add poetry + - asdf install poetry latest + - asdf global poetry latest + - poetry config virtualenvs.create false + post_install: + - poetry install --with dev + +# Build documentation in the "docs/" directory with Sphinx +sphinx: + configuration: docs/conf.py + +submodules: + include: all # Include all submodules, if any + recursive: true \ No newline at end of file diff --git a/README.md b/README.md index 
5408a98..aa764f1 100644 --- a/README.md +++ b/README.md @@ -1,52 +1,35 @@ # ElikoPy +[![Documentation Status](https://readthedocs.org/projects/elikopy/badge/?version=latest)](https://elikopy.readthedocs.io/en/latest/?badge=latest) [![PyPI](https://img.shields.io/pypi/v/elikopy?label=pypi%20package)](https://pypi.org/project/elikopy//) ![GitHub repo size](https://img.shields.io/github/repo-size/Hyedryn/elikopy) [![DOI](https://zenodo.org/badge/296056994.svg)](https://zenodo.org/doi/10.5281/zenodo.10514465) -ElikoPy is Python library aiming at easing the processing of diffusion imaging for microstructural analysis. -This Python library is based on - - DIPY, a python library for the analysis of MR diffusion imaging. - - Microstructure fingerprinting, a python library doing estimation of white matter microstructural properties from a dictionary of Monte Carlo diffusion MRI fingerprints. - - FSL, a comprehensive library of analysis tools for FMRI, MRI and DTI brain imaging data. - - DIAMOND, a c software that is characterizing brain tissue by assessment of the distribution of anisotropic microstructural environments in diffusion‐compartment imaging. +ElikoPy is a Python library designed to simplify the processing of diffusion imaging for microstructural analysis. -### Installation +## Installation -ElikoPy requires [Python](https://www.python.org/) v3.8+ to run. +### Prerequisites -After cloning the repo, you can either firstly install all the python dependencies including optionnal dependency used to speed up the code: +ElikoPy requires [Python](https://www.python.org/) v3.8+. -```sh -$ pip install -r requirements.txt --user -``` Or you can install directly the library with only the mandatory dependencies (if you performed the previous step, you still need to perform this step): -```sh -$ python3 -m pip install . 
-``` +### Installation Steps -Microstructure Fingerprinting is currently not avaible in the standard python repo, you can clone and install this library manually. +Clone the repository and install the dependencies: ```sh -$ git clone git@github.com:rensonnetg/microstructure_fingerprinting.git -$ cd microstructure_fingerprinting -$ python setup.py install +git clone https://github.com/Hyedryn/elikopy.git +cd elikopy +python -m pip install . ``` -FSL also needs to be installed and availabe in our path if you want to perform mouvement correction or tbss. - -Unfortunatly, the DIAMOND code is not publically available. If you do not have it in your possesion, you will not be able to use this algorithm. If you have it, simply add the executable to your path. - -### Usage - -Todo +If you wish to use movement correction or TBSS, ensure FSL is installed and available in your path. -### Development +**Note:** The DIAMOND microstructural model is not publicly available. If you have it, add the executable to your path. Otherwise, you won't be able to use this algorithm. -Want to contribute? Great! +## Development -Do not hesitate to open issue or pull request! -### Todos +Interested in contributing? Wonderful! - - Release a complete and accurate documentation for the library +Feel free to open issues or pull requests. Your contributions are welcome! +## Publications & Citations -**Free Software, Hell Yeah!** +If you use ElikoPy in your research, please cite it using the package DOI [![DOI](https://zenodo.org/badge/296056994.svg)](https://zenodo.org/doi/10.5281/zenodo.10514465). 
diff --git a/code_sructure.txt b/code_sructure.txt deleted file mode 100644 index 4d99c26..0000000 --- a/code_sructure.txt +++ /dev/null @@ -1,11 +0,0 @@ -1) dicom to nifti -2) patient list -3) Preproc => Bet, eddy (motion correction), denoising -4) DTI -5) TBSS + association avec région -6) NODDI -7) DIAMOND -8) Fingerprint -9) Visualization -10) Tractography -11) Apply all workflow \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index b81007d..d1851aa 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,9 +13,8 @@ autodoc_mock_imports = ['torch'] import os import sys -sys.path.insert(0, os.path.abspath('..')) +sys.path.insert(0, os.path.abspath('../elikopy')) -import elikopy import sphinx_rtd_theme @@ -36,8 +35,10 @@ extensions = [ 'sphinx_rtd_theme', 'sphinx.ext.autodoc', + 'sphinx_autodoc_typehints', ] + # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] diff --git a/elikopy/__init__.py b/elikopy/__init__.py index 1ee0149..104269b 100644 --- a/elikopy/__init__.py +++ b/elikopy/__init__.py @@ -3,11 +3,6 @@ import elikopy.utils import elikopy.utilsSynb0Disco import elikopy.registration -try: - import elikopy.mouse -except ImportError as e: - elikopy.mouse = None - print("Mouse module not available: {}".format(e)) try: diff --git a/elikopy/core.py b/elikopy/core.py index 0b4e5d4..d4a1a58 100644 --- a/elikopy/core.py +++ b/elikopy/core.py @@ -14,14 +14,17 @@ import matplotlib import elikopy.utils -from elikopy.individual_subject_processing import preproc_solo, dti_solo, white_mask_solo, noddi_solo, diamond_solo, \ - mf_solo, noddi_amico_solo, ivim_solo, odf_csd_solo, odf_msmtcsd_solo, tracking_solo, verdict_solo, clean_study_solo +from elikopy.individual_subject_processing import (preproc_solo, dti_solo, + white_mask_solo, noddi_solo, diamond_solo, mf_solo, noddi_amico_solo, + ivim_solo, odf_csd_solo, odf_msmtcsd_solo, tracking_solo, sift_solo, + verdict_solo, clean_study_solo) from 
elikopy.utils import submit_job, get_job_state, makedir, tbss_utils, regall_FA, regall, randomise_all def dicom_to_nifti(folder_path): - """ Convert dicom data into compressed nifti. Converted dicoms are then moved to a sub-folder named original_data. + """ Convert dicom data into compressed nifti. Converted dicoms are then + moved to a sub-folder named original_data. The niftis are named patientID_ProtocolName_SequenceName. :param folder_path: Path to root folder containing all the dicoms @@ -268,11 +271,27 @@ def patient_list(self, folder_path=None, bids_path=None, reverseEncoding=True): except: print('WARNING: JSON missing for patient', name) - try: - shutil.copyfile(folder_path + typeFolderName + "index.txt",folder_path + "/subjects/" + name + "/dMRI/raw/" + "index.txt") + if os.path.exists(folder_path + typeFolderName + "acqparams.txt"): shutil.copyfile(folder_path + typeFolderName + "acqparams.txt",folder_path + "/subjects/" + name + "/dMRI/raw/" + "acqparams.txt") - except: - print('WARNING: acqparam or index missing, you will get error trying to run EDDY correction') + else: + print('WARNING: acqparam missing, generating a default one') + with open(folder_path + "/subjects/" + name + '/dMRI/raw/' + 'acqparams.txt','w') as f: + f.write("0 1 0 0.0779\n") + + if os.path.exists(folder_path + typeFolderName + "index.txt"): + shutil.copyfile(folder_path + typeFolderName + "index.txt", + folder_path + "/subjects/" + name + "/dMRI/raw/" + "index.txt") + else: + print('WARNING: index missing, generating a default one') + #Count number of volumes using fslval command + cmd = "fslval " + folder_path + "/subjects/" + name + "/dMRI/raw/" + name + "_raw_dmri.nii.gz" + " dim4" + process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE) + output_proc, error_proc = process.communicate() + n = int(output_proc.decode('utf-8')) + #Create index file + with open(folder_path + "/subjects/" + name + '/dMRI/raw/' + 'index.txt', 'w') as file: + # write a line composed of n 1s 
+ file.write(" ".join(["1"]*n) + "\n") try: shutil.copyfile(folder_path + typeFolderName + "slspec.txt",folder_path + "/subjects/" + name + "/dMRI/raw/" + "slspec.txt") @@ -336,107 +355,126 @@ def patient_list(self, folder_path=None, bids_path=None, reverseEncoding=True): dw_mri_path = folder_path + "/subjects/" + name + "/dMRI/raw/" + name + "_raw_dmri.nii.gz" b0_path = folder_path + "/subjects/" + name + "/dMRI/raw/" + name +"_b0_reverse.nii.gz" - #Copy b0 to patient path: - fslroi = "fslroi " + reverse_path + " " + b0_path + " 0 1" - reverse_log = open(folder_path + "/logs.txt","a+") + # Read bval file directly + with open(reverse_path_bval, "r") as bval_file: + bvals = list(map(float, bval_file.read().strip().split())) + + # Identify indices of b0 volumes + b0_indices = [idx for idx, bval in enumerate(bvals) if bval == 0] + + if not b0_indices: + print("No b0 volumes found in reverse encoding.") + b0_indices = [0] + + # Extract and concatenate non-sequential b0 volumes + temp_b0_files = [] + for i, b0_idx in enumerate(b0_indices): + temp_b0_file = f"{folder_path}/subjects/{name}/dMRI/raw/temp_b0_{i}.nii.gz" + temp_b0_files.append(temp_b0_file) + fslroi_cmd = f"fslroi {reverse_path} {temp_b0_file} {b0_idx} 1" + try: + output = subprocess.check_output(fslroi_cmd, universal_newlines=True, + shell=True, stderr=subprocess.STDOUT) + print(output) + except subprocess.CalledProcessError as e: + print(f"Error extracting b0 volume at index {b0_idx}") + exit() + + # Merge all temporary b0 files into a single file + merge_b0_cmd = f"fslmerge -t {b0_path} " + " ".join(temp_b0_files) try: - output = "" - output = subprocess.check_output(fslroi, universal_newlines=True, shell=True, stderr=subprocess.STDOUT) - except subprocess.CalledProcessError as e: - print("Error when calling fslroi, no reverse direction will be available") - reverse_log.write("Error when calling fslroi, no reverse direction will be available\n") - print(e.returncode) - print(e.cmd) - print(e.output) - 
reverse_log.write(e.output + "\n") - finally: + output = subprocess.check_output(merge_b0_cmd, universal_newlines=True, shell=True, + stderr=subprocess.STDOUT) print(output) - reverse_log.write(output) - #print(error_log) - - - #Merge b0 with original DW-MRI: - merge_b0 = "fslmerge -t " + dw_mri_path + " " + dw_mri_path + " " + b0_path + " " - try: - output = "" - output = subprocess.check_output(merge_b0, universal_newlines=True, shell=True, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: - print("Error when calling fslmerge, no reverse direction will be available") - reverse_log.write("Error when calling fslmerge, no reverse direction will be available\n") - print(e.returncode) - print(e.cmd) - print(e.output) - reverse_log.write(e.output + "\n") - finally: - print(output) - reverse_log.write(output) - - #Edit bvec: - with open(folder_path + "/subjects/" + name + "/dMRI/raw/" + name + "_raw_dmri.bvec", "r") as file_object: - lines = file_object.readlines() - with open(folder_path + "/subjects/" + name + "/dMRI/raw/" + name + "_raw_dmri.bvec", "r") as file_object2: - nlines = file_object2.read().count('\n') - - if nlines > 4: - lines.append("1 0 0\n") - with open(folder_path + "/subjects/" + name + "/dMRI/raw/" + name + "_raw_dmri.bvec", - "w") as f: - for line in lines: - f.write(line) - else: - with open(folder_path + "/subjects/" + name + "/dMRI/raw/" + name + "_raw_dmri.bvec", - "w") as f: - i = 0 - for line in lines: - if i==0: - f.write(line.rstrip().rstrip("\n") + " 1\n") - elif i==1: - f.write(line.rstrip().rstrip("\n") + " 0\n") - elif i==2: - f.write(line.rstrip().rstrip("\n") + " 0\n") - else: - f.write(line) - i = i + 1 - - #Edit bval - with open(folder_path + "/subjects/" + name + "/dMRI/raw/" + name + "_raw_dmri.bval", "r") as file_object: - file_object=file_object.read().rstrip().rstrip("\n") - - nlines = file_object.count('\n') - if nlines > 4: - with open(folder_path + "/subjects/" + name + "/dMRI/raw/" + name + 
"_raw_dmri.bval", "w") as myfile: - myfile.write(file_object + "\n0"+ "\n") - else: - with open(folder_path + "/subjects/" + name + "/dMRI/raw/" + name + "_raw_dmri.bval", "w") as myfile: - myfile.write(file_object + " 0"+ "\n") - - #Edit index: - with open(folder_path + "/subjects/" + name + '/dMRI/raw/' + 'index.txt', "r") as f0: - line = f0.read() - line = " ".join(line.split()) - original_index = [int(s) for s in line.split(' ')] - original_index.append(original_index[-1]+1) - - with open(folder_path + "/subjects/" + name + '/dMRI/raw/' + 'index.txt', "w") as myfile: - new_index = ''.join(str(j) + " " for j in original_index).rstrip() + "\n" - myfile.write(new_index) + print("Error when merging b0 volumes") + exit() + + # Cleanup temporary files + for temp_file in temp_b0_files: + os.remove(temp_file) + + # Merge extracted b0 volumes with the original DW-MRI file + #merge_b0_cmd = f"fslmerge -t {dw_mri_path} {dw_mri_path} {b0_path}" + #try: + # output = subprocess.check_output(merge_b0_cmd, universal_newlines=True, shell=True, + # stderr=subprocess.STDOUT) + # print(output) + #except subprocess.CalledProcessError as e: + # print("Error when merging b0 volumes with the original DW-MRI") + # exit() + + # Update bvec + bvec_path = f"{folder_path}/subjects/{name}/dMRI/raw/{name}_raw_dmri.bvec" + # Read existing bvec file + with open(bvec_path, "r") as bvec_file: + lines = bvec_file.readlines() + + # Check if the bvec file is formatted with multiple lines or a single line + if len(lines) >= 3: # Multi-line format + lines_reverse = ["0","0","0"] + for _ in range(len(b0_indices)-1): + lines_reverse[0] = lines_reverse[0].strip() + " 0" + lines_reverse[1] = lines_reverse[1].strip() + " 0" + lines_reverse[2] = lines_reverse[2].strip() + " 0" + lines_reverse[0] = lines_reverse[0].strip() + "\n" + lines_reverse[1] = lines_reverse[1].strip() + "\n" + lines_reverse[2] = lines_reverse[2].strip() + "\n" + else: # Single-line format + lines_reverse = ["0","0","0"] + + 
bvec_reverse_path = f"{folder_path}/subjects/{name}/dMRI/raw/{name}_raw_dmri_reverse.bvec" + # Write updated bvec file + with open(bvec_reverse_path, "w") as bvec_file: + bvec_file.writelines(lines_reverse) + + # Edit bval + bval_path = f"{folder_path}/subjects/{name}/dMRI/raw/{name}_raw_dmri.bval" + bval_reverse_path = f"{folder_path}/subjects/{name}/dMRI/raw/{name}_raw_dmri_reverse.bval" + # Read existing bval file + with open(bval_path, "r") as bval_file: + bval_content = bval_file.read().strip() + + # Check if the bval file is formatted with multiple lines or a single line + if "\n" in bval_content: # Multi-line format + reverse_bval_lines = [] + for _ in b0_indices: + reverse_bval_lines.append("0") + # Write updated bval file + with open(bval_reverse_path, "w") as reverse_bval_file: + reverse_bval_file.write("\n".join(reverse_bval_lines) + "\n") + else: # Single-line format + # Append zeroes for each b0 volume found + reverse_bval_content = " ".join(["0"] * len(b0_indices)) + # Write updated bval file + with open(bval_reverse_path, "w") as reverse_bval_file: + reverse_bval_file.write(reverse_bval_content + "\n") + + + # Update index file + with open(f"{folder_path}/subjects/{name}/dMRI/raw/index.txt", "r") as idx_file: + original_index = list(map(int, idx_file.read().strip().split())) + + new_indices = [original_index[-1] + 1 for i in range(len(b0_indices))] + with open(f"{folder_path}/subjects/{name}/dMRI/raw/index_reverse.txt", "w") as idx_file: + idx_file.write(" ".join(map(str, new_indices)) + "\n") #Edit acqparameters: with open(folder_path + "/subjects/" + name + '/dMRI/raw/' + 'acqparams.txt') as f: original_acq = [[float(x) for x in line2.split()] for line2 in f] + shutil.copyfile(folder_path + "/subjects/" + name + '/dMRI/raw/' + 'acqparams.txt', folder_path + "/subjects/" + name + '/dMRI/raw/' + 'acqparams_original.txt') + shutil.copyfile(reverse_path_acqparameters, folder_path + "/subjects/" + name + '/dMRI/raw/' + 'acqparams_reverse.txt') + with 
open(reverse_path_acqparameters) as f2: reverse_acq = [[float(x) for x in line2.split()] for line2 in f2] original_acq.append([reverse_acq[0][0], reverse_acq[0][1], reverse_acq[0][2], reverse_acq[0][3]]) - with open(folder_path + "/subjects/" + name + '/dMRI/raw/' + 'acqparams.txt', 'w') as file: + with open(folder_path + "/subjects/" + name + '/dMRI/raw/' + 'acqparams_all.txt', 'w') as file: file.writelines(' '.join(str(j) for j in i) + '\n' for i in original_acq) print(original_acq) - - reverse_log.close() - error = list(dict.fromkeys(error)) success = list(dict.fromkeys(success)) @@ -456,7 +494,7 @@ def patient_list(self, folder_path=None, bids_path=None, reverseEncoding=True): f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Patient list generated\n") f.close() - def preproc(self, folder_path=None, reslice=False, reslice_addSlice=False, denoising=False, denoising_algorithm="mppca_mrtrix", gibbs=False, topup=False, topupConfig=None, forceSynb0DisCo=False, useGPUsynb0DisCo=False, eddy=False, biasfield=False, biasfield_bsplineFitting=[100,3], biasfield_convergence=[1000,0.001], patient_list_m=None, starting_state=None, bet_median_radius=2, bet_numpass=1, bet_dilate=2, static_files_path=None, cuda=None, cuda_name="eddy_cuda10.1", s2v=[0,5,1,'trilinear'], olrep=[False, 4, 250, 'sw'], slurm=None, slurm_email=None, slurm_timeout=None, cpus=None, slurm_mem=None, qc_reg=True, niter=5, slspec_gc_path=None, report=True): + def preproc(self, folder_path=None, reslice=False, reslice_addSlice=False, denoising=True, gibbs=False, topup=True, topupConfig=None, topupOnlyFirstB0=False, forceSynb0DisCo=False, useGPUsynb0DisCo=False, eddy=True, biasfield=False, biasfield_bsplineFitting=[100,3], biasfield_convergence=[1000,0.001], patient_list_m=None, starting_state=None, bet_median_radius=2, bet_numpass=1, bet_dilate=2, static_files_path=None, cuda=None, cuda_name="eddy_cuda10.1", s2v=[0,5,1,'trilinear'], olrep=[False, 4, 250, 'sw'], 
eddy_additional_arg="", slurm=None, slurm_email=None, slurm_timeout=None, cpus=None, slurm_mem=None, qc_reg=False, niter=5, slspec_gc_path=None, report=True): """ Performs data preprocessing. By default only the brain extraction is enabled. Optional preprocessing steps include : reslicing, denoising, gibbs ringing correction, susceptibility field estimation, EC-induced distortions and motion correction, bias field correction. The results are stored in the preprocessing subfolder of each study subject /subjects//dMRI/preproc. @@ -496,8 +534,6 @@ def preproc(self, folder_path=None, reslice=False, reslice_addSlice=False, denoi """ assert starting_state in (None, "denoising", "gibbs", "topup", "eddy", "biasfield", "report", "post_report", "topup_synb0DisCo_Registration", "topup_synb0DisCo_Inference", "topup_synb0DisCo_Apply", "topup_synb0DisCo_topup"), 'invalid starting state!' - if denoising==True: - assert denoising_algorithm in ["patch2self", "mppca_mrtrix", "mppca_dipy"], 'invalid denoising algorithm!' if starting_state=="denoising": assert denoising == True, 'if starting_state is denoising, denoising must be True!' 
if starting_state=="gibbs": @@ -545,10 +581,10 @@ def preproc(self, folder_path=None, reslice=False, reslice_addSlice=False, denoi p_job = { "wrap": "export OMP_NUM_THREADS="+str(tot_cpu)+" ; export FSLPARALLEL="+str(tot_cpu)+" ; python -c 'from elikopy.individual_subject_processing import preproc_solo; preproc_solo(\"" + folder_path + "/\",\"" + p + "\",eddy=" + str( eddy) + ",biasfield=" + str(biasfield) + ",biasfield_convergence=[" + str(biasfield_convergence[0]) + "," + str(biasfield_convergence[1]) + "],biasfield_bsplineFitting=[" + str(biasfield_bsplineFitting[0]) + "," + str(biasfield_bsplineFitting[1]) + "],denoising=" + str( - denoising) + ",denoising_algorithm=\"" + str(denoising_algorithm) +"\",reslice=" + str(reslice) + ",reslice_addSlice=" + str(reslice_addSlice) + ",gibbs=" + str( - gibbs) + ",topup=" + str(topup) + ",forceSynb0DisCo=" + str(forceSynb0DisCo) + ",useGPUsynb0DisCo=" + str(useGPUsynb0DisCo) + ",topupConfig=\"" + str(topupConfig) + "\",starting_state=\"" + str(starting_state) + "\",static_files_path=\""+ static_files_path +"\" ,bet_median_radius=" + str( + denoising) + ",reslice=" + str(reslice) + ",reslice_addSlice=" + str(reslice_addSlice) + ",gibbs=" + str( + gibbs) + ",topup=" + str(topup) + ",topupOnlyFirstB0=" + str(topupOnlyFirstB0) + ",forceSynb0DisCo=" + str(forceSynb0DisCo) + ",useGPUsynb0DisCo=" + str(useGPUsynb0DisCo) + ",topupConfig=\"" + str(topupConfig) + "\",starting_state=\"" + str(starting_state) + "\",static_files_path=\""+ static_files_path +"\" ,bet_median_radius=" + str( bet_median_radius) + ",bet_dilate=" + str(bet_dilate) + ", qc_reg=" + str(qc_reg) + ", report=" + str(report) + ", slspec_gc_path=" + str(slspec_gc_path) + ", core_count=" + str(core_count)+ ", niter=" + str(niter)+",bet_numpass=" + str( - bet_numpass) + ",cuda=" + str(cuda) + ",cuda_name=\"" + str(cuda_name) + "\",s2v=[" + str(s2v[0]) + "," + str(s2v[1]) + "," + str(s2v[2]) + ",\"" + str(s2v[3]) + "\"],olrep=[" + str(olrep[0]) + "," + str(olrep[1]) 
+ "," + str(olrep[2]) + ",\"" + str(olrep[3]) + "\"])'", + bet_numpass) + ",cuda=" + str(cuda) + ",cuda_name=\"" + str(cuda_name) + "\",s2v=[" + str(s2v[0]) + "," + str(s2v[1]) + "," + str(s2v[2]) + ",\"" + str(s2v[3]) + "\"],olrep=[" + str(olrep[0]) + "," + str(olrep[1]) + "," + str(olrep[2]) + ",\"" + str(olrep[3]) + "\"], " + "eddy_additional_arg=\"" + str(eddy_additional_arg) + "\" )'", "job_name": "preproc_" + p, "ntasks": 1, "cpus_per_task": 8, @@ -592,12 +628,12 @@ def preproc(self, folder_path=None, reslice=False, reslice_addSlice=False, denoi f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Successfully submited job %s using slurm\n" % p_job_id) else: core_count = 1 if cpus is None else cpus - preproc_solo(folder_path + "/",p,reslice=reslice,reslice_addSlice=reslice_addSlice,denoising=denoising, denoising_algorithm=denoising_algorithm, gibbs=gibbs, - topup=topup, topupConfig=topupConfig, forceSynb0DisCo=forceSynb0DisCo, useGPUsynb0DisCo=useGPUsynb0DisCo, + preproc_solo(folder_path + "/",p,reslice=reslice,reslice_addSlice=reslice_addSlice,denoising=denoising, gibbs=gibbs, + topup=topup, topupConfig=topupConfig, topupOnlyFirstB0=topupOnlyFirstB0, forceSynb0DisCo=forceSynb0DisCo, useGPUsynb0DisCo=useGPUsynb0DisCo, eddy=eddy,biasfield=biasfield, biasfield_bsplineFitting=biasfield_bsplineFitting, biasfield_convergence=biasfield_convergence, starting_state=starting_state, bet_median_radius=bet_median_radius,bet_dilate=bet_dilate,bet_numpass=bet_numpass,cuda=self._cuda, qc_reg=qc_reg, core_count=core_count, - cuda_name=cuda_name, s2v=s2v, olrep=olrep, niter=niter, slspec_gc_path=slspec_gc_path, report=report, static_files_path=static_files_path) + cuda_name=cuda_name, s2v=s2v, olrep=olrep, niter=niter, slspec_gc_path=slspec_gc_path, report=report, static_files_path=static_files_path, eddy_additional_arg=eddy_additional_arg) matplotlib.pyplot.close(fig='all') f.write("["+log_prefix+"] " + 
datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Successfully preprocessed patient %s\n" % p) f.flush() @@ -625,13 +661,13 @@ def preproc(self, folder_path=None, reslice=False, reslice_addSlice=False, denoi output, error = process.communicate() # 2) merge both pdfs - from PyPDF2 import PdfFileMerger + from pypdf import PdfMerger for p in patient_list: patient_path = p if os.path.exists(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr.qc/qc_updated.pdf'): pdfs = [folder_path + '/subjects/' + patient_path + '/dMRI/preproc/quality_control/qc_report.pdf', folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr.qc/qc_updated.pdf'] - merger = PdfFileMerger() + merger = PdfMerger() for pdf in pdfs: merger.append(pdf) merger.write(folder_path + '/subjects/' + patient_path + '/quality_control.pdf') @@ -651,7 +687,8 @@ def preproc(self, folder_path=None, reslice=False, reslice_addSlice=False, denoi f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": All the preprocessing operation are finished!\n") f.close() - def dti(self,folder_path=None, patient_list_m=None, maskType="brain_mask_dilated", slurm=None, slurm_email=None, slurm_timeout=None, slurm_cpus=None, slurm_mem=None): + def dti(self,folder_path=None, patient_list_m=None, maskType="brain_mask_dilated", use_all_shells: bool = False, + slurm=None, slurm_email=None, slurm_timeout=None, slurm_cpus=None, slurm_mem=None): """Computes the DTI metrics for each subject using Weighted Least-Squares. The outputs are available in the directories /subjects//dMRI/dti/. example : study.dti() @@ -659,6 +696,9 @@ def dti(self,folder_path=None, patient_list_m=None, maskType="brain_mask_dilated :param folder_path: the path to the root directory. default=study_folder :param patient_list_m: Define a subset of subjects to process instead of all the available subjects. 
 example : ['patientID1','patientID2','patientID3']. default=None :param maskType: Define which mask to use during processing. default="brain_mask_dilated" + :param use_all_shells: Boolean. DTI will use all shells available, not just + shells <= 2000, this will cause a more defined white matter at the cost of + an erroneous estimation of the CSF. The default is False. :param slurm: Whether to use the Slurm Workload Manager or not (for computer clusters). default=value_during_init :param slurm_email: Email adress to send notification if a task fails. default=None :param slurm_timeout: Replace the default slurm timeout of 1h by a custom timeout. @@ -695,7 +735,7 @@ def dti(self,folder_path=None, patient_list_m=None, maskType="brain_mask_dilated if slurm: p_job = { - "wrap": "python -c 'from elikopy.individual_subject_processing import dti_solo; dti_solo(\"" + folder_path + "/\",\"" + p + "\",maskType=\"" + str(maskType) + "\")'", + "wrap": "python -c 'from elikopy.individual_subject_processing import dti_solo; dti_solo(\"" + folder_path + "/\",\"" + p + "\",maskType=\"" + str(maskType) + "\", use_all_shells=" + str(use_all_shells) +")'", "job_name": "dti_" + p, "ntasks": 1, "cpus_per_task": 1, @@ -717,7 +757,7 @@ def dti(self,folder_path=None, patient_list_m=None, maskType="brain_mask_dilated f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Patient %s is ready to be processed\n" % p) f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Successfully submited job %s using slurm\n" % p_job_id) else: - dti_solo(folder_path + "/",p,maskType=maskType) + dti_solo(folder_path + "/",p,maskType=maskType,use_all_shells=use_all_shells) matplotlib.pyplot.close(fig='all') f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Successfully applied DTI on patient %s\n" % p) f.flush() @@ -731,7 +771,8 @@ def dti(self,folder_path=None, patient_list_m=None, 
maskType="brain_mask_dilated f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": End of DTI\n") f.close() - def fingerprinting(self, dictionary_path=None, folder_path=None, peaksType="MSMT-CSD", maskType="brain_mask_dilated", csf_mask=True, ear_mask=False, mfdir=None, slurm=None, patient_list_m=None, slurm_email=None, slurm_timeout=None, cpus=None, slurm_mem=None): + def fingerprinting(self, dictionary_path=None, folder_path=None, peaksType="MSMT-CSD", maskType="brain_mask_dilated", csf_mask=True, ear_mask=False, mfdir=None, output_filename:str="", + slurm=None, patient_list_m=None, slurm_email=None, slurm_timeout=None, cpus=None, slurm_mem=None): """Computes the Microstructure Fingerprinting metrics for each subject. The outputs are available in the directories /subjects//dMRI/microstructure/mf/. example : study.fingerprinting(dictionary_path='my_dictionary') @@ -744,10 +785,10 @@ def fingerprinting(self, dictionary_path=None, folder_path=None, peaksType="MSMT :param slurm: Whether to use the Slurm Workload Manager or not (for computer clusters). default=value_during_init :param slurm_email: Email adress to send notification if a task fails. default=None :param slurm_timeout: Replace the default slurm timeout of 20h by a custom timeout. - :param slurm_cpus: Replace the default number of slurm cpus of 1 by a custom number of cpus of using slum, or for standard processing, its the number of core available for processing. + :param cpus: Replace the default number of slurm cpus of 1 by a custom number of cpus of using slum, or for standard processing, its the number of core available for processing. :param slurm_mem: Replace the default amount of ram allocated to the slurm task (8096MO by cpu) by a custom amount of ram. 
""" - log_prefix="MF" + log_prefix = "MF" folder_path = self._folder_path if folder_path is None else folder_path slurm = self._slurm if slurm is None else slurm mfdir = "mf" if mfdir is None else mfdir @@ -784,7 +825,7 @@ def fingerprinting(self, dictionary_path=None, folder_path=None, peaksType="MSMT if slurm: p_job = { - "wrap": "export MKL_NUM_THREADS="+ str(core_count)+" ; export OMP_NUM_THREADS="+ str(core_count)+" ; python -c 'from elikopy.individual_subject_processing import mf_solo; mf_solo(\"" + folder_path + "/\",\"" + p + "\", \"" + dictionary_path + "\", peaksType=\"" + str(peaksType) + "\", core_count=" + str(core_count) + ", maskType=\"" + str(maskType) + "\", mfdir=\"" + str(mfdir)+ "\", csf_mask=" + str(csf_mask) + ", ear_mask=" + str(ear_mask) + ")'", + "wrap": "export MKL_NUM_THREADS="+ str(core_count)+" ; export OMP_NUM_THREADS="+ str(core_count)+" ; python -c 'from elikopy.individual_subject_processing import mf_solo; mf_solo(\"" + folder_path + "/\",\"" + p + "\", \"" + dictionary_path + "\", peaksType=\"" + str(peaksType) + "\", core_count=" + str(core_count) + ", maskType=\"" + str(maskType) + "\", mfdir=\"" + str(mfdir)+ "\", csf_mask=" + str(csf_mask) + ", ear_mask=" + str(ear_mask) + ", output_filename= \"" + output_filename + "\"" + ")'", "job_name": "mf_" + p, "ntasks": 1, "cpus_per_task": core_count, @@ -805,7 +846,7 @@ def fingerprinting(self, dictionary_path=None, folder_path=None, peaksType="MSMT f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Patient %s is ready to be processed\n" % p) f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Successfully submited job %s using slurm\n" % p_job_id) else: - mf_solo(folder_path + "/", p, dictionary_path, peaksType=peaksType, core_count=core_count, maskType=maskType, csf_mask=csf_mask, ear_mask=ear_mask, mfdir=mfdir) + mf_solo(folder_path + "/", p, dictionary_path, peaksType=peaksType, core_count=core_count, 
maskType=maskType, csf_mask=csf_mask, ear_mask=ear_mask, mfdir=mfdir, output_filename=output_filename) matplotlib.pyplot.close(fig='all') f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Successfully applied microstructure fingerprinting on patient %s\n" % p) f.flush() @@ -983,7 +1024,8 @@ def odf_msmtcsd(self, folder_path=None, num_peaks = 2, peaks_threshold=0.25, mas f.close() - def tracking(self, folder_path=None, streamline_number:int=100000, max_angle:int=15, msmtCSD:bool=True, slurm=None, patient_list_m=None, slurm_email=None, slurm_timeout=None, cpus=None, slurm_mem=None): + def tracking(self, folder_path=None, streamline_number:int=100000, max_angle:int=15, cutoff:float=0.1, msmtCSD:bool=True, output_filename:str="tractogram", save_as_trk=False, maskType:str="brain_mask", + slurm=None, patient_list_m=None, slurm_email=None, slurm_timeout=None, cpus=None, slurm_mem=None): """Computes the odf using MSMT-CSD for each subject. The outputs are available in the directories /subjects//dMRI/tractography/. 
example : study.tracking_solo() @@ -1027,7 +1069,7 @@ def tracking(self, folder_path=None, streamline_number:int=100000, max_angle:int if slurm: p_job = { - "wrap": "export MKL_NUM_THREADS="+ str(core_count)+" ; export OMP_NUM_THREADS="+ str(core_count)+" ; python -c 'from elikopy.individual_subject_processing import tracking_solo; tracking_solo(\"" + folder_path + "/\",\"" + p + "\", streamline_number =" + str(streamline_number) + ", msmtCSD=" + str(msmtCSD) + ", core_count=" + str(core_count) + ", max_angle="+ str(max_angle) + ")'", + "wrap": "export MKL_NUM_THREADS="+ str(core_count)+" ; export OMP_NUM_THREADS="+ str(core_count)+" ; python -c 'from elikopy.individual_subject_processing import tracking_solo; tracking_solo(\"" + folder_path + "/\",\"" + p + "\", streamline_number =" + str(streamline_number) + ", msmtCSD=" + str(msmtCSD) + ", core_count=" + str(core_count) + ", save_as_trk=" + str(save_as_trk) + ", cutoff=" + str(cutoff) + ", max_angle="+ str(max_angle) + ", maskType= \"" + maskType + "\"" + ", output_filename= \"" + output_filename + "\"" + ")'", "job_name": "TRACKING_" + p, "ntasks": 1, "cpus_per_task": core_count, @@ -1047,7 +1089,7 @@ def tracking(self, folder_path=None, streamline_number:int=100000, max_angle:int f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Patient %s is ready to be processed\n" % p) f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Successfully submited job %s using slurm\n" % p_job_id) else: - tracking_solo(folder_path + "/", p, streamline_number=streamline_number, max_angle=max_angle, msmtCSD=msmtCSD, core_count=core_count) + tracking_solo(folder_path + "/", p, streamline_number=streamline_number, max_angle=max_angle, cutoff=cutoff, msmtCSD=msmtCSD, output_filename=output_filename, core_count=core_count, save_as_trk=save_as_trk, maskType=maskType) matplotlib.pyplot.close(fig='all') f.write("["+log_prefix+"] " + 
datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Successfully applied tracking_solo on patient %s\n" % p) f.flush() @@ -1060,6 +1102,91 @@ def tracking(self, folder_path=None, streamline_number:int=100000, max_angle:int f=open(folder_path + "/logs.txt", "a+") f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": End of tractography\n") f.close() + + def sift(self, folder_path=None, streamline_number: int = 100000, + msmtCSD: bool = True, input_filename: str = "tractogram", save_as_trk = False, + slurm = None, patient_list_m = None, slurm_email = None, + slurm_timeout = None, cpus = None, slurm_mem = None): + """Computes the sifted odf using MSMT-CSD for each subject. The outputs are + available in the directories /subjects//dMRI/tractography/. + + example : study.sift_solo() + + :param folder_path: the path to the root directory. default=study_folder + :param patient_list_m: Define a subset of subjects to process instead of all the available subjects. example : ['patientID1','patientID2','patientID3']. default=None + :param slurm: Whether to use the Slurm Workload Manager or not (for computer clusters). default=value_during_init + :param slurm_email: Email adress to send notification if a task fails. default=None + :param slurm_timeout: Replace the default slurm timeout of 20h by a custom timeout. + :param slurm_cpus: Replace the default number of slurm cpus of 1 by a custom number of cpus of using slum, or for standard processing, its the number of core available for processing. + :param slurm_mem: Replace the default amount of ram allocated to the slurm task (8096MO by cpu) by a custom amount of ram. 
+ """ + log_prefix = "SIFT" + folder_path = self._folder_path if folder_path is None else folder_path + slurm = self._slurm if slurm is None else slurm + slurm_email = self._slurm_email if slurm_email is None else slurm_email + + f = open(folder_path + "/logs.txt", "a+") + f.write("["+log_prefix+"] " + + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + + ": Beginning of SIFT with slurm:" + str(slurm) + "\n") + f.close() + + dest_success = folder_path + "/subjects/subj_list.json" + with open(dest_success, 'r') as f: + patient_list = json.load(f) + + if patient_list_m: + patient_list = patient_list_m + + core_count = 4 if cpus is None else cpus + + job_list = [] + f = open(folder_path + "/logs.txt", "a+") + for p in patient_list: + patient_path = p + + tracking_path = (folder_path + '/subjects/' + patient_path + + "/dMRI/tractography") + makedir(tracking_path, folder_path + '/subjects/' + patient_path + + "/dMRI/tractography/sift_logs.txt", log_prefix) + + if slurm: + p_job = { + "wrap": "export MKL_NUM_THREADS="+ str(core_count)+" ; export OMP_NUM_THREADS="+ str(core_count)+" ; python -c 'from elikopy.individual_subject_processing import sift_solo; sift_solo(\"" + folder_path + "/\",\"" + p + "\", streamline_number =" + str(streamline_number) + ", msmtCSD=" + str(msmtCSD) + ", core_count=" + str(core_count) + ", save_as_trk=" + str(save_as_trk) + ", input_filename= \"" + input_filename + "\"" + ")'", + "job_name": "SIFT_" + p, + "ntasks": 1, + "cpus_per_task": core_count, + "mem_per_cpu": 4096, + "time": "00:25:00", + "mail_user": slurm_email, + "mail_type": "FAIL", + "output": folder_path + '/subjects/' + patient_path + "/dMRI/tractography/slurm-%j.out", + "error": folder_path + '/subjects/' + patient_path + "/dMRI/tractography/slurm-%j.err", + } + p_job["time"] = p_job["time"] if slurm_timeout is None else slurm_timeout + p_job["mem_per_cpu"] = p_job["mem_per_cpu"] if slurm_mem is None else slurm_mem + p_job_id = {} + p_job_id["id"] = submit_job(p_job) + 
p_job_id["name"] = p + job_list.append(p_job_id) + f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Patient %s is ready to be processed\n" % p) + f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Successfully submited job %s using slurm\n" % p_job_id) + else: + sift_solo(folder_path + "/", p, streamline_number=streamline_number, msmtCSD=msmtCSD, input_filename=input_filename, core_count=core_count, save_as_trk=save_as_trk) + matplotlib.pyplot.close(fig='all') + f.write("["+log_prefix+"] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Successfully applied sift_solo on patient %s\n" % p) + f.flush() + f.close() + + # Wait for all jobs to finish + if slurm: + elikopy.utils.getJobsState(folder_path, job_list, log_prefix) + + f = open(folder_path + "/logs.txt", "a+") + f.write("["+log_prefix+"] " + + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + + ": End of SIFT\n") + f.close() def white_mask(self, maskType, folder_path=None, patient_list_m=None, corr_gibbs=True, debug=False, slurm=None, slurm_email=None, slurm_timeout=None, cpus=None, slurm_mem=None): @@ -2315,19 +2442,20 @@ def patientlist_wrapper(self, function, func_args, folder_path=None, patient_lis for p in patient_list: patient_path = p if slurm_subpath is None: - slurm_path = folder_path + '/subjects/' + patient_path + '/' + "slurm-%j" + slurm_path = folder_path + '/subjects/' + patient_path + '/' else: - slurm_path = folder_path + '/subjects/' + patient_path + '/' + slurm_subpath + "/slurm-%j" + slurm_path = folder_path + '/subjects/' + patient_path + '/' + slurm_subpath if not (os.path.exists(slurm_path)): try: os.makedirs(slurm_path) except OSError: print("Creation of the directory %s failed" % slurm_path) - + slurm_path = slurm_path + "/slurm-%j" + if slurm: job = { - "wrap": "export OMP_NUM_THREADS=" + str(core_count) + " ; export FSLPARALLEL=" + str( + "wrap": "export OMP_NUM_THREADS=" + str(core_count) 
+ " ; export PYTHONUNBUFFERED=" + str(True) + " ; export FSLPARALLEL=" + str( core_count) + " ; python -c '" + baseImportCmd + "; "+function_name+"(\"" + str( folder_path) + "\",\"" + str( patient_path) + "\"", diff --git a/elikopy/individual_subject_processing.py b/elikopy/individual_subject_processing.py index 3c84905..ea6ccf4 100644 --- a/elikopy/individual_subject_processing.py +++ b/elikopy/individual_subject_processing.py @@ -2,16 +2,28 @@ import os import shutil import json - +import sys import numpy as np import math - +from scipy.ndimage.morphology import binary_dilation +from dipy.align.transforms import RigidTransform3D import subprocess -from elikopy.utils import makedir +from elikopy.utils import makedir, update_status + +import functools +print = functools.partial(print, flush=True) + +def motion_transform_wrapper(i, S0s, data, params0, affine, affreg): + """Wrapper function for motion_transform to handle a single volume.""" + transform = RigidTransform3D() # Reinitialize RigidTransform3D in each worker + return motion_transform(S0s, data[..., i], transform, params0, affine, affreg) +def motion_transform(S0s, data_i, transform, params0, affine, affreg): + return affreg.optimize(np.copy(S0s), np.copy(data_i), transform, params0, + affine, affine, ret_metric=True)[1] -def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoising=False, denoising_algorithm="mppca_mrtrix", gibbs=False, topup=False, topupConfig=None, forceSynb0DisCo=False, useGPUsynb0DisCo=False, eddy=False, biasfield=False, biasfield_bsplineFitting=[100,3], biasfield_convergence=[1000,0.001], static_files_path=None, starting_state=None, bet_median_radius=2, bet_numpass=1, bet_dilate=2, cuda=False, cuda_name="eddy_cuda10.1", s2v=[0,5,1,'trilinear'], olrep=[False, 4, 250, 'sw'], qc_reg=True, core_count=1, niter=5, report=True, slspec_gc_path=None): +def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoising=False, gibbs=False, topup=False, 
topupConfig=None, topupOnlyFirstB0=False, forceSynb0DisCo=False, useGPUsynb0DisCo=False, eddy=False, biasfield=False, biasfield_bsplineFitting=[100,3], biasfield_convergence=[1000,0.001], static_files_path=None, starting_state=None, bet_median_radius=2, bet_numpass=1, bet_dilate=2, cuda=False, cuda_name="eddy_cuda10.1", s2v=[0,8,1,'trilinear'], fwhm='10,8,4,2,0,0,0,0', olrep=[False, 4, 250, 'sw'], eddy_additional_arg="", qc_reg=True, core_count=1, niter=5, report=True, slspec_gc_path=None): """ Performs data preprocessing on a single subject. By default only the brain extraction is enabled. Optional preprocessing steps include : reslicing, denoising, gibbs ringing correction, susceptibility field estimation, EC-induced distortions and motion correction, bias field correction. The results are stored in the preprocessing subfolder of the study subject /subjects//dMRI/preproc. @@ -47,10 +59,10 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin """ in_reslice = reslice + curr_dmri = None assert starting_state in (None,"None", "denoising", "gibbs", "topup", "eddy", "biasfield", "report", "topup_synb0DisCo_Registration", "topup_synb0DisCo_Inference", "topup_synb0DisCo_Apply", "topup_synb0DisCo_topup"), 'invalid starting state!' if starting_state == "denoising": assert denoising == True, 'if starting_state is denoising, denoising must be True!' - assert denoising_algorithm in ["patch2self", "mppca_mrtrix", "mppca_dipy"], 'invalid denoising algorithm!' if starting_state == "gibbs": assert gibbs == True, 'if starting_state is gibbs, gibbs must be True!' 
if starting_state in ("topup", "topup_synb0DisCo_Registration", "topup_synb0DisCo_Inference", "topup_synb0DisCo_Apply", "topup_synb0DisCo_topup"): @@ -82,14 +94,32 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Beginning of individual preprocessing for patient %s \n" % p) f.close() + + anat_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1.nii.gz' + if os.path.exists(anat_path): + brain_extracted_T1_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1_brain.nii.gz' + brain_extracted_mask_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1_brain_mask.nii.gz' + if not os.path.exists(brain_extracted_T1_path): + cmd = f"mri_synth_strip -i {anat_path} -o {brain_extracted_T1_path} -m {brain_extracted_mask_path} " + import subprocess + process = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + output, error = process.communicate() + print(output) + print(error) + + if not os.path.exists(brain_extracted_T1_path): + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": T1 Brain extraction failed for patient %s \n" % p) + # print to std err + print("T1 Brain extraction failed for patient %s" % p, file=sys.stderr) + from dipy.io.image import load_nifti, save_nifti from dipy.segment.mask import median_otsu - from dipy.denoise.localpca import mppca nifti_path = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.nii.gz' if (starting_state == None): data, affine, voxel_size = load_nifti(nifti_path, return_voxsize=True) - + curr_dmri = data reslice_path = folder_path + '/subjects/' + patient_path + "/dMRI/preproc/reslice" if reslice and starting_state == None: makedir(reslice_path, folder_path + '/subjects/' + patient_path + 
"/dMRI/preproc/preproc_logs.txt", log_prefix) @@ -101,6 +131,8 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin if reslice_addSlice: data = np.insert(data, np.size(data,2), 0, axis=2) + curr_dmri = data + nifti_path = reslice_path + '/' + patient_path + '_reslice.nii.gz' save_nifti(reslice_path + '/' + patient_path + '_reslice.nii.gz', data, affine) print("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Reslice completed for patient %s \n" % p) @@ -108,24 +140,16 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Reslice completed for patient %s \n" % p) f.close() + if reslice: + nifti_path = reslice_path + '/' + patient_path + '_reslice.nii.gz' if starting_state == None: from elikopy.utils import clean_mask - b0_mask, mask = median_otsu(data, median_radius=bet_median_radius, numpass=bet_numpass, vol_idx=range(0, np.shape(data)[3]), dilate=bet_dilate) + _, mask = median_otsu(curr_dmri, median_radius=bet_median_radius, numpass=bet_numpass, vol_idx=range(0, np.shape(curr_dmri)[3]), dilate=bet_dilate) mask = clean_mask(mask) save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz',mask.astype(np.float32), affine) - save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_mask.nii.gz',b0_mask.astype(np.float32), affine) - save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask_dilated.nii.gz', - mask.astype(np.float32), affine) - - _, mask_nodilate = median_otsu(data, median_radius=bet_median_radius, numpass=bet_numpass, - vol_idx=range(0, np.shape(data)[3]), dilate=None) - mask_nodilate = clean_mask(mask_nodilate) - - save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask.nii.gz', - 
mask_nodilate.astype(np.float32), affine) print("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Brain extraction completed for patient %s \n" % p) @@ -135,21 +159,15 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin f.close() if not denoising and not eddy and not gibbs and not topup and not biasfield: - save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.nii.gz', b0_mask.astype(np.float32), affine) - save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask_dilated.nii.gz', mask.astype(np.float32), affine) + save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc_nomask.nii.gz', curr_dmri.astype(np.float32), affine) shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bval", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bval") shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bvec", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bvec") - if denoising_algorithm in ['mppca_mrtrix', 'mppca_dipy']: - denoising_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/mppca' - denoising_ext = '_mppca.nii.gz' - elif denoising_algorithm == "patch2self": - from dipy.denoise.patch2self import patch2self + denoising_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/mppca' + denoising_ext = '_mppca.nii.gz' - denoising_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/patch2self' - denoising_ext = '_patch2self.nii.gz' if denoising and starting_state!="gibbs" and starting_state!="eddy" and (starting_state not in ("topup", "topup_synb0DisCo_Registration", "topup_synb0DisCo_Inference", "topup_synb0DisCo_Apply", "topup_synb0DisCo_topup")) and 
starting_state!="biasfield" and starting_state!="report": print("[" + log_prefix + "] " + datetime.datetime.now().strftime( @@ -162,40 +180,28 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Denoising launched for patient %s \n" % p) - if (starting_state == "denoising"): - mask_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz' - b0_mask_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_mask.nii.gz' - b0_mask, affine, voxel_size = load_nifti(b0_mask_path, return_voxsize=True) - mask, _ = load_nifti(mask_path) + import subprocess + bashCommand = "dwidenoise -nthreads " + str(core_count) + " " + nifti_path + \ + " " + denoising_path + '/' + patient_path + '_mppca.nii.gz' +\ + " -noise " + denoising_path + '/' + patient_path + '_sigmaNoise.nii.gz -force' - if denoising_algorithm == "mppca_mrtrix": - import subprocess - bet_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_mask.nii.gz' - bashCommand = "dwidenoise -nthreads " + str(core_count) + " " + bet_path + \ - " " + denoising_path + '/' + patient_path + '_mppca.nii.gz' +\ - " -noise " + denoising_path + '/' + patient_path + '_sigmaNoise.nii.gz -force' + process = subprocess.Popen(bashCommand, universal_newlines=True, shell=True, stdout=f, + stderr=subprocess.STDOUT) + + output, error = process.communicate() + + b0_reverse_raw_path= os.path.join(folder_path, 'subjects', patient_path, 'dMRI', 'raw', patient_path + '_b0_reverse.nii.gz') + if os.path.exists(b0_reverse_raw_path): + bashCommand = "dwidenoise -nthreads " + str(core_count) + " " + b0_reverse_raw_path + \ + " " + denoising_path + '/' + patient_path + '_b0_reverse_mppca.nii.gz' + \ + " -noise " + denoising_path + '/' + patient_path + '_b0_reverse_sigmaNoise.nii.gz -force' process = 
subprocess.Popen(bashCommand, universal_newlines=True, shell=True, stdout=f, stderr=subprocess.STDOUT) output, error = process.communicate() + denoised, _ = load_nifti(denoising_path + '/' + patient_path + '_mppca.nii.gz') - denoised, _ = load_nifti(denoising_path + '/' + patient_path + '_mppca.nii.gz') - - elif denoising_algorithm == "mppca_dipy": - pr = math.ceil((np.shape(b0_mask)[3] ** (1 / 3) - 1) / 2) - denoised, sigma = mppca(b0_mask, patch_radius=pr, return_sigma=True, mask = mask) - save_nifti(denoising_path + '/' + patient_path + '_sigmaNoise.nii.gz', sigma.astype(np.float32), affine) - save_nifti(denoising_path + '/' + patient_path + '_mppca.nii.gz', denoised.astype(np.float32), affine) - else: - bvals_path = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval' - bvals = np.loadtxt(bvals_path) - denoised = patch2self(b0_mask, bvals) - save_nifti(denoising_path + '/' + patient_path + '_patch2self.nii.gz', denoised.astype(np.float32), affine) - - # computes and save the residuals - rms_diff = np.sqrt((data - denoised) ** 2) - save_nifti(denoising_path + '/' + patient_path + '_patch2self_residuals.nii.gz', rms_diff.astype(np.float32), affine) f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Denoising finished for patient %s \n" % p) print("[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Denoising finished for patient %s \n" % p) @@ -205,13 +211,11 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin print("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": End of denoising for patient %s \n" % p) - b0_mask = denoised + curr_dmri = denoised if not eddy and not gibbs and not topup and not biasfield: - save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.nii.gz', - b0_mask.astype(np.float32), affine) - save_nifti(folder_path + 
'/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask_dilated.nii.gz', - mask.astype(np.float32), affine) + save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc_nomask.nii.gz', + curr_dmri.astype(np.float32), affine) shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bval", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bval") shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bvec", @@ -223,23 +227,20 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin makedir(gibbs_path, folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", log_prefix) if (starting_state == "gibbs"): - mask_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz' if not denoising: - b0_mask_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_mask.nii.gz' + curr_dmri_path = nifti_path else: - b0_mask_path = denoising_path + '/' + patient_path + denoising_ext - b0_mask, affine, voxel_size = load_nifti(b0_mask_path, return_voxsize=True) - mask, _ = load_nifti(mask_path) + curr_dmri_path = denoising_path + '/' + patient_path + denoising_ext + curr_dmri, affine, voxel_size = load_nifti(curr_dmri_path, return_voxsize=True) - data = gibbs_removal(b0_mask, num_threads=core_count) + data = gibbs_removal(curr_dmri, num_processes=core_count) corrected_path = folder_path + '/subjects/' + patient_path + "/dMRI/preproc/gibbs/" + patient_path + '_gibbscorrected.nii.gz' save_nifti(corrected_path, data.astype(np.float32), affine) + curr_dmri = data if not eddy and not topup and not biasfield: - save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.nii.gz', - data.astype(np.float32), affine) - 
save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask_dilated.nii.gz', - mask.astype(np.float32), affine) + save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc_nomask.nii.gz', + curr_dmri.astype(np.float32), affine) shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bval", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bval") shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bvec", @@ -248,7 +249,6 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin # Explicitly freeing memory import gc denoised = None - b0_mask = None mask = None data = None affine = None @@ -271,7 +271,16 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin elif denoising: imain_tot = denoising_path + '/' + patient_path + denoising_ext else: - imain_tot = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_mask.nii.gz' + imain_tot = nifti_path + if curr_dmri is None: + curr_dmri, affine, voxel_size = load_nifti(imain_tot, return_voxsize=True) + + if denoising: + b0_reverse_path = denoising_path + '/' + patient_path + '_b0_reverse_mppca.nii.gz' # Only if b0 reverse is 4D data + if not os.path.exists(b0_reverse_path): + b0_reverse_path = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_b0_reverse.nii.gz' + else: + b0_reverse_path = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_b0_reverse.nii.gz' multiple_encoding=False topup_log = open(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/topup/topup_logs.txt", "a+") @@ -281,10 +290,14 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin line = " ".join(line.split()) topup_index = [int(s) for s in line.split(' ')] - with 
open(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt') as f: - topup_acq = [[float(x) for x in line2.split()] for line2 in f] + if os.path.exists(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams_all.txt'): + with open(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams_all.txt') as f: + topup_acq = [[float(x) for x in line2.split()] for line2 in f] + else: + with open(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt') as f: + topup_acq = [[float(x) for x in line2.split()] for line2 in f] - #Find all the bo to extract. + #Find all the b0 to extract. current_index = 0 all_index ={} i=1 @@ -334,36 +347,128 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin f = open(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", "a+") f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Patient %s \n" % p + " has multiple direction of gradient encoding, launching topup directly ") + + # If any of the 3D dimension is an odd number, use b02b0_1.cnf as topup config + if np.any([x%2!=0 for x in curr_dmri.shape[:3]]) and topupConfig is None: + topupConfig = 'b02b0_1.cnf' topupConfig = 'b02b0.cnf' if topupConfig is None else topupConfig - bashCommand = 'export OMP_NUM_THREADS='+str(core_count)+' ; export FSLPARALLEL='+str(core_count)+' ; topup --imain="' + topup_path + '/b0.nii.gz" --config="' + topupConfig + '" --datain="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_estimate" --fout="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_fout_estimate" --iout="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_iout_estimate" --verbose' + + + # Extract b0s from main file: + + # Read 
bval file directly + path_bval = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bval" + with open(path_bval, "r") as bval_file: + bvals = list(map(float, bval_file.read().strip().split())) + + # Identify indices of b0 volumes + b0_indices = [idx for idx, bval in enumerate(bvals) if bval == 0] + + if not b0_indices or topupOnlyFirstB0: + if not b0_indices: + print("No b0 volumes found in dMRI.") + else: + print("Only using the first b0 volume for topup.") + b0_indices = [0] + + # Extract and concatenate non-sequential b0 volumes + temp_b0_files = [] + for i, b0_idx in enumerate(b0_indices): + temp_b0_file = f"{folder_path}/subjects/{patient_path}/dMRI/preproc/topup/temp_b0_{i}.nii.gz" + temp_b0_files.append(temp_b0_file) + fslroi_cmd = f"fslroi {imain_tot} {temp_b0_file} {b0_idx} 1" + try: + output = subprocess.check_output(fslroi_cmd, universal_newlines=True, + shell=True, stderr=subprocess.STDOUT) + print(output) + except subprocess.CalledProcessError as e: + print(f"Error extracting b0 volume at index {b0_idx}") + exit() + + # Merge all temporary b0 files into a single file + b0_part1_path = f"{topup_path}/b0_part1.nii.gz" + b0_part2_path = f"{topup_path}/b0_part2.nii.gz" + shutil.copyfile(b0_reverse_path, b0_part2_path) + merge_b0_cmd = f"fslmerge -t {b0_part1_path} " + " ".join(temp_b0_files) + try: + output = subprocess.check_output(merge_b0_cmd, universal_newlines=True, shell=True, + stderr=subprocess.STDOUT) + print(output) + except subprocess.CalledProcessError as e: + print("Error when merging b0 volumes") + exit() + + # Cleanup temporary files + for temp_file in temp_b0_files: + os.remove(temp_file) + + # Merge extracted b0 volumes with the original DW-MRI file + if os.path.exists(f"{topup_path}/b0.nii.gz"): + os.remove(f"{topup_path}/b0.nii.gz") + b0_path = f"{topup_path}/b0.nii.gz" + merge_b0_cmd = f"fslmerge -t {b0_path} {b0_part1_path} {b0_part2_path} " + try: + print(merge_b0_cmd) + output = 
subprocess.check_output(merge_b0_cmd, universal_newlines=True, shell=True, + stderr=subprocess.STDOUT) + print(output) + except subprocess.CalledProcessError as e: + print("Error when merging b0 volumes with the original DW-MRI") + exit() + + # Generate acqparams_alL.txt file + with open(folder_path + "/subjects/" + patient_path + '/dMRI/raw/' + 'acqparams_original.txt') as f1: + original_acq = [[float(x) for x in line2.split()] for line2 in f1] + + with open(folder_path + "/subjects/" + patient_path + '/dMRI/raw/' + 'acqparams_reverse.txt') as f2: + reverse_acq = [[float(x) for x in line2.split()] for line2 in f2] + + # Get number of b0 in b0_part2 using index_reverse.txt + with open(folder_path + "/subjects/" + patient_path + '/dMRI/raw/' + 'index_reverse.txt') as f3: + b0_num_reverse = len([int(x) for x in f3.read().strip().split()]) + + for i in range(len(b0_indices)-1): + original_acq.append([original_acq[0][0], original_acq[0][1], original_acq[0][2], original_acq[0][3]]) + for i in range(b0_num_reverse): + original_acq.append([reverse_acq[0][0], reverse_acq[0][1], reverse_acq[0][2], reverse_acq[0][3]]) + + with open(f"{topup_path}/acqparams_all.txt", "w") as f4: + f4.writelines(' '.join(str(j) for j in i) + '\n' for i in original_acq) + + bashCommand = ('export OMP_NUM_THREADS='+str(core_count)+' ; export FSLPARALLEL='+str(core_count)+ + ' ; topup --imain="' + topup_path + '/b0.nii.gz" --config="' + topupConfig + + '" --datain="' + topup_path + '/acqparams_all.txt" '+ + '--out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_estimate" '+ + '--fout="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_fout_estimate" '+ + '--iout="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_iout_estimate" '+ + '--verbose --nthr='+str(core_count) + ' ') bashcmd = bashCommand.split() print("[" + log_prefix + "] " + 
datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Topup launched for patient %s \n" % p + " with bash command " + bashCommand) f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Topup launched for patient %s \n" % p + " with bash command " + bashCommand) - f.close() + process = subprocess.Popen(bashCommand, universal_newlines=True, shell=True, stdout=topup_log, stderr=subprocess.STDOUT) - # wait until topup finish + + # wait until apply topup finish output, error = process.communicate() - if not eddy: - inindex="" - first=True - for r in roi: - if first: - inindex = str(topup_index[r-1]) - else: - inindex = inindex + "," + str(topup_index[r-1]) + bashCommand2 = 'applytopup --method=jac --imain="' + imain_tot + '" --inindex=1 --datain="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --topup="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_estimate" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_corr"' + + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": applytopup launched for patient %s \n" % p + " with bash command " + bashCommand2) - bashCommand2 = 'applytopup --imain="' + imain_tot + '" --inindex='+inindex+' --datain="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --topup="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_estimate" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_corr"' + f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": applytopup launched for patient %s \n" % p + " with bash command " + bashCommand2) + f.close() - process2 = subprocess.Popen(bashCommand2, universal_newlines=True, shell=True, stdout=topup_log, - stderr=subprocess.STDOUT) - # 
wait until apply topup finish - output, error = process2.communicate() + process2 = subprocess.Popen(bashCommand2, universal_newlines=True, shell=True, stdout=topup_log, + stderr=subprocess.STDOUT) + # wait until apply topup finish + output, error = process2.communicate() else: f = open(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", "a+") @@ -392,13 +497,12 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin synb0DisCo_starting_step = "topup" synb0DisCo(folder_path,topup_path,patient_path,starting_step=synb0DisCo_starting_step,topup=True,gpu=useGPUsynb0DisCo, static_files_path=static_files_path) - if not eddy: - bashCommand2 = 'export OMP_NUM_THREADS='+str(core_count)+' ; export FSLPARALLEL='+str(core_count)+' ; applytopup --imain="' + imain_tot + '" --inindex=1 --datain="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --topup="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_estimate" --method=jac --interp=spline --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_corr"' + bashCommand2 = 'export OMP_NUM_THREADS='+str(core_count)+' ; export FSLPARALLEL='+str(core_count)+' ; applytopup --imain="' + imain_tot + '" --inindex=1 --datain="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --topup="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_estimate" --method=jac --interp=spline --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_corr"' - process2 = subprocess.Popen(bashCommand2, universal_newlines=True, shell=True, stdout=topup_log, - stderr=subprocess.STDOUT) - # wait until apply topup finish - output, error = process2.communicate() + process2 = subprocess.Popen(bashCommand2, universal_newlines=True, shell=True, stdout=topup_log, + 
stderr=subprocess.STDOUT) + # wait until apply topup finish + output, error = process2.communicate() topup_log.close() @@ -410,18 +514,90 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin "%d.%b %Y %H:%M:%S") + ": End of topup for patient %s \n" % p) f.close() + + ## Compute pre eddy/biasfield mask + gc.collect() + from elikopy.utils import clean_mask + + topup_corr_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_corr.nii.gz' + + topup_corr, affine = load_nifti(topup_corr_path) + topup_corr_b0_ref = topup_corr[..., 0] + dwiref_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_corr_dwiref.nii.gz' + save_nifti(dwiref_path, topup_corr_b0_ref.astype(np.float32), affine) + topup_corr_b0_ref = None + topup_corr = None + gc.collect() + + + # Step 1 : dwi2mask + bvec_path = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec' + bval_path = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval' + dwi2mask_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_space-dwi_type-dwi2mask_brainmask.nii.gz' + cmd = f"dwi2mask -fslgrad {bvec_path} {bval_path} {topup_corr_path} {dwi2mask_path} -force " + import subprocess + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": dwi2mask launched for patient %s \n" % p + " with bash command " + cmd) + f = open(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", "a+") + f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": dwi2mask launched for patient %s \n" % p + " with bash command " + cmd) + f.flush() + process = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=f, stderr=subprocess.STDOUT) + # wait until dwi2mask finish + output, error = process.communicate() + 
f.flush() + f.close() + + gc.collect() + # Step 2 : mri_synth_strip on dwiref + mrisynthstrip_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_space-dwi_type-mrisynthstrip_brainmask.nii.gz' + cmd = f"mri_synth_strip -i {dwiref_path} -m {mrisynthstrip_path}" + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": mri_synth_strip launched for patient %s \n" % p + " with bash command " + cmd) + f = open(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", "a+") + f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": mri_synth_strip launched for patient %s \n" % p + " with bash command " + cmd) + f.flush() + process = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=f, stderr=subprocess.STDOUT) + output, error = process.communicate() + f.flush() + f.close() + + # Step 3 : median otsu on preprocess data + topup_corr, affine = load_nifti(topup_corr_path) + _, mask = median_otsu(topup_corr, median_radius=2, numpass=1, vol_idx=range(0, np.shape(topup_corr)[3]), + dilate=2) + mask = clean_mask(mask) + save_nifti( + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_type-otsu_dilate-2_brainmask.nii.gz', + mask.astype(np.float32), affine) + + # Step 4: Apply all masks to preprocess data + dwi2mask_mask, _ = load_nifti(dwi2mask_path) + mrisynthstrip_mask, _ = load_nifti(mrisynthstrip_path) + + full_mask = np.logical_or(mask, np.logical_or(dwi2mask_mask, mrisynthstrip_mask)) + full_mask = binary_dilation(full_mask, iterations=1) + + topup_corr_full_mask_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_brain_mask.nii.gz' + save_nifti(topup_corr_full_mask_path, + full_mask.astype(np.float32), affine) + if not eddy and not biasfield: data, affine = load_nifti( folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + 
patient_path + "_unwarped.nii.gz") - b0_mask, mask = median_otsu(data, median_radius=2, numpass=1, vol_idx=range(0, np.shape(data)[3]), dilate=2) - save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.nii.gz', - b0_mask.astype(np.float32), affine) - save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask_dilated.nii.gz', - mask.astype(np.float32), affine) + save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc_nomask.nii.gz', data.astype(np.float32), affine) shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bval", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bval") shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bvec", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bvec") + save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask.nii.gz', full_mask.astype(np.float32), affine) + + if topup: + topup_corr_full_mask_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_brain_mask.nii.gz' + processing_mask = topup_corr_full_mask_path + else: + processing_mask = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz' if eddy and starting_state!="biasfield" and starting_state!="report": print("[" + log_prefix + "] " + datetime.datetime.now().strftime( @@ -435,45 +611,43 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin else: eddycmd = "export OMP_NUM_THREADS="+str(core_count)+" ; export FSLPARALLEL="+str(core_count)+" ; eddy" - fwhm = '10' - for _ in range(niter-1): - fwhm = fwhm + ',0' if s2v[0] != 0: slspec_path = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 
'slspec.txt' if slspec_gc_path is not None and os.path.isdir(slspec_gc_path): if gibbs: - bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/gibbs/' + patient_path + '_gibbscorrected.nii.gz" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --ge_slspecs="' + slspec_gc_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/gibbs/' + patient_path + '_gibbscorrected.nii.gz" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + 
str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --ge_slspecs="' + slspec_gc_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' elif denoising: - bashCommand = eddycmd + ' --imain="' + denoising_path + '/' + patient_path + denoising_ext + '" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --ge_slspecs="' + slspec_gc_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + denoising_path + '/' + patient_path + denoising_ext + '" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps 
--residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --ge_slspecs="' + slspec_gc_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' else: - bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_mask.nii.gz" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --ge_slspecs="' + slspec_gc_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + nifti_path + '" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path 
+ '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --ge_slspecs="' + slspec_gc_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' elif os.path.isfile(slspec_path): if gibbs: - bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/gibbs/' + patient_path + '_gibbscorrected.nii.gz" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --slspec="' + slspec_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/gibbs/' + patient_path + '_gibbscorrected.nii.gz" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + 
patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --slspec="' + slspec_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' elif denoising: - bashCommand = eddycmd + ' --imain="' + denoising_path + '/' + patient_path + denoising_ext + '" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --slspec="' + slspec_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + denoising_path + '/' + patient_path + denoising_ext + '" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path 
+ '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --slspec="' + slspec_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' else: - bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_mask.nii.gz" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --slspec="' + slspec_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + nifti_path + '" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 
'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --slspec="' + slspec_path + '" --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' else: if gibbs: - bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/gibbs/' + patient_path + '_gibbscorrected.nii.gz" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/gibbs/' + 
patient_path + '_gibbscorrected.nii.gz" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' elif denoising: - bashCommand = eddycmd + ' --imain="' + denoising_path + '/' + patient_path + denoising_ext + '" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + 
denoising_path + '/' + patient_path + denoising_ext + '" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' else: - bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_mask.nii.gz" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + 
bashCommand = eddycmd + ' --imain="' + nifti_path + '" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --mporder=' + str(s2v[0]) + ' --s2v_niter=' + str(s2v[1]) + ' --s2v_lambda=' + str(s2v[2]) + ' --s2v_interp=' + s2v[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' else: if gibbs: - bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/gibbs/' + patient_path + '_gibbscorrected.nii.gz" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + 
'/dMRI/preproc/gibbs/' + patient_path + '_gibbscorrected.nii.gz" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' elif denoising: - bashCommand = eddycmd + ' --imain="' + denoising_path + '/' + patient_path + denoising_ext + '" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + denoising_path + '/' + patient_path + denoising_ext + '" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + 
patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' else: - bashCommand = eddycmd + ' --imain="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_mask.nii.gz" --mask="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_binary_mask.nii.gz" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' + bashCommand = eddycmd + ' --imain="' + nifti_path + '" --mask="' + processing_mask + '" --acqp="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --index="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt" --bvecs="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.bvec" --bvals="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + 
'_raw_dmri.bval" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + '_eddy_corr" --verbose --cnr_maps --residuals --repol=' + str(olrep[0]) + ' --ol_nstd=' + str(olrep[1]) + ' --ol_nvox=' + str(olrep[2]) + ' --ol_type=' + olrep[3] + ' --fwhm=' + fwhm + ' --niter=' + str(niter) + ' --slm=linear' if topup: bashCommand = bashCommand + ' --topup="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_estimate"' import subprocess + bashCommand = bashCommand + " " + eddy_additional_arg bashcmd = bashCommand.split() print("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Eddy launched for patient %s \n" % p + " with bash command " + bashCommand) @@ -497,15 +671,12 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin "%d.%b %Y %H:%M:%S") + ": End of eddy for patient %s \n" % p) f.close() - data, affine = load_nifti( - folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + "_eddy_corr.nii.gz") - b0_mask, mask = median_otsu(data, median_radius=2, numpass=1, vol_idx=range(0, np.shape(data)[3]), dilate=2) - if not biasfield: - save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.nii.gz', - b0_mask.astype(np.float32), affine) - save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask_dilated.nii.gz', - mask.astype(np.float32), affine) + data, affine = load_nifti( + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/eddy/' + patient_path + "_eddy_corr.nii.gz") + save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc_nomask.nii.gz', + data.astype(np.float32), affine) + shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bval", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + 
"_dmri_preproc.bval") shutil.copyfile( @@ -527,33 +698,16 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin elif denoising: inputImage = denoising_path + '/' + patient_path + denoising_ext else: - inputImage = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/bet/' + patient_path + '_mask.nii.gz' - - #bashCommand= "N4BiasFieldCorrection -i " + inputImage + " -o [" + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/' + patient_path + "_biasfield_corr.nii.gz, " + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/' + patient_path + "_biasfield_est.nii.gz] -d 4" - + inputImage = nifti_path - bashCommand = 'export OMP_NUM_THREADS='+str(core_count)+' ; dwibiascorrect ants {} {} -fslgrad {} {} -mask {} -bias {} -scratch {} -force -info -nthreads {}'.format( + bashCommand = 'export OMP_NUM_THREADS='+str(core_count)+' ; dwibiascorrect ants {} {} -fslgrad {} {} -bias {} -scratch {} -force -info -nthreads {}'.format( inputImage, folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/' + patient_path + "_biasfield_corr.nii.gz", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bvec", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bval", - folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask_dilated.nii.gz', folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/' + patient_path + "_biasfield_est.nii.gz", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/tmp', core_count) - '''bashCommand = 'export OMP_NUM_THREADS=' + str( - core_count) + ' ; dwibiascorrect ants {} {} -fslgrad {} {} -mask {} -bias {} -scratch {} -force -info -nthreads {} -ants.b {} -ants.c {} '.format( - inputImage, - folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/' + patient_path + "_biasfield_corr.nii.gz", - folder_path + '/subjects/' + 
patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bvec", - folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bval", - folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask_dilated.nii.gz', - folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/' + patient_path + "_biasfield_est.nii.gz", - folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/tmp', - core_count, - biasfield_bsplineFitting, - biasfield_convergence)''' - import subprocess bashcmd = bashCommand.split() print("[" + log_prefix + "] " + datetime.datetime.now().strftime( @@ -571,19 +725,9 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin output, error = process.communicate() biasfield_log.close() - #shutil.rmtree(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/tmp', ignore_errors=True) - shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/' + patient_path + '_biasfield_corr.nii.gz', - folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.nii.gz') + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc_nomask.nii.gz') - data, affine = load_nifti( - folder_path + '/subjects/' + patient_path + '/dMRI/preproc/biasfield/' + patient_path + '_biasfield_corr.nii.gz') - b0_mask, mask = median_otsu(data, median_radius=2, numpass=1, vol_idx=range(0, np.shape(data)[3]), dilate=2) - - save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.nii.gz', - b0_mask.astype(np.float32), affine) - save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask_dilated.nii.gz', - mask.astype(np.float32), affine) if not eddy: shutil.copyfile(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bval", @@ -607,7 +751,85 @@ def 
preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin f.write(output) f.truncate() + # Generate b0 ref: + preproc, affine = load_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc_nomask.nii.gz') + b0_ref = preproc[..., 0] + save_nifti(folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dwiref.nii.gz', b0_ref.astype(np.float32), affine) + + preproc = None + b0_ref = None + gc.collect() + #### Generate final mask #### + from elikopy.utils import clean_mask + + # Step 1 : dwi2mask + preproc_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc_nomask.nii.gz' + bvec_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.bvec' + bval_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.bval' + dwi2mask_path = folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_space-dwi_type-dwi2mask_brainmask.nii.gz' + cmd = f"dwi2mask -fslgrad {bvec_path} {bval_path} {preproc_path} {dwi2mask_path} -force" + import subprocess + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": dwi2mask launched for patient %s \n" % p + " with bash command " + cmd) + f = open(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", "a+") + f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": dwi2mask launched for patient %s \n" % p + " with bash command " + cmd) + f.flush() + process = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=f, stderr=subprocess.STDOUT) + # wait until dwi2mask finish + output, error = process.communicate() + f.flush() + f.close() + + # Step 2 : mri_synth_strip on dwiref + dwiref_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dwiref.nii.gz' + mrisynthstrip_path = 
folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_space-dwi_type-mrisynthstrip_brainmask.nii.gz' + cmd = f"mri_synth_strip -i {dwiref_path} -m {mrisynthstrip_path}" + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": mri_synth_strip launched for patient %s \n" % p + " with bash command " + cmd) + f = open(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", "a+") + f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": mri_synth_strip launched for patient %s \n" % p + " with bash command " + cmd) + f.flush() + process = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=f, stderr=subprocess.STDOUT) + output, error = process.communicate() + f.flush() + f.close() + + # Step 3 : median otsu on preprocess data + preproc, affine = load_nifti( + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc_nomask.nii.gz') + preproc_masked, mask = median_otsu(preproc, median_radius=2, numpass=1, vol_idx=range(0, np.shape(preproc)[3]), dilate=2) + mask = clean_mask(mask) + save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_type-otsu_dilate-2_brainmask.nii.gz', + mask.astype(np.float32), affine) + + # Step 4: Apply all masks to preprocess data + dwi2mask_mask, _ = load_nifti(dwi2mask_path) + mrisynthstrip_mask, _ = load_nifti(mrisynthstrip_path) + + all_mask = mask + dwi2mask_mask + mrisynthstrip_mask + full_mask = np.zeros_like(all_mask) + full_mask[all_mask >= 2] = 1 + full_mask = clean_mask(full_mask) + + full_mask_inclusive = np.logical_or(mask, np.logical_or(dwi2mask_mask, mrisynthstrip_mask)) + full_mask_inclusive = clean_mask(full_mask_inclusive) + # dilate mask + full_mask_inclusive = binary_dilation(full_mask_inclusive, iterations=1) + preproc_masked = preproc * full_mask_inclusive[..., np.newaxis] + + save_nifti(folder_path + '/subjects/' + patient_path + 
'/dMRI/preproc/' + patient_path + '_dmri_preproc.nii.gz', + preproc_masked.astype(np.float32), affine) + save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask.nii.gz', + full_mask.astype(np.float32), affine) + save_nifti(folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_brain_mask_dilated.nii.gz', + full_mask_inclusive.astype(np.float32), affine) + + + if not report: + update_status(folder_path, patient_path, "preproc") return print("[" + log_prefix + "] " + datetime.datetime.now().strftime( @@ -626,11 +848,10 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin matplotlib.use('Agg') import matplotlib.pyplot as plt import dipy.reconst.dti as dti - from dipy.align.imaffine import (AffineMap, MutualInformationMetric, AffineRegistration) + from dipy.align.imaffine import MutualInformationMetric, AffineRegistration from dipy.align.transforms import RigidTransform3D from dipy.segment.mask import segment_from_cfa from dipy.segment.mask import bounding_box - from scipy.ndimage.morphology import binary_dilation from os.path import isdir from skimage import measure from fpdf import FPDF @@ -658,8 +879,13 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin reslice_data, reslice_affine = load_nifti(preproc_path + "reslice/" + patient_path + "_reslice.nii.gz") # bet data (stage to compare with final) - bet_data, bet_affine = load_nifti(preproc_path + "bet/" + patient_path + "_mask.nii.gz") mask_raw, mask_raw_affine = load_nifti(preproc_path + "bet/" + patient_path + "_binary_mask.nii.gz") + if bool_reslice: + bet_data = reslice_data * mask_raw[..., np.newaxis] + bet_affine = reslice_affine + else: + bet_data = raw_data * mask_raw[..., np.newaxis] + bet_affine = raw_affine # mppca data bool_mppca = isdir(os.path.join(preproc_path, "mppca")) @@ -688,6 +914,7 @@ def preproc_solo(folder_path, p, reslice=False, reslice_addSlice=False, denoisin # eddy data 
(=preproc total) bool_eddy = isdir(os.path.join(preproc_path, "eddy")) preproc_data, preproc_affine = load_nifti(preproc_path + patient_path + "_dmri_preproc.nii.gz") + preproc_nomask_data, preproc_nomask_affine = load_nifti(preproc_path + patient_path + "_dmri_preproc_nomask.nii.gz") mask_preproc, mask_preproc_affine = load_nifti(mask_path) bvals, bvecs = read_bvals_bvecs(preproc_path + patient_path + "_dmri_preproc.bval", preproc_path + patient_path + "_dmri_preproc.bvec") @@ -940,6 +1167,7 @@ def mask_to_coor(mask): tenmodel = dti.TensorModel(gtab_raw) _, maskSNR = median_otsu(raw_data, vol_idx=[0]) + maskSNR = clean_mask(maskSNR) tensorfit = tenmodel.fit(raw_data, mask=maskSNR) threshold = (0.5, 1, 0, 0.3, 0, 0.3) @@ -1174,8 +1402,8 @@ def mask_to_coor(mask): transform = RigidTransform3D() # =========================================================== - S0s_raw = bet_data[:, :, :, gtab_raw.b0s_mask] - S0s_preproc = preproc_data[:, :, :, gtab_preproc.b0s_mask] + S0s_raw = raw_data[:, :, :, gtab_raw.b0s_mask] + S0s_preproc = preproc_nomask_data[:, :, :, gtab_preproc.b0s_mask] volume = [] motion_raw = [] motion_proc = [] @@ -1190,16 +1418,20 @@ def mask_to_coor(mask): "%d.%b %Y %H:%M:%S") + ": Starting Eddy QC_REG for patient %s with multicore enabled \n" % p) f.close() from concurrent.futures import ProcessPoolExecutor - for i in range(np.shape(preproc_data)[3]): - #print('current iteration : ', i, end="\r") - volume.append(i) - def motion_raw_transform(i): - return affreg.optimize(np.copy(S0s_raw[..., 0]), np.copy(bet_data[..., i]), transform, params0, bet_affine, - bet_affine, ret_metric=True)[1] - with ProcessPoolExecutor(max_workers=int(core_count*0.8)) as executor: - for r in executor.map(motion_raw_transform, range(np.shape(preproc_data)[3])): - motion_raw.append(r) + volume = list(range(np.shape(preproc_data)[3])) + # Process motion_raw using the wrapper function + with ProcessPoolExecutor(max_workers=int(core_count * 0.8)) as executor: + motion_raw = 
list(executor.map( + motion_transform_wrapper, + volume, + [S0s_raw[..., 0]] * len(volume), + [raw_data] * len(volume), + [params0] * len(volume), + [raw_affine] * len(volume), + [affreg] * len(volume) + )) + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": End of QC_REG motion_raw for patient %s" % p) @@ -1208,12 +1440,17 @@ def motion_raw_transform(i): "%d.%b %Y %H:%M:%S") + ": End of QC_REG motion_raw for patient %s \n" % p) f.close() - def motion_preproc_transform(i): - return affreg.optimize(np.copy(S0s_preproc[..., 0]), np.copy(preproc_data[..., i]), transform, params0, - preproc_affine, preproc_affine, ret_metric=True)[1] - with ProcessPoolExecutor(max_workers=int(core_count*0.8)) as executor: - for r in executor.map(motion_preproc_transform, range(np.shape(preproc_data)[3])): - motion_proc.append(r) + # Process motion_proc using the wrapper function + with ProcessPoolExecutor(max_workers=int(core_count * 0.8)) as executor: + motion_proc = list(executor.map( + motion_transform_wrapper, + volume, + [S0s_preproc[..., 0]] * len(volume), + [preproc_nomask_data] * len(volume), + [params0] * len(volume), + [preproc_nomask_affine] * len(volume), + [affreg] * len(volume) + )) print("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": End of QC_REG motion_preproc for patient %s" % p) @@ -1232,20 +1469,20 @@ def motion_preproc_transform(i): #print('current iteration : ', i, end="\r") volume.append(i) - rigid = affreg.optimize(np.copy(S0s_raw[..., 0]), np.copy(bet_data[..., i]), transform, params0, bet_affine, - bet_affine, ret_metric=True) + rigid = affreg.optimize(np.copy(S0s_raw[..., 0]), np.copy(raw_data[..., i]), transform, params0, raw_affine, + raw_affine, ret_metric=True) motion_raw.append(rigid[1]) - rigid = affreg.optimize(np.copy(S0s_preproc[..., 0]), np.copy(preproc_data[..., i]), transform, params0, - preproc_affine, preproc_affine, ret_metric=True) + rigid = 
affreg.optimize(np.copy(S0s_preproc[..., 0]), np.copy(preproc_nomask_data[..., i]), transform, params0, + preproc_nomask_affine, preproc_nomask_affine, ret_metric=True) motion_proc.append(rigid[1]) - if int(i/np.shape(preproc_data)[3])%5==0: + if int(i/np.shape(preproc_nomask_data)[3])%5==0: print("[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Eddy QC_REG : Volume " + str(i) + "/" + str(np.shape(preproc_data)[3]) + " \n") + "%d.%b %Y %H:%M:%S") + ": Eddy QC_REG : Volume " + str(i) + "/" + str(np.shape(preproc_nomask_data)[3]) + " \n") f = open(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", "a+") f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Eddy QC_REG : Volume " + str(i) + "/" + str(np.shape(preproc_data)[3]) + " \n") + "%d.%b %Y %H:%M:%S") + ": Eddy QC_REG : Volume " + str(i) + "/" + str(np.shape(preproc_nomask_data)[3]) + " \n") f.close() # ============================================================ @@ -1381,14 +1618,22 @@ def print_page(self, images): "%d.%b %Y %H:%M:%S") + ": Successfully processed patient %s \n" % p) f.close() + update_status(folder_path, patient_path, "preproc") + -def dti_solo(folder_path, p, maskType="brain_mask_dilated", report=True): +def dti_solo(folder_path, p, maskType="brain_mask_dilated", + use_all_shells: bool = False, report=True): """ - Computes the DTI metrics for a single subject. The outputs are available in the directories /subjects//dMRI/dti/. + Computes the DTI metrics for a single subject. The outputs are available in + the directories /subjects//dMRI/dti/. :param folder_path: the path to the root directory. :param p: The name of the patient. - :param use_wm_mask: If true a white matter mask is used. The white_matter() function needs to already be applied. default=False + :param use_wm_mask: If true a white matter mask is used. The white_matter() + function needs to already be applied. 
default=False + :param use_all_shells: Boolean. DTI will use all shells available, not just + shells <= 2000, this will cause a more defined white matter at the cost of + an erronous estimation of the CSF. The default is False. """ log_prefix = "DTI SOLO" print("[" + log_prefix + "] " + datetime.datetime.now().strftime( @@ -1421,10 +1666,19 @@ def dti_solo(folder_path, p, maskType="brain_mask_dilated", report=True): bvals, bvecs = read_bvals_bvecs( folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bval", folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bvec") + # Remove shells >2000================================ + if not use_all_shells: + indexes = np.argwhere(bvals < 2000+10) + indexes = indexes.squeeze() + bvals = bvals[indexes] + bvecs = bvecs[indexes] + data = data[..., indexes] + print('Warning: removing shells above b=2000 for DTI. To disable this, ' + + 'activate the use_all_shells option.') # create the model=================================== b0_threshold = np.min(bvals)+10 b0_threshold = max(50, b0_threshold) - gtab = gradient_table(bvals, bvecs,b0_threshold=b0_threshold) + gtab = gradient_table(bvals, bvecs, b0_threshold=b0_threshold) tenmodel = dti.TensorModel(gtab) tenfit = tenmodel.fit(data, mask=mask) # FA ================================================ @@ -1601,9 +1855,9 @@ def print_page(self, images): shutil.copyfile(qc_path + '/qc_report.pdf', folder_path + '/subjects/' + patient_path + '/quality_control.pdf') else: """Merge with QC of preproc"""; - from PyPDF2 import PdfFileMerger + from pypdf import PdfMerger pdfs = [folder_path + '/subjects/' + patient_path + '/quality_control.pdf', qc_path + '/qc_report.pdf'] - merger = PdfFileMerger() + merger = PdfMerger() for pdf in pdfs: merger.append(pdf) merger.write(folder_path + '/subjects/' + patient_path + '/quality_control_dti.pdf') @@ -1618,6 +1872,8 @@ def print_page(self, images): "%d.%b %Y %H:%M:%S") + ": 
Successfully processed patient %s \n" % p) f.close() + update_status(folder_path, patient_path, "dti") + def white_mask_solo(folder_path, p, maskType, corr_gibbs=True, core_count=1, debug=False): """ Computes a white matter mask for a single subject based on the T1 structural image or on the anisotropic power map @@ -1661,6 +1917,26 @@ def white_mask_solo(folder_path, p, maskType, corr_gibbs=True, core_count=1, deb f.close() # Read the moving image ==================================== anat_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1.nii.gz' + if os.path.exists(anat_path): + brain_extracted_T1_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1_brain.nii.gz' + brain_extracted_mask_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1_brain_mask.nii.gz' + if not os.path.exists(brain_extracted_T1_path): + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": T1 Brain extraction for patient %s (mri_synth_strip) \n" % p) + cmd = f"mri_synth_strip -i {anat_path} -o {brain_extracted_T1_path} -m {brain_extracted_mask_path} " + print("[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": " + cmd) + import subprocess + process = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + output, error = process.communicate() + print(output) + print(error) + + if not os.path.exists(brain_extracted_T1_path): + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": T1 Brain extraction failed for patient %s \n" % p) + # print to std err + print("T1 Brain extraction failed for patient %s" % p, file=sys.stderr) wm_log = open(folder_path + '/subjects/' + patient_path + "/masks/wm_logs.txt", "a+") @@ -1672,7 +1948,7 @@ def white_mask_solo(folder_path, p, maskType, corr_gibbs=True, core_count=1, deb "%d.%b %Y %H:%M:%S") + ": Beginning of 
gibbs for patient %s \n" % p) data_gibbs, affine_gibbs = load_nifti(anat_path) - data_gibbs = gibbs_removal(data_gibbs,num_threads=core_count) + data_gibbs = gibbs_removal(data_gibbs,num_processes=core_count) corrected_gibbs_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1_gibbscorrected.nii.gz' save_nifti(corrected_gibbs_path, data_gibbs.astype(np.float32), affine_gibbs) @@ -1688,28 +1964,10 @@ def white_mask_solo(folder_path, p, maskType, corr_gibbs=True, core_count=1, deb input_bet_path = anat_path # anat_path = folder_path + '/anat/' + patient_path + '_T1.nii.gz' - bet_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1_brain.nii.gz' - bashCommand = 'export OMP_NUM_THREADS='+str(core_count)+' ; export FSLPARALLEL='+str(core_count)+' ; bet ' + input_bet_path + ' ' + bet_path + ' -B' - if debug: - bashCommand = bashCommand + " -d" - bashcmd = bashCommand.split() - - wm_log.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Bet launched for patient %s \n" % p + " with bash command " + bashCommand) - print("[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Bet launched for patient %s \n" % p + " with bash command " + bashCommand) - - process = subprocess.Popen(bashCommand, universal_newlines=True, shell=True, stdout=wm_log,stderr=subprocess.STDOUT) - output, error = process.communicate() - - wm_log.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": End of BET for patient %s \n" % p) - print("[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": End of BET for patient %s \n" % p) - + brain_extracted_T1_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1_brain.nii.gz' wm_log.close() - moving_data, moving_affine = load_nifti(bet_path) + moving_data, moving_affine = load_nifti(brain_extracted_T1_path) moving = moving_data 
moving_grid2world = moving_affine # Read the static image ==================================== @@ -1827,7 +2085,7 @@ def white_mask_solo(folder_path, p, maskType, corr_gibbs=True, core_count=1, deb qball_model = shm.QballModel(gtab, 8) if core_count > 1: peaks = dp.peaks_from_model(model=qball_model, data=data, relative_peak_threshold=.5, min_separation_angle=25, - sphere=sphere, mask=mask, parallel=True, nbr_processes=core_count) + sphere=sphere, mask=mask, parallel=True, num_processes=core_count) else: peaks = dp.peaks_from_model(model=qball_model, data=data, relative_peak_threshold=.5, min_separation_angle=25, @@ -1870,7 +2128,7 @@ def white_mask_solo(folder_path, p, maskType, corr_gibbs=True, core_count=1, deb matplotlib.use('Agg') import matplotlib.pyplot as plt from fpdf import FPDF - from PyPDF2 import PdfFileMerger + from pypdf import PdfMerger T1_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1.nii.gz' T1gibbs_path = folder_path + '/subjects/' + patient_path + "/T1/" + patient_path + '_T1_gibbscorrected.nii.gz' @@ -2019,7 +2277,7 @@ def print_page(self, images): shutil.copyfile(qc_path + '/qc_report.pdf', folder_path + '/subjects/' + patient_path + '/quality_control.pdf') else: pdfs = [folder_path + '/subjects/' + patient_path + '/quality_control.pdf', qc_path + '/qc_report.pdf'] - merger = PdfFileMerger() + merger = PdfMerger() for pdf in pdfs: merger.append(pdf) merger.write(folder_path + '/subjects/' + patient_path + '/quality_control_wm.pdf') @@ -2036,6 +2294,7 @@ def print_page(self, images): "%d.%b %Y %H:%M:%S") + ": Successfully processed patient %s \n" % p) f.close() + update_status(folder_path, patient_path, maskType) def noddi_solo(folder_path, p, maskType="brain_mask_dilated", lambda_iso_diff=3.e-9, lambda_par_diff=1.7e-9, use_amico=False,core_count=1): """ Computes the NODDI metrics for a single. The outputs are available in the directories /subjects//dMRI/microstructure/noddi/. 
@@ -2267,9 +2526,9 @@ def print_page(self, images): shutil.copyfile(qc_path + '/qc_report.pdf', folder_path + '/subjects/' + patient_path + '/quality_control.pdf') else: """Merge with QC of preproc"""; - from PyPDF2 import PdfFileMerger + from pypdf import PdfMerger pdfs = [folder_path + '/subjects/' + patient_path + '/quality_control.pdf', qc_path + '/qc_report.pdf'] - merger = PdfFileMerger() + merger = PdfMerger() for pdf in pdfs: merger.append(pdf) merger.write(folder_path + '/subjects/' + patient_path + '/quality_control_noddi.pdf') @@ -2284,6 +2543,7 @@ def print_page(self, images): "%d.%b %Y %H:%M:%S") + ": Successfully processed patient %s \n" % p) f.close() + update_status(folder_path, patient_path, "noddi") def noddi_amico_solo(folder_path, p, maskType="brain_mask_dilated"): """ Perform noddi amico on a single subject and store the data in the /subjects//dMRI/microstructure/noddi_amico/. @@ -2342,6 +2602,8 @@ def noddi_amico_solo(folder_path, p, maskType="brain_mask_dilated"): "%d.%b %Y %H:%M:%S") + ": Successfully processed patient %s \n" % p) f.close() + update_status(folder_path, patient_path, "noddi_amico") + def diamond_solo(folder_path, p, core_count=4, reportOnly=False, maskType="brain_mask_dilated",customDiamond=""): """Computes the DIAMOND metrics for a single subject. The outputs are available in the directories /subjects//dMRI/microstructure/diamond/. 
@@ -2566,9 +2828,9 @@ def print_page(self, images): shutil.copyfile(qc_path + '/qc_report.pdf', folder_path + '/subjects/' + patient_path + '/quality_control.pdf') else: """Merge with QC of preproc"""; - from PyPDF2 import PdfFileMerger + from pypdf import PdfMerger pdfs = [folder_path + '/subjects/' + patient_path + '/quality_control.pdf', qc_path + '/qc_report.pdf'] - merger = PdfFileMerger() + merger = PdfMerger() for pdf in pdfs: merger.append(pdf) merger.write(folder_path + '/subjects/' + patient_path + '/quality_control_diamond.pdf') @@ -2583,8 +2845,11 @@ def print_page(self, images): "%d.%b %Y %H:%M:%S") + ": Successfully processed patient %s \n" % p) f.close() + update_status(folder_path, patient_path, "diamond") -def mf_solo(folder_path, p, dictionary_path, core_count=1, maskType="brain_mask_dilated", report=True, csf_mask=True, ear_mask=False, peaksType="MSMT-CSD", mfdir=None): +def mf_solo(folder_path, p, dictionary_path, core_count=1, maskType="brain_mask_dilated", + report=True, csf_mask=True, ear_mask=False, peaksType="MSMT-CSD", + mfdir=None, output_filename: str = ""): """Perform microstructure fingerprinting and store the data in the /subjects//dMRI/microstructure/mf/. :param folder_path: the path to the root directory. @@ -2593,7 +2858,9 @@ def mf_solo(folder_path, p, dictionary_path, core_count=1, maskType="brain_mask_ :param CSD_bvalue: If the DIAMOND outputs are not available, the fascicles directions are estimated using a CSD with the images at the b-values specified in this argument. default=None :param core_count: Define the number of available core. default=1 :param use_wm_mask: If true a white matter mask is used. The white_matter() function needs to already be applied. default=False + :param output_filename: str. Specify output filename. 
""" + log_prefix = "MF SOLO" print("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Beginning of individual microstructure fingerprinting processing for patient %s \n" % p, flush = True) @@ -2626,13 +2893,8 @@ def mf_solo(folder_path, p, dictionary_path, core_count=1, maskType="brain_mask_ # imports import microstructure_fingerprinting as mf - import microstructure_fingerprinting.mf_utils as mfu from dipy.io.image import load_nifti, save_nifti from dipy.io import read_bvals_bvecs - from dipy.core.gradients import gradient_table - from dipy.reconst.csdeconv import (ConstrainedSphericalDeconvModel, auto_response) - from dipy.direction import peaks_from_model - from dipy.data import default_sphere # load the data data, affine = load_nifti( @@ -2649,29 +2911,23 @@ def mf_solo(folder_path, p, dictionary_path, core_count=1, maskType="brain_mask_ folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + "_brain_mask_dilated.nii.gz") # compute numfasc and peaks - if os.path.exists(diamond_path) and peaksType=="DIAMOND": + if os.path.exists(diamond_path) and peaksType == "DIAMOND": f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Diamond Path found! 
MF will be based on diamond \n") tensor_files0 = folder_path + '/subjects/' + patient_path + "/dMRI/microstructure/diamond/" + patient_path + '_diamond_t0.nii.gz' tensor_files1 = folder_path + '/subjects/' + patient_path + "/dMRI/microstructure/diamond/" + patient_path + '_diamond_t1.nii.gz' fracs_file = folder_path + '/subjects/' + patient_path + "/dMRI/microstructure/diamond/" + patient_path + '_diamond_fractions.nii.gz' - (peaks, numfasc) = mf.cleanup_2fascicles(frac1=None, frac2=None, mu1=tensor_files0, mu2=tensor_files1, - peakmode='tensor', mask=mask, frac12=fracs_file) + (peaks, numfasc) = mf.cleanup_2fascicles(frac1=None, frac2=None, + mu1=tensor_files0, mu2=tensor_files1, + peakmode='tensor', mask=mask, + frac12=fracs_file) + color_order = 'rgb' elif peaksType == "CSD": - if os.path.exists(odf_csd_path + '/' + patient_path + '_CSD_peaks.nii.gz') and os.path.exists(odf_csd_path + '/' + patient_path + '_CSD_values.nii.gz'): - csd_peaks_peak_dirs, _ = load_nifti(odf_csd_path + '/' + patient_path + '_CSD_peaks.nii.gz') - csd_peaks_peak_values, _ = load_nifti(odf_csd_path + '/' + patient_path + '_CSD_values.nii.gz') - else: - odf_csd_solo(folder_path, patient_path, core_count=core_count) - csd_peaks_peak_dirs, _ = load_nifti(odf_csd_path + '/' + patient_path + '_CSD_peaks.nii.gz') - csd_peaks_peak_values, _ = load_nifti(odf_csd_path + '/' + patient_path + '_CSD_values.nii.gz') - # peaks=csd_peaks_peak_dirs - numfasc_2 = np.sum(csd_peaks_peak_values[:, :, :, 0] > 0.15) + np.sum( - csd_peaks_peak_values[:, :, :, 1] > 0.15) - print("Approximate number of non empty voxel: ", numfasc_2) - - # peaks = csd_peaks.peak_dirs - # peaks = np.reshape(peaks, (peaks.shape[0], peaks.shape[1], peaks.shape[2], 6), order='C') + csd_peaks_peak_dirs, _ = load_nifti(odf_csd_path + '/' + patient_path + '_CSD_peaks.nii.gz') + csd_peaks_peak_values, _ = load_nifti(odf_csd_path + '/' + patient_path + '_CSD_values.nii.gz') + numfasc_2 = (np.sum(csd_peaks_peak_values[:, :, :, 0] > 0.15) + 
+ np.sum(csd_peaks_peak_values[:, :, :, 1] > 0.15)) + print("Approximate number of non empty voxel: ", numfasc_2, flush=True) normPeaks0 = csd_peaks_peak_dirs[..., 0, :] normPeaks1 = csd_peaks_peak_dirs[..., 1, :] @@ -2686,28 +2942,42 @@ def mf_solo(folder_path, p, dictionary_path, core_count=1, maskType="brain_mask_ mu2 = normPeaks1 frac1 = csd_peaks_peak_values[..., 0] frac2 = csd_peaks_peak_values[..., 1] - (peaks, numfasc) = mf.cleanup_2fascicles(frac1=frac1, frac2=frac2, mu1=mu1, mu2=mu2, peakmode='peaks', + (peaks, numfasc) = mf.cleanup_2fascicles(frac1=frac1, frac2=frac2, + mu1=mu1, mu2=mu2, + peakmode='peaks', mask=mask, frac12=None) - elif peaksType=="MSMT-CSD": - if os.path.exists(odf_msmtcsd_path + '/' + patient_path + "_MSMT-CSD_peaks.nii.gz") and os.path.exists(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peaks_amp.nii.gz'): - msmtcsd_peaks_peak_dirs, _ = load_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peaks.nii.gz') - msmtcsd_peaks_peak_values, _ = load_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peaks_amp.nii.gz') - else: - odf_msmtcsd_solo(folder_path, patient_path, core_count=core_count) - msmtcsd_peaks_peak_dirs, _ = load_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peaks.nii.gz') - msmtcsd_peaks_peak_values, _ = load_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peaks_amp.nii.gz') + color_order = 'rgb' + elif peaksType == "MSMT-CSD": + + from elikopy.utils import get_acquisition_view + from scipy.linalg import polar + + msmtcsd_peaks_peak_dirs, _ = load_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peaks.nii.gz') + msmtcsd_peaks_peak_values, _ = load_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peaks_amp.nii.gz') - numfasc_2 = np.sum(msmtcsd_peaks_peak_values[:, :, :, 0] > 0.15) + np.sum( - msmtcsd_peaks_peak_values[:, :, :, 1] > 0.15) - print("Approximate number of non empty voxel: ", numfasc_2) + numfasc_2 = (np.sum(msmtcsd_peaks_peak_values[:, :, :, 0] > 0.15) + + 
np.sum(msmtcsd_peaks_peak_values[:, :, :, 1] > 0.15)) + print("Approximate number of non empty voxel: ", numfasc_2, flush=True) - # peaks = csd_peaks.peak_dirs - # peaks = np.reshape(peaks, (peaks.shape[0], peaks.shape[1], peaks.shape[2], 6), order='C') - msmtcsd_peaks_peak_dirs[..., 1] = -msmtcsd_peaks_peak_dirs[..., 1] - msmtcsd_peaks_peak_dirs[..., 2] = -msmtcsd_peaks_peak_dirs[..., 2] + # Polar decomposition + u, _ = polar(affine[0:3, 0:3]) - msmtcsd_peaks_peak_dirs[..., 4] = -msmtcsd_peaks_peak_dirs[..., 4] - msmtcsd_peaks_peak_dirs[..., 5] = -msmtcsd_peaks_peak_dirs[..., 5] + # Number of fixels + K = int(msmtcsd_peaks_peak_dirs.shape[-1]/3) + + # Rotate peaks to go from Mrtrix convention to Python + for k in range(K): + msmtcsd_peaks_peak_dirs[..., 3*k:3*k+3] = msmtcsd_peaks_peak_dirs[..., 3*k:3*k+3] @ u + + # TODO : Automate RGB selection to all views + view = get_acquisition_view(affine) + if view == 'axial': + color_order = 'rgb' + elif view == 'sagittal': + color_order = 'brg' + else: + color_order = 'rgb' + print("Warning: No correction found for the RGB colors of the current acquisition view. 
Defaulting to axial (RGB).") normPeaks0 = msmtcsd_peaks_peak_dirs[..., 0:3] normPeaks1 = msmtcsd_peaks_peak_dirs[..., 3:6] @@ -2722,7 +2992,9 @@ def mf_solo(folder_path, p, dictionary_path, core_count=1, maskType="brain_mask_ mu2 = normPeaks1 frac1 = msmtcsd_peaks_peak_values[..., 0] frac2 = msmtcsd_peaks_peak_values[..., 1] - (peaks, numfasc) = mf.cleanup_2fascicles(frac1=frac1, frac2=frac2, mu1=mu1, mu2=mu2, peakmode='peaks', + (peaks, numfasc) = mf.cleanup_2fascicles(frac1=frac1, frac2=frac2, + mu1=mu1, mu2=mu2, + peakmode='peaks', mask=mask, frac12=None) f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( @@ -2733,36 +3005,44 @@ def mf_solo(folder_path, p, dictionary_path, core_count=1, maskType="brain_mask_ f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Beginning of fitting\n") print("[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Beginning of fitting\n") + "%d.%b %Y %H:%M:%S") + ": Beginning of fitting\n", flush=True) import time parallel = False if core_count == 1 else True start = time.time() # Fit to data: - MF_fit = mf_model.fit(data, mask, numfasc, peaks=peaks, bvals=bvals, bvecs=bvecs, csf_mask=csf_mask, - ear_mask=ear_mask, verbose=3, parallel=parallel)#, cpus=core_count) + MF_fit = mf_model.fit(data, mask, numfasc, peaks=peaks, bvals=bvals, + bvecs=bvecs, csf_mask=csf_mask, ear_mask=ear_mask, + verbose=3, parallel=parallel) # , cpus=core_count) end = time.time() stats_header = "patient_id, elapsed_time, core_count" stats_val = p + ", " + str(end - start) + ", " + str(core_count) - print(stats_header) - print(stats_val) + print(stats_header, flush=True) + print(stats_val, flush=True) f.write(stats_header) f.write(stats_val) f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": End of fitting\n") - + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": End of fitting\n", 
flush=True) # extract info frac_f0 = MF_fit.frac_f0 fvf_tot = MF_fit.fvf_tot MSE = MF_fit.MSE R2 = MF_fit.R2 + if len(output_filename) > 0: + filename = '_mf_'+output_filename + else: + filename = '_mf' - MF_fit.write_nifti(mf_path + '/' + patient_path + '_mf.nii.gz', affine=affine) + MF_fit.write_nifti(mf_path + '/' + patient_path + filename+'.nii.gz', affine=affine) + print("[" + log_prefix + "] " + datetime.datetime.now().strftime( + "%d.%b %Y %H:%M:%S") + ": Saving of unravel pseudotensor dic\n", flush = True) # Export pseudo tensor frac = 0 frac_list = [] @@ -2770,50 +3050,49 @@ def mf_solo(folder_path, p, dictionary_path, core_count=1, maskType="brain_mask_ fvf_list = [] import nibabel as nib from elikopy.utils import peak_to_tensor - while os.path.exists(mf_path + '/' + patient_path + '_mf_peak_f'+str(frac)+'.nii.gz') and os.path.exists(mf_path + '/' + patient_path + '_mf_frac_f' + str(frac) + '.nii.gz'): - peaks_path = mf_path + '/' + patient_path + '_mf_peak_f' + str(frac) + '.nii.gz' - frac_path = mf_path + '/' + patient_path + '_mf_frac_f' + str(frac) + '.nii.gz' - fvf_path = mf_path + '/' + patient_path + '_mf_fvf_f' + str(frac) + '.nii.gz' + while os.path.exists(mf_path + '/' + patient_path + filename+'_peak_f'+str(frac)+'.nii.gz') and os.path.exists(mf_path + '/' + patient_path + filename+'_frac_f' + str(frac) + '.nii.gz'): + peaks_path = mf_path + '/' + patient_path + filename+'_peak_f' + str(frac) + '.nii.gz' + frac_path = mf_path + '/' + patient_path + filename+'_frac_f' + str(frac) + '.nii.gz' + fvf_path = mf_path + '/' + patient_path + filename+'_fvf_f' + str(frac) + '.nii.gz' img_mf_peaks = nib.load(peaks_path) img_mf_frac = nib.load(frac_path) hdr = img_mf_peaks.header pixdim = hdr['pixdim'][1:4] t = peak_to_tensor(img_mf_peaks.get_fdata(),norm=None,pixdim=pixdim) t_normed = peak_to_tensor(img_mf_peaks.get_fdata(), norm=img_mf_frac.get_fdata(),pixdim=pixdim) - hdr['dim'][0] = 5 # 4 scalar, 5 vector - hdr['dim'][4] = 1 # 3 - hdr['dim'][5] = 6 # 
1 + hdr['dim'][0] = 5 # 4 scalar, 5 vector + hdr['dim'][4] = 1 # 3 + hdr['dim'][5] = 6 # 1 hdr['regular'] = b'r' hdr['intent_code'] = 1005 - save_nifti(mf_path + '/' + patient_path + '_mf_peak_f' + str(frac) + '_pseudoTensor.nii.gz', t, img_mf_peaks.affine, hdr) - save_nifti(mf_path + '/' + patient_path + '_mf_peak_f' + str(frac) + '_pseudoTensor_normed.nii.gz', t_normed, img_mf_peaks.affine, hdr) - frac = frac + 1 + save_nifti(mf_path + '/' + patient_path + filename+'_peak_f' + str(frac) + '_pseudoTensor.nii.gz', t, img_mf_peaks.affine, hdr) + save_nifti(mf_path + '/' + patient_path + filename+'_peak_f' + str(frac) + '_pseudoTensor_normed.nii.gz', t_normed, img_mf_peaks.affine, hdr) - import TIME.utils - RGB_peak = TIME.utils.peaks_to_RGB([img_mf_peaks.get_fdata()]) - save_nifti(mf_path + '/' + patient_path + '_mf_peak_f' + str(frac) + '_RGB.nii.gz', RGB_peak, img_mf_frac.affine) + import unravel.utils + RGB_peak = unravel.utils.peaks_to_RGB(img_mf_peaks.get_fdata(), order=color_order) + save_nifti(mf_path + '/' + patient_path + filename+'_peak_f' + str(frac) + '_RGB.nii.gz', RGB_peak, img_mf_frac.affine) peaks_list.append(img_mf_peaks.get_fdata()) - - RGB_peak_frac = TIME.utils.peaks_to_RGB([img_mf_peaks.get_fdata()], [img_mf_frac.get_fdata()]) - save_nifti(mf_path + '/' + patient_path + '_mf_peak_f' + str(frac) + '_RGB_frac.nii.gz', RGB_peak_frac, img_mf_frac.affine) frac_list.append(img_mf_frac.get_fdata()) if os.path.exists(fvf_path): img_mf_fvf = nib.load(fvf_path) fvf_list.append(img_mf_fvf.get_fdata()) - RGB_peak_frac_fvf = TIME.utils.peaks_to_RGB([img_mf_peaks.get_fdata()], [img_mf_frac.get_fdata()], [img_mf_fvf.get_fdata()]) - save_nifti(mf_path + '/' + patient_path + '_mf_peak_f' + str(frac) + '_RGB_frac_fvf.nii.gz', RGB_peak_frac_fvf, img_mf_frac.affine) + frac = frac + 1 + + peaks=np.stack(peaks_list,axis=-1) + frac=np.stack(frac_list,axis=-1) + fvf=np.stack(fvf_list,axis=-1) if len(frac_list) > 0 and len(peaks_list) > 0: - RGB_peaks_frac = 
TIME.utils.peaks_to_RGB(peaks_list, frac_list) - save_nifti(mf_path + '/' + patient_path + '_mf_peak_tot_RGB_frac.nii.gz', RGB_peaks_frac, img_mf_frac.affine) + RGB_peaks_frac = unravel.utils.peaks_to_RGB(peaks, frac, order=color_order) + save_nifti(mf_path + '/' + patient_path + filename+'_peak_tot_RGB_frac.nii.gz', RGB_peaks_frac, img_mf_frac.affine) if len(frac_list) > 0 and len(peaks_list) > 0 and len(fvf_list)>0: - RGB_peaks_frac_fvf = TIME.utils.peaks_to_RGB(peaks_list, frac_list, fvf_list) - save_nifti(mf_path + '/' + patient_path + '_mf_peak_tot_RGB_frac_fvf.nii.gz', RGB_peaks_frac_fvf, img_mf_frac.affine) + RGB_peaks_frac_fvf = unravel.utils.peaks_to_RGB(peaks, frac, fvf, order=color_order) + save_nifti(mf_path + '/' + patient_path + filename+'_peak_tot_RGB_frac_fvf.nii.gz', RGB_peaks_frac_fvf, img_mf_frac.affine) print("[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Starting quality control %s \n" % p) + "%d.%b %Y %H:%M:%S") + ": Starting quality control %s \n" % p, flush = True) f.write("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y %H:%M:%S") + ": Starting quality control %s \n" % p) @@ -2941,9 +3220,9 @@ def print_page(self, images): folder_path + '/subjects/' + patient_path + '/quality_control.pdf') else: """Merge with QC of preproc"""; - from PyPDF2 import PdfFileMerger + from pypdf import PdfMerger pdfs = [folder_path + '/subjects/' + patient_path + '/quality_control.pdf', qc_path + '/qc_report.pdf'] - merger = PdfFileMerger() + merger = PdfMerger() for pdf in pdfs: merger.append(pdf) merger.write(folder_path + '/subjects/' + patient_path + '/quality_control_mf.pdf') @@ -2958,6 +3237,9 @@ def print_page(self, images): "%d.%b %Y %H:%M:%S") + ": Successfully processed patient %s \n" % p) f.close() + + update_status(folder_path, patient_path, "fingerprinting") + def odf_csd_solo(folder_path, p, num_peaks=2, peaks_threshold = .25, CSD_bvalue=None, core_count=1, maskType="brain_mask_dilated", 
report=True, CSD_FA_treshold=0.7, return_odf=False): """Perform microstructure fingerprinting and store the data in the /subjects//dMRI/microstructure/mf/. @@ -3082,21 +3364,22 @@ def odf_csd_solo(folder_path, p, num_peaks=2, peaks_threshold = .25, CSD_bvalue= save_nifti(odf_csd_path + '/' + patient_path + '_CSD_peak_f2_pseudoTensor.nii.gz', t_p2, affine, hdr) save_nifti(odf_csd_path + '/' + patient_path + '_CSD_peak_f2_pseudoTensor_normed.nii.gz', t_normed_p2, affine, hdr) - import TIME.utils - RGB_peak = TIME.utils.peaks_to_RGB([peaks1]) + import unravel.utils + RGB_peak = unravel.utils.peaks_to_RGB(peaks1) save_nifti(odf_csd_path + '/' + patient_path + '_CSD_peak_f1_RGB.nii.gz', RGB_peak, affine) - RGB_peak_frac = TIME.utils.peaks_to_RGB([peaks1], [frac1]) + RGB_peak_frac = unravel.utils.peaks_to_RGB(peaks1, frac1) save_nifti(odf_csd_path + '/' + patient_path + '_CSD_peak_f1_RGB_frac.nii.gz', RGB_peak_frac, affine) - RGB_peak = TIME.utils.peaks_to_RGB([peaks2]) + RGB_peak = unravel.utils.peaks_to_RGB(peaks2) save_nifti(odf_csd_path + '/' + patient_path + '_CSD_peak_f2_RGB.nii.gz', RGB_peak, affine) - RGB_peak_frac = TIME.utils.peaks_to_RGB([peaks2], [frac2]) + RGB_peak_frac = unravel.utils.peaks_to_RGB(peaks2, frac2) save_nifti(odf_csd_path + '/' + patient_path + '_CSD_peak_f2_RGB_frac.nii.gz', RGB_peak_frac, affine) - RGB_peaks = TIME.utils.peaks_to_RGB([peaks1, peaks2]) + RGB_peaks = unravel.utils.peaks_to_RGB(np.stack([peaks1, peaks2],axis=-1)) save_nifti(odf_csd_path + '/' + patient_path + '_CSD_peak_tot_RGB.nii.gz', RGB_peaks, affine) - RGB_peaks_frac = TIME.utils.peaks_to_RGB([peaks1, peaks2], [frac1, frac2]) + RGB_peaks_frac = unravel.utils.peaks_to_RGB(np.stack([peaks1, peaks2],axis=-1), + np.stack([frac1, frac2],axis=-1)) save_nifti(odf_csd_path + '/' + patient_path + '_CSD_peak_tot_RGB_frac.nii.gz', RGB_peaks_frac, affine) @@ -3107,6 +3390,7 @@ def odf_csd_solo(folder_path, p, num_peaks=2, peaks_threshold = .25, CSD_bvalue= "%d.%b %Y %H:%M:%S") + ": 
Starting quality control %s \n" % p) f.close() + update_status(folder_path, patient_path, "odf_csd") def odf_msmtcsd_solo(folder_path, p, core_count=1, num_peaks=2, peaks_threshold = 0.25, report=True, maskType="brain_mask_dilated"): """Perform MSMT CSD odf computation and store the data in the /subjects//dMRI/ODF/MSMT-CSD/. @@ -3165,24 +3449,16 @@ def odf_msmtcsd_solo(folder_path, p, core_count=1, num_peaks=2, peaks_threshold output, error = process.communicate() - if os.path.exists(odf_msmtcsd_path + '/' + "fixel"): - shutil.rmtree(odf_msmtcsd_path + '/' + "fixel") - - fod2fixel_cmd = "fod2fixel -force -nthreads " + str(core_count) + " -peak peaks.mif" + \ - " -maxnum " + str(num_peaks) + " " + \ + sh2peaks_cmd = "sh2peaks -force -nthreads " + str(core_count) + \ + " -num " + str(num_peaks) + " " + \ odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_WM_ODF.nii.gz ' + \ - odf_msmtcsd_path + '/' + "fixel ; " + odf_msmtcsd_path + '/' + patient_path + "_MSMT-CSD_peaks.nii.gz ; " - fixel2peak_cmd = "fixel2peaks -info -force -nthreads " + str(core_count) + " " + \ - odf_msmtcsd_path + '/' + "fixel " + odf_msmtcsd_path + '/' + patient_path + "_MSMT-CSD_peaks.nii.gz " +\ - " -number " + str(num_peaks) + " ; " + peaks2amp_cmd = "peaks2amp -force -nthreads " + str(core_count) + " " + \ + odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peaks.nii.gz ' + \ + odf_msmtcsd_path + '/' + patient_path + "_MSMT-CSD_peaks_amp.nii.gz ; " - fixel2voxel_cmd = "fixel2voxel -info -force -nthreads " + str(core_count) + " " + \ - odf_msmtcsd_path + '/' + "fixel/peaks.mif None " + odf_msmtcsd_path + \ - '/' + patient_path + "_MSMT-CSD_peaks_amp.nii.gz" + \ - " -number " + str(num_peaks) + " ; " - - bashCommand = 'export OMP_NUM_THREADS=' + str(core_count) + ' ; ' + fod2fixel_cmd + fixel2peak_cmd + fixel2voxel_cmd + bashCommand = 'export OMP_NUM_THREADS=' + str(core_count) + ' ; ' + sh2peaks_cmd + peaks2amp_cmd print("[" + log_prefix + "] " + datetime.datetime.now().strftime( "%d.%b %Y 
%H:%M:%S") + ": mrtrix ODF MSMT-CSD postprocessing launched for patient %s \n" % p + " with bash command " + bashCommand) @@ -3221,28 +3497,29 @@ def odf_msmtcsd_solo(folder_path, p, core_count=1, num_peaks=2, peaks_threshold hdr['intent_code'] = 1005 save_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peak_f1_pseudoTensor.nii.gz', t_p1, affine, hdr) - save_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT_peak_f1_pseudoTensor_normed.nii.gz', t_normed_p1, affine, + save_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peak_f1_pseudoTensor_normed.nii.gz', t_normed_p1, affine, hdr) save_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peak_f2_pseudoTensor.nii.gz', t_p2, affine, hdr) save_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peak_f2_pseudoTensor_normed.nii.gz', t_normed_p2, affine, hdr) - import TIME.utils + import unravel.utils - RGB_peak = TIME.utils.peaks_to_RGB([peaks_1_2[:,:,:,0:3]]) + RGB_peak = unravel.utils.peaks_to_RGB(peaks_1_2[:,:,:,0:3]) save_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peak_f1_RGB.nii.gz', RGB_peak, affine) - RGB_peak_frac = TIME.utils.peaks_to_RGB([peaks_1_2[:,:,:,0:3]], [frac_1_2[:, :, :, 0]]) - save_nifti(odf_msmtcsd_path + '/' + patient_path + '_CSD_peak_f1_RGB_frac.nii.gz', RGB_peak_frac, affine) + RGB_peak_frac = unravel.utils.peaks_to_RGB(peaks_1_2[:,:,:,0:3], frac_1_2[:, :, :, 0]) + save_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peak_f1_RGB_frac.nii.gz', RGB_peak_frac, affine) - RGB_peak = TIME.utils.peaks_to_RGB([peaks_1_2[:,:,:,3:6]]) + RGB_peak = unravel.utils.peaks_to_RGB(peaks_1_2[:,:,:,3:6]) save_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peak_f2_RGB.nii.gz', RGB_peak, affine) - RGB_peak_frac = TIME.utils.peaks_to_RGB([peaks_1_2[:,:,:,3:6]], [frac_1_2[:, :, :, 1]]) + RGB_peak_frac = unravel.utils.peaks_to_RGB(peaks_1_2[:,:,:,3:6], frac_1_2[:, :, :, 1]) save_nifti(odf_msmtcsd_path + '/' + patient_path + 
'_MSMT-CSD_peak_f2_RGB_frac.nii.gz', RGB_peak_frac, affine) - RGB_peaks = TIME.utils.peaks_to_RGB([peaks_1_2[:,:,:,0:3], peaks_1_2[:,:,:,3:6]]) + RGB_peaks = unravel.utils.peaks_to_RGB(np.stack([peaks_1_2[:,:,:,0:3], peaks_1_2[:,:,:,3:6]],axis=-1)) save_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peak_tot_RGB.nii.gz', RGB_peaks, affine) - RGB_peaks_frac = TIME.utils.peaks_to_RGB([peaks_1_2[:,:,:,0:3], peaks_1_2[:,:,:,3:6]], [frac_1_2[:, :, :, 0], frac_1_2[:, :, :, 1]]) + RGB_peaks_frac = unravel.utils.peaks_to_RGB(np.stack([peaks_1_2[:,:,:,0:3], peaks_1_2[:,:,:,3:6]],axis=-1), + np.stack([frac_1_2[:, :, :, 0], frac_1_2[:, :, :, 1]],axis=-1)) save_nifti(odf_msmtcsd_path + '/' + patient_path + '_MSMT-CSD_peak_tot_RGB_frac.nii.gz', RGB_peaks_frac, affine) @@ -3254,6 +3531,8 @@ def odf_msmtcsd_solo(folder_path, p, core_count=1, num_peaks=2, peaks_threshold f.close() + update_status(folder_path, patient_path, "odf_msmtcsd") + def ivim_solo(folder_path, p, core_count=1, G1Ball_2_lambda_iso=7e-9, G1Ball_1_lambda_iso=[.5e-9, 6e-9], maskType="brain_mask_dilated"): @@ -3443,9 +3722,9 @@ def print_page(self, images): shutil.copyfile(qc_path + '/qc_report.pdf', folder_path + '/subjects/' + patient_path + '/quality_control.pdf') else: """Merge with QC of preproc"""; - from PyPDF2 import PdfFileMerger + from pypdf import PdfMerger pdfs = [folder_path + '/subjects/' + patient_path + '/quality_control.pdf', qc_path + '/qc_report.pdf'] - merger = PdfFileMerger() + merger = PdfMerger() for pdf in pdfs: merger.append(pdf) merger.write(folder_path + '/subjects/' + patient_path + '/quality_control_ivim.pdf') @@ -3460,23 +3739,35 @@ def print_page(self, images): "%d.%b %Y %H:%M:%S") + ": Successfully processed patient %s \n" % p) f.close() -def tracking_solo(folder_path:str, p:str, streamline_number:int=100000, max_angle:int=15, msmtCSD:bool=True, core_count:int=1): + update_status(folder_path, patient_path, "ivim") + + +def tracking_solo(folder_path:str, p:str, 
streamline_number:int=100000, + max_angle:int=15, cutoff:float=0.1, msmtCSD:bool=True, + output_filename:str='tractogram',core_count:int=1, + maskType:str="brain_mask",save_as_trk=False): """ Computes the whole brain tractogram of a single patient based on the fod obtained from msmt-CSD. :param folder_path: the path to the root directory. :param p: The name of the patient. :param streamline_number: Number of streamlines in the final tractogram. default=100000 :param max_angle: Maximum angle between two tractography steps. default=15 + :param cutoff: Value below which streamlines do not propagate. default=0.1 :param msmtCSD: boolean. If True then uses ODF from msmt-CSD, if False from CSD. default=True + :param output_filename: str. Specify output filename for tractogram. + :param maskType: str. Specify a masking region of interest, streamlines exiting the mask will be truncated. """ - + + assert maskType in ["brain_mask_dilated","brain_mask"], "The mask parameter must be one of the following : brain_mask_dilated, brain_mask" + import nibabel as nib from elikopy.utils import dipy_fod_to_mrtrix from dipy.io.streamline import load_tractogram, save_trk - + patient_path = p - - params={'Number of streamlines':streamline_number, 'Maximum angle':max_angle} + + params={'Number of streamlines': streamline_number, 'Maximum angle': max_angle, + 'Cutoff value': cutoff, 'Mask type': maskType} if msmtCSD: if not os.path.isdir(folder_path + '/subjects/' + patient_path + "/dMRI/ODF/MSMT-CSD/"): @@ -3494,35 +3785,94 @@ def tracking_solo(folder_path:str, p:str, streamline_number:int=100000, max_angl odf_file_path = folder_path + '/subjects/' + patient_path + "/dMRI/ODF/CSD/"+patient_path + "_CSD_SH_ODF_mrtrix.nii.gz" params['Local modeling']='CSD' tracking_path = folder_path + '/subjects/' + patient_path + "/dMRI/tractography/" - mask_path = folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + "_brain_mask_dilated.nii.gz" + seed_path = folder_path + '/subjects/' + 
patient_path + '/masks/' + patient_path + "_brain_mask.nii.gz" + mask_path = folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + '_' + maskType + '.nii.gz' dwi_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.nii.gz' - output_file = tracking_path+patient_path+'_tractogram.tck' - + output_file = tracking_path+patient_path+'_'+output_filename+'.tck' + if not os.path.isdir(tracking_path): os.mkdir(tracking_path) - + bashCommand=('tckgen -nthreads ' + str(core_count) + ' ' + odf_file_path +' '+ output_file+ - ' -seed_image ' +mask_path+ + ' -seed_image ' +seed_path+ ' -select ' +str(streamline_number)+ ' -angle ' +str(max_angle)+ + ' -cutoff ' +str(cutoff)+ + ' -mask ' +mask_path+ ' -force') - + tracking_log = open(tracking_path+"tractography_logs.txt", "a+") - process = subprocess.Popen(bashCommand, universal_newlines=True, shell=True, stdout=tracking_log, - stderr=subprocess.STDOUT) + process = subprocess.Popen(bashCommand, universal_newlines=True, shell=True, + stdout=tracking_log, stderr=subprocess.STDOUT) process.communicate() tracking_log.close() - - tract = load_tractogram(output_file, dwi_path) - save_trk(tract, output_file[:-3]+'trk') - + if save_as_trk: + tract = load_tractogram(output_file, dwi_path) + + save_trk(tract, output_file[:-3]+'trk') + with open(output_file[:-3]+'json', 'w') as outfile: json.dump(params, outfile) - + + update_status(folder_path, patient_path, "tracking") + + +def sift_solo(folder_path: str, p: str, streamline_number: int = 100000, + msmtCSD: bool = True, input_filename: str = 'tractogram', + core_count: int = 1, save_as_trk=False): + """ Computes the sifted whole brain tractogram of a single patient based on + the fod obtained from msmt-CSD. + + :param folder_path: the path to the root directory. + :param p: The name of the patient. + :param streamline_number: Number of streamlines in the final tractogram. default=100000 + :param msmtCSD: boolean. 
If True then uses ODF from msmt-CSD, if False from CSD. default=True + :param input_filename: str. Specify input filename for tractogram. + """ + + from dipy.io.streamline import load_tractogram, save_trk + + patient_path = p + + if msmtCSD: + odf_file_path = (folder_path + '/subjects/' + patient_path + + "/dMRI/ODF/MSMT-CSD/"+patient_path + + "_MSMT-CSD_WM_ODF.nii.gz") + else: + odf_file_path = (folder_path + '/subjects/' + patient_path + + "/dMRI/ODF/CSD/"+patient_path + + "_CSD_SH_ODF_mrtrix.nii.gz") + + tracking_path = folder_path + '/subjects/' + patient_path + "/dMRI/tractography/" + mask_path = folder_path + '/subjects/' + patient_path + '/masks/' + patient_path + "_brain_mask_dilated.nii.gz" + dwi_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + '_dmri_preproc.nii.gz' + + input_file = tracking_path+patient_path+'_'+input_filename+'.tck' + output_file = tracking_path+patient_path+'_'+input_filename+'_sift.tck' + + bashCommand=('tcksift ' + input_file + ' ' + odf_file_path + ' ' + + output_file + + ' -nthreads ' + str(core_count) + + ' -term_number ' + str(streamline_number) + + ' -force') + + sift_log = open(tracking_path+"sift_logs.txt", "a+") + process = subprocess.Popen(bashCommand, universal_newlines=True, shell=True, + stdout=sift_log, stderr=subprocess.STDOUT) + + process.communicate() + sift_log.close() + + if save_as_trk: + tract = load_tractogram(output_file, dwi_path) + save_trk(tract, output_file[:-3]+'trk') + + update_status(folder_path, patient_path, "siftComputation") + def verdict_solo(folder_path, p, core_count=1, small_delta=0.003, big_delta=0.035, G1Ball_1_lambda_iso=0.9e-9, C1Stick_1_lambda_par=[3.05e-9, 10e-9],TumorCells_Dconst=0.9e-9): """ Computes the verdict metrics for a single. The outputs are available in the directories /subjects//dMRI/microstructure/verdict/. 
@@ -3710,9 +4060,9 @@ def print_page(self, images): shutil.copyfile(qc_path + '/qc_report.pdf', folder_path + '/subjects/' + patient_path + '/quality_control.pdf') else: """Merge with QC of preproc"""; - from PyPDF2 import PdfFileMerger + from pypdf import PdfMerger pdfs = [folder_path + '/subjects/' + patient_path + '/quality_control.pdf', qc_path + '/qc_report.pdf'] - merger = PdfFileMerger() + merger = PdfMerger() for pdf in pdfs: merger.append(pdf) merger.write(folder_path + '/subjects/' + patient_path + '/quality_control_verdict.pdf') @@ -3727,6 +4077,8 @@ def print_page(self, images): "%d.%b %Y %H:%M:%S") + ": Successfully processed patient %s \n" % p) f.close() + update_status(folder_path, patient_path, "verdict") + def report_solo(folder_path,patient_path, slices=None, short=False): """ Legacy report function. diff --git a/elikopy/mouse.py b/elikopy/mouse.py deleted file mode 100644 index 44370ab..0000000 --- a/elikopy/mouse.py +++ /dev/null @@ -1,1086 +0,0 @@ -import numpy as np -import nibabel as nib -import datetime -import os -import shutil -import json -import math - -import re -from bruker2nifti.converter import Bruker2Nifti - -from threading import Thread - -import subprocess -from elikopy.utils import makedir - -from dipy.denoise.localpca import mppca -from dipy.io.image import load_nifti, save_nifti -from dipy.io.gradients import read_bvals_bvecs -from dipy.core.gradients import gradient_table, reorient_bvecs -from dipy.viz import regtools -from dipy.data.fetcher import fetch_syn_data, read_syn_data -from dipy.align.imaffine import (transform_centers_of_mass, - AffineMap, - MutualInformationMetric, - AffineRegistration) -from dipy.align.transforms import (TranslationTransform3D, - RigidTransform3D, - AffineTransform3D, - TranslationTransform2D, - RigidTransform2D, - AffineTransform2D, - RotationTransform2D, - RotationTransform3D) - -from skimage.morphology import binary_dilation, binary_erosion -denoised = None - - -def threaded_mppca(a, b, chunk): 
- global denoised - pr = math.ceil((np.shape(chunk)[3] ** (1 / 3) - 1) / 2) - denoised_chunk, sigma = mppca(chunk, patch_radius=pr, return_sigma=True) - denoised[:, :, :, a:b] = denoised_chunk - - -def convertAndMerge(folder_path, raw_bruker_dicom_subfolder, nifti_bruker_folder, subject): - # instantiate a converter - log_prefix= "ConvertAndMerge" - f = open(folder_path + "/mouse_logs.txt", "a+") - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Beginning of convert and merge for mouse %s \n" % subject - print(msg) - f.write(msg) - - bru = Bruker2Nifti(raw_bruker_dicom_subfolder, nifti_bruker_folder, study_name=subject) - - # select the options (attributes) you may want to change - the one shown below are the default one: - bru.verbose = 0 - # converter settings for the nifti values - bru.nifti_version = 1 - bru.qform_code = 1 - bru.sform_code = 2 - bru.save_human_readable = True - bru.save_b0_if_dwi = (True) # if DWI, it saves the first layer as a single nfti image. 
- bru.correct_slope = True - bru.correct_offset = True - # advanced sample positioning - bru.sample_upside_down = False - bru.frame_body_as_frame_head = False - # chose to convert extra files: - bru.get_acqp = False - bru.get_method = False - bru.get_reco = False - - # Check that the list of scans and the scans names automatically selected makes some sense: - print(bru.scans_list) - print(bru.list_new_name_each_scan) - - # call the function convert, to convert the study from DICOM to NifTi: - bru.convert() - # Merge all DWI volumes: - bvecs = None - bvals = None - mergedDWI = None - shell_index = [] - - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Merging images \n" - print(msg) - f.write(msg) - for dirr in os.listdir(os.path.join(nifti_bruker_folder, subject)): - basedir = os.path.join(nifti_bruker_folder, subject, dirr) - if not os.path.isfile(os.path.join(basedir, "acquisition_method.txt")): - continue - with open(os.path.join(basedir, "acquisition_method.txt")) as facq_method: - method = facq_method.readlines()[0] - - if (method == "DtiEpi"): - print("DtiEpi", dirr) - bvec = np.load(os.path.join(basedir, dirr + "_DwGradVec.npy")) - bval = np.load(os.path.join(basedir, dirr + "_DwEffBval.npy")) - dwi = nib.load(os.path.join(basedir, dirr + ".nii.gz")) - zerobvec = np.where((bvec[:, 0] == 0) & (bvec[:, 1] == 0) & (bvec[:, 2] == 0)) - bvec[zerobvec] = [1, 0, 0] - if any(x is None for x in [bvecs, bvals, mergedDWI]): - bvecs = bvec - bvals = bval - mergedDWI = dwi - shell_index.append(0) - else: - bvecs = np.concatenate((bvecs, bvec), axis=0) - bvals = np.concatenate((bvals, bval), axis=0) - shell_index.append(mergedDWI.shape[-1]) - mergedDWI = nib.concat_images([mergedDWI, dwi], axis=3) - elif (method == "FLASH"): - print("FLASH", dirr) - elif (method == "RARE"): - print("RARE", dirr) - elif (method == "MSME"): - print("MSME", dirr) - elif (method == "FieldMap"): - print("FieldMap", dirr) - elif (method == 
"nmrsuDtiEpi"): - print("nmrsuDtiEpi", dirr) - makedir(os.path.join(nifti_bruker_folder, subject, "reverse_encoding"), folder_path + "/logs.txt", log_prefix) - dwi = nib.load(os.path.join(basedir, dirr + ".nii.gz")) - bval = np.load(os.path.join(basedir, dirr + "_DwEffBval.npy")) - bvec = np.load(os.path.join(basedir, dirr + "_DwGradVec.npy")) - zerobvec = np.where((bvec[:, 0] == 0) & (bvec[:, 1] == 0) & (bvec[:, 2] == 0)) - bvec[zerobvec] = [1, 0, 0] - np.savetxt(os.path.join(nifti_bruker_folder, subject, "reverse_encoding", subject + ".bvec"), bvec, fmt="%.42f") - np.savetxt(os.path.join(nifti_bruker_folder, subject, "reverse_encoding", subject + ".bval"), bval, newline=' ', - fmt="%.42f") - dwi.to_filename(os.path.join(nifti_bruker_folder, subject, "reverse_encoding", subject + ".nii.gz")) - fnVol = open(os.path.join(nifti_bruker_folder, subject, "reverse_encoding", "nVol.txt"), "w") - fnVol.write(str(bval.shape[0])) - fnVol.close() - else: - print("Unknow acquisition method:", method) - - np.savetxt(os.path.join(nifti_bruker_folder, subject, subject + ".bvec"), bvecs.T, fmt="%.42f") - np.savetxt(os.path.join(nifti_bruker_folder, subject, subject + ".bval"), bvals.T, newline=' ', fmt="%.42f") - mergedDWI.to_filename(os.path.join(nifti_bruker_folder, subject, subject + ".nii.gz")) - print("Number of total volumes: ", bvals.shape) - - fnVol = open(os.path.join(nifti_bruker_folder, subject, "nVol.txt"), "w") - fnVol.write(str(int(bvals.shape[0]))) - fnVol.close() - - fshellIndex = open(os.path.join(nifti_bruker_folder, subject, "shell_index.txt"), "w") - for element in shell_index: - fshellIndex.write(str(element) + "\n") - fshellIndex.close() - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": End of convert and merge for mouse %s \n" % subject - print(msg) - f.write(msg) - f.close() - return int(bvals.shape[0]) - - -def link(folder_path, nifti_bruker_folder, nVol, subjectName, acqparams_path): - pattern = 
re.compile("data_\\d") - patternNum = re.compile("\\d") - - log_prefix = "link" - - match = False - dataFolderIndex = [] - for typeFolder in os.listdir(folder_path): - if pattern.match(typeFolder): - nVolDataFolder = int(np.loadtxt(os.path.join(folder_path, typeFolder, "nVol.txt"))) - dataFolderIndex.append(patternNum.search(typeFolder).group(0)) - if nVol == nVolDataFolder: - print("match") - match = True - break - - if not match: - dataFolderIndex.sort() - if len(dataFolderIndex) > 0: - typeFolder = "data_" + str(dataFolderIndex[-1] + 1) - else: - typeFolder = "data_1" - makedir(os.path.join(folder_path, typeFolder), folder_path + "/logs.txt", log_prefix) - makedir(os.path.join(folder_path, typeFolder, "reverse_encoding"), folder_path + "/logs.txt", log_prefix) - index = open(os.path.join(folder_path, typeFolder, "index.txt"), "w") - for i in range(nVol): - index.write('1 ') - index.close() - index = open(os.path.join(folder_path, typeFolder, "reverse_encoding", "index.txt"), "w") - for i in range(4): - index.write('1 ') - index.close() - fnVol = open(os.path.join(folder_path, typeFolder, "nVol.txt"), "w") - fnVol.write(str(nVol)) - fnVol.close() - shutil.copyfile(os.path.join(nifti_bruker_folder, "shell_index.txt"), os.path.join(folder_path, typeFolder, "shell_index.txt")) - - fraw = open(acqparams_path, 'r') - acq = open(os.path.join(folder_path, typeFolder, "acqparams.txt"), "w") - acq.write(fraw.readline()) - acq.close() - acq = open(os.path.join(folder_path, typeFolder, "reverse_encoding", "acqparams.txt"), "w") - acq.write(fraw.readline()) - acq.close() - fraw.close() - - subjectPath = os.path.join(nifti_bruker_folder, subjectName) - shutil.copyfile(subjectPath + ".nii.gz", os.path.join(folder_path, typeFolder, subjectName + ".nii.gz")) - shutil.copyfile(subjectPath + ".bvec", os.path.join(folder_path, typeFolder, subjectName + ".bvec")) - shutil.copyfile(subjectPath + ".bval", os.path.join(folder_path, typeFolder, subjectName + ".bval")) - - 
subjectPath_reverse = os.path.join(nifti_bruker_folder, "reverse_encoding", subjectName) - shutil.copyfile(subjectPath_reverse + ".nii.gz", - os.path.join(folder_path, typeFolder, "reverse_encoding", subjectName + ".nii.gz")) - shutil.copyfile(subjectPath_reverse + ".bvec", - os.path.join(folder_path, typeFolder, "reverse_encoding", subjectName + ".bvec")) - shutil.copyfile(subjectPath_reverse + ".bval", - os.path.join(folder_path, typeFolder, "reverse_encoding", subjectName + ".bval")) - - -def gen_Nifti(folder_path, raw_bruker_dicom_folder, nifti_bruker_folder): - """ - :param BaseIN: "/CECI/proj/pilab/PermeableAccess/souris_MKF3Hp7nU/raw" - :param BaseOUT: "/CECI/proj/pilab/PermeableAccess/souris_MKF3Hp7nU/bruker_elikopy" - :param ProcIN: "/CECI/proj/pilab/PermeableAccess/souris_MKF3Hp7nU/test_b2e_study" - """ - log_prefix= "gen_Nifti" - f = open(folder_path + "/mouse_logs.txt", "a+") - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Beginning of gen Nifti \n" - print(msg) - f.write(msg) - f.close() - for file in os.listdir(raw_bruker_dicom_folder): - if (os.path.isdir(os.path.join(raw_bruker_dicom_folder, file)) and file != 'cleaning_data'): - raw_bruker_dicom_subfolder = os.path.join(raw_bruker_dicom_folder, file) - nVol = convertAndMerge(folder_path,raw_bruker_dicom_subfolder, nifti_bruker_folder, file) - link(folder_path, os.path.join(nifti_bruker_folder, file), nVol, file, os.path.join(raw_bruker_dicom_subfolder, "acqparams.txt")) - - - -def preprocessing_solo(folder_path, patient_path, starting_step = None, denoising=True, denoising_algorithm="mppca_dipy", motion_correction=True, brain_extraction=True, topup=True, core_count = 1): - p = patient_path - log_prefix = "MOUSE PREPROC SOLO" - makedir(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/", folder_path + "/logs.txt", log_prefix) - f = open(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", "a+") - msg = "[" + log_prefix 
+ "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Beginning of individual preprocessing for mouse %s \n" % p - print(msg) - f.write(msg) - - assert starting_step in [None, "denoising", "motion_correction", "brain_extraction", "topup"], "starting_step must be None, denoising, motion_correction, brain_extraction or topup" - - global denoised - - fdwi = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + '_raw_dmri.nii.gz' - fbval = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bval" - fbvec = folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + patient_path + "_raw_dmri.bvec" - - denoising_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/denoising/' - motionCorr_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/motionCorrection/' - topup_path = folder_path + '/subjects/' + patient_path + "/dMRI/preproc/topup" - brainExtraction_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/brainExtraction/' - mask_path = folder_path + '/subjects/' + patient_path + '/masks/' - - data, affine = load_nifti(fdwi) - bvals, bvecs = read_bvals_bvecs(fbval, fbvec) - if starting_step not in ["denoising", "motion_correction", None] and motion_correction: - bvals, bvecs = read_bvals_bvecs(motionCorr_path + patient_path + '_motionCorrected.bval', motionCorr_path + patient_path + '_motionCorrected.bvec') - gtab = gradient_table(bvals, bvecs, b0_threshold=65) - - - imain_tot = fdwi - - import json - with open(os.path.join(folder_path + '/subjects/', "subj_type.json")) as json_file: - subj_type = json.load(json_file) - - - - ############################ - ### MPPCA Denoising step ### - ############################ - if denoising and starting_step not in ["motion_correction", "topup", "brain_extraction"]: - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime( - "%d.%b %Y %H:%M:%S") + ": Start of denoising step \n" - print(msg) - f.write(msg) - - 
makedir(denoising_path, folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", log_prefix) - - shell_index = [] - with open(os.path.join(folder_path, "data_" + str(subj_type[patient_path]), "shell_index.txt"), "r") as fshell: - for line in fshell: - shell_index.append(int(line.strip())) - - denoised = data.copy() - - print(shell_index, data.shape) - - if denoising_algorithm == "mppca_mrtrix": - makedir(denoising_path + "/mrtrix",folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", log_prefix) - - threads = [] - for i in range(len(shell_index) - 1): - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + \ - ": " + "Start of mppca for shell" + str(i) + " (index:" + str(shell_index[i]) + "," + str(shell_index[i + 1]) + ")\n" - print(msg) - f.write(msg) - a = shell_index[i] - b = shell_index[i + 1] - chunk = data[:, :, :, a:b].copy() - - if denoising_algorithm == "mppca_dipy": - threads.append(Thread(target=threaded_mppca, args=(a, b, chunk))) - threads[-1].start() - elif denoising_algorithm == "mppca_mrtrix": - makedir(denoising_path + "/mrtrix",folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", log_prefix) - import subprocess - bashCommand = "dwidenoise -nthreads " + str(core_count) + " " + imain_tot + \ - " " + denoising_path + '/mrtrix/' + 'mppca_shell' + str(i) + '_.nii.gz' + \ - " -noise " + denoising_path + '/' + patient_path + '_sigmaNoise_shell' + str(i) + '.nii.gz -force' - - process = subprocess.Popen(bashCommand, universal_newlines=True, shell=True, stdout=f, - stderr=subprocess.STDOUT) - - output, error = process.communicate() - - - - - if denoising_algorithm == "mppca_dipy": - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + \ - ": " + "All threads have been launched.\n" - print(msg) - f.write(msg) - for i in range(len(threads)): - threads[i].join() - elif denoising_algorithm == 'mppca_mrtrix': - 
print("Merging denoising results") - #TODO: Merge denoising results - - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + \ - ": " + "All threads have finished.\n" - print(msg) - f.write(msg) - - save_nifti(denoising_path + '/' + patient_path + '_mppca.nii.gz', denoised.astype(np.float32), affine) - data = denoised - - if denoising: - imain_tot = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/denoisingMPPCA/' + patient_path + '_mppca.nii.gz' - - ############################## - ### Motion correction step ### - ############################## - - if motion_correction and starting_step not in ["topup", "brain_extraction"]: - makedir(motionCorr_path, folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", log_prefix) - - if starting_step == "motion_correction": - data, affine = load_nifti(imain_tot) - - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + \ - ": " + " Motion correction step.\n" - print(msg) - f.write(msg) - - reg_affines_precorrection = [] - static_precorrection = data[..., 0] - static_grid2world_precorrection = affine - moving_grid2world_precorrection = affine - for i in range(data.shape[-1]): - if gtab.b0s_mask[i]: - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + \ - ": " + " Motion correction step: Premoving b0 number " + str(i) + "\n" - print(msg) - f.write(msg) - moving = data[..., i] - moved, trans_affine = affine_reg(static_precorrection, static_grid2world_precorrection, - moving, moving_grid2world_precorrection) - data[..., i] = moved - else: - moving = data[..., i] - data[..., i] = trans_affine.transform(moving) - reg_affines_precorrection.append(trans_affine.affine) - - gtab_precorrection = reorient_bvecs(gtab, reg_affines_precorrection) - - bvec = gtab.bvecs - zerobvec = np.where((bvec[:, 0] == 0) & (bvec[:, 1] == 0) & (bvec[:, 2] == 0)) - bvec[zerobvec] = [1, 0, 0] - save_nifti(motionCorr_path + 
patient_path + '_motionCorrected.nii.gz', data, affine) - np.savetxt(motionCorr_path + patient_path + '_motionCorrected.bval', bvals) - np.savetxt(motionCorr_path + patient_path + '_motionCorrected.bvec', bvec) - - gtab = gtab_precorrection - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + \ - ": " + " End of Motion correction step\n" - print(msg) - f.write(msg) - - if motion_correction: - imain_tot = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/motionCorrection/' + patient_path + '_motionCorrected.nii.gz' - - ############################# - ### Topup step ### - ############################# - - if topup and starting_step not in ["brain_extraction"]: - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + \ - ": " + " Start of topup step\n" - print(msg) - f.write(msg) - makedir(topup_path, folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", - log_prefix) - - if starting_step == "topup": - data, affine = load_nifti(imain_tot) - - multiple_encoding = False - topup_log = open(folder_path + '/subjects/' + patient_path + "/dMRI/preproc/topup/topup_logs.txt", "a+") - - with open(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'index.txt') as findex: - line = findex.read() - line = " ".join(line.split()) - topup_index = [int(s) for s in line.split(' ')] - - with open(folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt') as facqparams: - topup_acq = [[float(x) for x in line2.split()] for line2 in facqparams] - - # Find all the bo to extract. 
- current_index = 0 - all_index = {} - i = 1 - roi = [] - for ind in topup_index: - if ind != current_index and ind not in all_index: - roi.append(i) - fslroi = "fslroi " + imain_tot + " " + topup_path + "/b0_" + str(i) + ".nii.gz " + str(i - 1) + " 1" - process = subprocess.Popen(fslroi, universal_newlines=True, shell=True, stdout=topup_log, - stderr=subprocess.STDOUT) - - output, error = process.communicate() - print("B0 of index" + str(i) + " extracted!") - current_index = ind - all_index[ind] = all_index.get(ind, 0) + 1 - i = i + 1 - - # Merge b0 - if len(roi) == 1: - shutil.copyfile(topup_path + "/b0_" + str(roi[0]) + ".nii.gz", topup_path + "/b0.nii.gz") - else: - roi_to_merge = "" - for r in roi: - roi_to_merge = roi_to_merge + " " + topup_path + "/b0_" + str(r) + ".nii.gz" - cmd = "fslmerge -t " + topup_path + "/b0.nii.gz" + roi_to_merge - process = subprocess.Popen(cmd, universal_newlines=True, shell=True, stdout=topup_log, - stderr=subprocess.STDOUT) - output, error = process.communicate() - - # Check if multiple or single encoding direction - curr_x = 0.0 - curr_y = 0.0 - curr_z = 0.0 - first = True - print("Topup acq parameters:") - print(topup_acq) - for acq in topup_acq: - if not first and (curr_x != acq[1] or curr_y != acq[2] or curr_z != acq[3]): - multiple_encoding = True - first = False - curr_x = acq[1] - curr_y = acq[2] - curr_z = acq[3] - - - if multiple_encoding: - makedir(topup_path, folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", log_prefix) - topupConfig = 'b02b0.cnf' # if topupConfig is None else topupConfig - bashCommand = 'export OMP_NUM_THREADS=' + str(core_count) + ' ; export FSLPARALLEL=' + str( - core_count) + ' ; topup --imain="' + topup_path + '/b0.nii.gz" --config="' + topupConfig + '" --datain="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_estimate" --fout="' + 
folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_fout_estimate" --iout="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_iout_estimate" --verbose' - bashcmd = bashCommand.split() - process = subprocess.Popen(bashCommand, universal_newlines=True, shell=True, stdout=topup_log, - stderr=subprocess.STDOUT) - # wait until topup finish - output, error = process.communicate() - - inindex = "" - first = True - for r in roi: - if first: - inindex = str(topup_index[r - 1]) - first = False - else: - inindex = inindex + "," + str(topup_index[r - 1]) - - bashCommand2 = 'applytopup --imain="' + imain_tot + '" --inindex=' + inindex + ' --datain="' + folder_path + '/subjects/' + patient_path + '/dMRI/raw/' + 'acqparams.txt" --topup="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_estimate" --out="' + folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_corr"' - - process2 = subprocess.Popen(bashCommand2, universal_newlines=True, shell=True, stdout=topup_log, - stderr=subprocess.STDOUT) - # wait until apply topup finish - output, error = process2.communicate() - - topup_log.close() - - if topup: - imain_tot = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/topup/' + patient_path + '_topup_corr.nii.gz' - - ############################# - ### Brain extraction step ### - ############################# - if brain_extraction: - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + \ - ": " + " Start of brain extraction step\n" - print(msg) - f.write(msg) - - if starting_step == "brain_extraction": - data, affine = load_nifti(imain_tot) - - makedir(brainExtraction_path, folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", log_prefix) - makedir(mask_path, folder_path + '/subjects/' + patient_path + "/dMRI/preproc/preproc_logs.txt", log_prefix) - - 
# created a brain for designing a mask (sum of all shells) - b0final = np.zeros(data.shape[:-1]) - for i in range(data.shape[-1]): - b0final += data[:, :, :, i] - - save_nifti(brainExtraction_path + patient_path + '_Brain_extraction_ref.nii.gz', b0final, affine) - - # function to find a mask - final_mask = mask_Wizard(b0final, 4, 7, work='2D') - - # Saving - out = nib.Nifti1Image(final_mask, affine) - out.to_filename(mask_path + patient_path + '_brain_mask.nii.gz') - - data[final_mask == 0] = 0 - - save_nifti(brainExtraction_path + patient_path + '_Extracted_brain.nii.gz', data, affine) - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + \ - ": " + " End of brain extraction step\n" - print(msg) - f.write(msg) - - ################################ - ### Final preprocessing step ### - ################################ - - final_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' - save_nifti(final_path + '/' + patient_path + '_dmri_preproc.nii.gz', data, affine) - np.savetxt(final_path + '/' + patient_path + '_dmri_preproc.bval', bvals) - np.savetxt(final_path + '/' + patient_path + '_dmri_preproc.bvec', bvecs) - - msg = "[" + log_prefix + "] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + \ - ": " + " End of preprocessing\n" - print(msg) - f.write(msg) - - - - -""" -========================================================= -Motion correction of DWI data -@author: DELINTE Nicolas, BIOUL Nicolas, HENAUT Eliott -========================================================= -""" - -def affine_reg(static, static_grid2world, moving, moving_grid2world, work='3D'): - """ - - - Parameters - ---------- - static : TYPE - DESCRIPTION. - static_grid2world : TYPE - DESCRIPTION. - moving : TYPE - DESCRIPTION. - moving_grid2world : TYPE - DESCRIPTION. - work : TYPE, optional - DESCRIPTION. The default is '3D'. - - Returns - ------- - transformed : TYPE - DESCRIPTION. - affine : TYPE - DESCRIPTION. 
- - """ - - if work == '3D': - c_of_mass = transform_centers_of_mass(static, static_grid2world, moving, moving_grid2world) - nbins = 32 - sampling_prop = None - metric = MutualInformationMetric(nbins, sampling_prop) - - level_iters = [500, 100, 10] - - sigmas = [3.0, 1.0, 0.0] - - factors = [4, 2, 1] - - affreg = AffineRegistration(metric=metric, - level_iters=level_iters, - sigmas=sigmas, - factors=factors) - - transform = TranslationTransform3D() - params0 = None - starting_affine = c_of_mass.affine - translation = affreg.optimize(static, moving, transform, params0, - static_grid2world, moving_grid2world, - starting_affine=starting_affine) - - transformed = translation.transform(moving) - # save_nifti('/Users/nicolasbioul/Desktop/translation.nii.gz', transformed, moving_grid2world) - - transform = RotationTransform3D() - params0 = None - starting_affine = translation.affine - rotation = affreg.optimize(static, moving, transform, params0, - static_grid2world, moving_grid2world, - starting_affine=starting_affine) - - transformed = rotation.transform(moving) - # save_nifti('/Users/nicolasbioul/Desktop/rotation.nii.gz', transformed, moving_grid2world) - - transform = RigidTransform3D() - params0 = None - starting_affine = rotation.affine - rigid = affreg.optimize(static, moving, transform, params0, - static_grid2world, moving_grid2world, - starting_affine=starting_affine) - transformed = rigid.transform(moving) - # save_nifti('/Users/nicolasbioul/Desktop/rigid.nii.gz', transformed, moving_grid2world) - - transform = AffineTransform3D() - params0 = None - starting_affine = rigid.affine - affine = affreg.optimize(static, moving, transform, params0, - static_grid2world, moving_grid2world, - starting_affine=starting_affine) - - transformed = affine.transform(moving) - # save_nifti('/Users/nicolasbioul/Desktop/affine.nii.gz', transformed, moving_grid2world) - - return transformed, affine - - elif work == '2D': - - c_of_mass = transform_centers_of_mass(static, 
static_grid2world, moving, moving_grid2world) - nbins = 32 - sampling_prop = None - metric = MutualInformationMetric(nbins, sampling_prop) - - level_iters = [500, 100, 10] - - sigmas = [3.0, 1.0, 0.0] - - factors = [4, 2, 1] - - affreg = AffineRegistration(metric=metric, - level_iters=level_iters, - sigmas=sigmas, - factors=factors) - - transform = TranslationTransform2D() - params0 = None - starting_affine = c_of_mass.affine - translation = affreg.optimize(static, moving, transform, params0, - static_grid2world, moving_grid2world, - starting_affine=starting_affine) - - transformed = translation.transform(moving) - - transform = RigidTransform2D() - params0 = None - starting_affine = translation.affine - rigid = affreg.optimize(static, moving, transform, params0, - static_grid2world, moving_grid2world, - starting_affine=starting_affine) - transformed = rigid.transform(moving) - - transform = RotationTransform2D() - params0 = None - starting_affine = rigid.affine - rotation = affreg.optimize(static, moving, transform, params0, - static_grid2world, moving_grid2world, - starting_affine=starting_affine) - - transformed = rotation.transform(moving) - - transform = AffineTransform2D() - params0 = None - starting_affine = rotation.affine - affine = affreg.optimize(static, moving, transform, params0, - static_grid2world, moving_grid2world, - starting_affine=starting_affine) - - transformed = affine.transform(moving) - - return transformed, affine - -""" -======================================================= -Brain extraction of DWI data -@author: HENAUT Eliott BIOUL Nicolas -======================================================= -""" - -def mask_Wizard(data, r_fill, r_shape, scal=1, geo_shape='ball', height=5, work='2D'): - """ - - Parameters - ---------- - data : TYPE : ArrayList - DESCRIPTION: The brain data that we want to find a mask. - r_fill : TYPE : Interger - DESCRIPTION: The radius of the circle,cylinder,or ball that we use for growing the surface. 
Exclusively used in the fill function. - r_shape : TYPE : Interger - DESCRIPTION: The radius of the circle, cylinder, or ball that we use for the opening/closing. Exclusively used in the shape_matrix function. - scal : TYPE : Interger, optional - DESCRIPTION: The param λ in the formula y=µ+λ*σ. y is the threshold defining the inclusion of a voxel or not. µ is the mean of the array data. σ is it's standard deviation' - The default is 1. - geo_shape : TYPE : String, optional - DESCRIPTION: The shape of the convolution in the opening/closing. ('ball','cylinder') - The default is 'ball'. - height : TYPE : int, optional - DESCRIPTION: The height of the cylinder of the convolution in the opening/closing. ('ball','cylinder') - The default is 'ball'. - work : TYPE : String, optional - DESCRIPTION: The dimension. ('2D','3D') - The default is '2D'. Allows working in 2 or 3 dimensions. Switch to '3D' - - Returns - ------- - mask : TYPE : Arraylist - The matrice of the mask, 1 if we take it, O otherwise. 
- - """ - if work == '3D': - - (x, y, z) = data.shape - seed = binsearch(data, x, y, z) - init_val = int(np.mean(data) + scal * np.sqrt(np.var(data))) - brainish = fill(seed, data, r_fill, init_val) - geo_shape = shape_matrix(r_shape, geo_shape, height, work) - closing = binary_erosion(binary_dilation(brainish, selem=geo_shape)) - opening = binary_dilation(binary_erosion(closing, selem=geo_shape)) - mask = np.zeros(opening.shape) - mask[opening] = 1 - else: - (x, y, z) = data.shape - b0final = data - for i in range(z): - seed = ((b0final.shape[0]) // 2, (b0final.shape[1]) // 2, i) - if np.std(b0final[:, :, i] / np.mean(b0final[:, :, i])) > 0.5: - seed = binsearch(b0final[:, :, i], x, y, work='2D') - init_val = int(np.mean(b0final[:, :, i]) + scal * np.sqrt(np.var(b0final[:, :, i]))) - brainish = fill([seed], b0final[:, :, i], r_fill, init_val) - mat = shape_matrix(r_shape) - closing = binary_erosion(binary_dilation(brainish, selem=mat), selem=mat) - opening = binary_dilation(binary_erosion(closing, selem=mat), selem=mat) - - # Correction element - final = binary_dilation(opening, selem=shape_matrix(2, work='2D')) - # - inter = np.zeros(final.shape) - inter[final] = 1 - b0final[:, :, i] = inter - else: - b0final[:, :, i] = np.zeros(b0final[:, :, i].shape) - b0final[seed] = 1 - mask = b0final - - return mask - -def fill(position, data, rad, init_val, work='2D'): - """ - - Parameters - ---------- - position : TYPE: List - DESCRIPTION: List containing the seed point or set of mask points on which to start the filling. - data : TYPE: Array - DESCRIPTION: Image array on which to fill. - rad : TYPE: Integer - DESCRIPTION: Radius of the neighbourhood to be taken into account when filling. Exclusively used when calling getVoxel - init_val : TYPE: Float - DESCRIPTION: Inclusing value. If above or equal to this value, the fill function will select the voxel/pixel otherwise it will be excluded. - work : TYPE : String, optional - DESCRIPTION: The dimension. 
('2D','3D') - The default is '2D'. Allows working in 2 or 3 dimensions. Switch to '3D' - - Returns - ------- - data_new : TYPE: Array - DESCRIPTION: returns the filled out mask with all selected pixels from 2D or 3D image. - - """ - data_new = np.zeros(data.shape) - voxelList = set() - for i in position: - voxelList.add(i) - - while len(voxelList) > 0: - voxel = voxelList.pop() - voxelList = getVoxels(voxel, data, init_val, voxelList, rad, data_new, work) - data_new[voxel] = 1 - return data_new - -def getVoxels(voxel, data, init_val, voxelList, rad, data_new, work='2D'): - """ - - - Parameters - ---------- - voxel : TYPE: Tuple - DESCRIPTION: Contains the coordinates (x,y) in work='2D' or (x,y,z) in work='3D' of the voxel to be analysed - data : TYPE: Array - DESCRIPTION: image array - init_val : TYPE: Float - DESCRIPTION: Inclusing value. If above or equal to this value, the fill function will select the voxel/pixel otherwise it will be excluded. - voxelList : TYPE: set() - DESCRIPTION: Set of all qualified voxels. Each qualifying neighbour of an analysed voxel will be placed in the set adjacentVoxelList, - if they qualify when their analysis comes, they will be placed in voxelList. - rad : TYPE: Integer - DESCRIPTION: Radius of the neighbourhood to be taken into account when filling. Exclusively used when calling getVoxel - data_new : TYPE: Array - DESCRIPTION: Mask in the making. See fill function - work : TYPE : String, optional - DESCRIPTION: The dimension. ('2D','3D') - The default is '2D'. Allows working in 2 or 3 dimensions. Switch to '3D' - - Returns - ------- - voxelList : TYPE: set - DESCRIPTION: The original voxelList (see arguments) in which we have added all qualifying voxels of the image. 
- - """ - - if work == '3D': - (x, y, z) = voxel - adjacentVoxelList = set() - for i in range(rad): - h = int(np.sqrt(rad ** 2 - i ** 2)) - for j in range(-h, h + 1): - for k in range(-1, 2): - adjacentVoxelList.add((x + i, y + j, z + k)) - adjacentVoxelList.add((x - i, y + j, z + k)) - for adja in adjacentVoxelList: - if isInbound(adja, data, work): - if data[adja] >= init_val and data_new[adja] != 1: - voxelList.add(adja) - else: - pixel = voxel - (x, y) = pixel - adjacentPixelList = set() - for i in range(rad): - h = int(np.sqrt(rad ** 2 - i ** 2)) - for j in range(-h, h + 1): - adjacentPixelList.add((x + i, y + j)) - adjacentPixelList.add((x - i, y + j)) - for adja in adjacentPixelList: - if isInbound(adja, data, work): - if data[adja] >= init_val and data_new[adja] != 1: - voxelList.add(adja) - return voxelList - -def isInbound(voxel, data, work='2D'): - """ - Boolean function to know if still in bound. To assess the borders of an array - - Parameters - ---------- - voxel : TYPE: Tuple - DESCRIPTION: Contains the coordinates (x,y) in work='2D' or (x,y,z) in work='3D' of the voxel to be analysed - data : TYPE: Array - DESCRIPTION: image array - work : TYPE : String, optional - DESCRIPTION: The dimension. ('2D','3D') - The default is '2D'. Allows working in 2 or 3 dimensions. Switch to '3D' - - Returns - ------- - TYPE: Boolean - DESCRIPTION: True if the voxel lies within the image(array). False otherwise - - """ - if work == '3D': - return voxel[0] < data.shape[0] and voxel[0] >= 0 and voxel[1] < data.shape[1] and voxel[1] >= 0 and voxel[ - 2] < \ - data.shape[2] and voxel[2] >= 0 - else: - return voxel[0] < data.shape[0] and voxel[0] >= 0 and voxel[1] < data.shape[1] and voxel[1] >= 0 - -def binsearch(img, x2, y2, z=0, x1=0, y1=0, work='2D'): - """ - This function is a binary search tree that works using a recursive call. 
- - Parameters - ---------- - img : TYPE: Array - DESCRIPTION: 2 or 3D array of an image - x2 : TYPE: Integer - DESCRIPTION: Lower cut of the image on x-axis - y2 : TYPE: Integer - DESCRIPTION: Lower cut of the image on y-axis - z : TYPE, optional - DESCRIPTION. The default is 0. - x1 : TYPE, optional: Integer - DESCRIPTION. Upper cut of the image on x-axis. The default is 0. - y1 : TYPE, optional: Integer - DESCRIPTION. Upper cut of the image on y-axis. The default is 0. - work : TYPE : String, optional - DESCRIPTION: The dimension. ('2D','3D') - The default is '2D'. Allows working in 2 or 3 dimensions. Switch to '3D' - - Returns - ------- - TYPE Integer - DESCRIPTION: Returns the best - - """ - if work == '3D': - if x2 <= x1 + 1 or y2 <= y1 + 1: - return (x1, y1, z // 2) - cand = [[[x1, (x1 + x2) // 2], [y1, (y1 + y2) // 2]], [[(x1 + x2) // 2, x2], [y1, (y1 + y2) // 2]], - [[x1, (x1 + x2) // 2], [(y1 + y2) // 2, y2]], [[(x1 + x2) // 2, x2], [(y1 + y2) // 2, y2]]] - - Im1 = img[cand[0][0][0]:cand[0][0][1], cand[0][1][0]:cand[0][1][1], z // 2] - Im2 = img[cand[1][0][0]:cand[1][0][1], cand[1][1][0]:cand[1][1][1], z // 2] - Im3 = img[cand[2][0][0]:cand[2][0][1], cand[2][1][0]:cand[2][1][1], z // 2] - Im4 = img[cand[3][0][0]:cand[3][0][1], cand[3][1][0]:cand[3][1][1], z // 2] - - idx = search([Im1, Im2, Im3, Im4], [0, 1, 2, 3]) - - else: - if x2 <= x1 + 1 or y2 <= y1 + 1: - return (x1, y1) - cand = [[[x1, (x1 + x2) // 2], [y1, (y2 + y1) // 2]], [[(x1 + x2) // 2, x2], [y1, (y2 + y1) // 2]], - [[x1, (x1 + x2) // 2], [(y1 + y2) // 2, y2]], [[(x1 + x2) // 2, x2], [(y1 + y2) // 2, y2]]] - Im1 = img[cand[0][0][0]:cand[0][0][1], cand[0][1][0]:cand[0][1][1]] - Im2 = img[cand[1][0][0]:cand[1][0][1], cand[1][1][0]:cand[1][1][1]] - Im3 = img[cand[2][0][0]:cand[2][0][1], cand[2][1][0]:cand[2][1][1]] - Im4 = img[cand[3][0][0]:cand[3][0][1], cand[3][1][0]:cand[3][1][1]] - - idx = search([Im1, Im2, Im3, Im4], [0, 1, 2, 3]) - return binsearch(img, cand[idx][0][1], cand[idx][1][1], 
z=z, x1=cand[idx][0][0], y1=cand[idx][1][0], work=work) - -def search(l, idx): - """ - - - Parameters - ---------- - l : TYPE - DESCRIPTION. - idx : TYPE - DESCRIPTION. - - Returns - ------- - TYPE - DESCRIPTION. - - """ - if len(idx) == 1: return idx[0] - newidx = [] - for i in range(0, len(idx), 2): - if np.mean(l[idx[i]]) < np.mean(l[idx[i + 1]]): - newidx.append(idx[i + 1]) - else: - newidx.append(idx[i]) - return search(l, newidx) - -def shape_matrix(radius, shape='ball', height=5, work='2D'): - """ - - - Parameters - ---------- - radius : TYPE: float - DESCRIPTION: the radius of the sphere (3D) or circle (2D) - shape : TYPE, optional: String - DESCRIPTION. The default is 'ball'. Only in work='3D' is this parameter important. Switch between 'ball' or 'cylinder' in order to choose the form of the geometric shape that the matrix will depict - height : TYPE, optional: Integer - DESCRIPTION. The default is 5. If the shape is 'cylinder' then a height is to be input. No use if work='2D' or shape='ball' - work : TYPE : String, optional - DESCRIPTION: The dimension. ('2D','3D') - The default is '2D'. Allows working in 2 or 3 dimensions. Switch to '3D' - - Returns - ------- - mat : TYPE - DESCRIPTION. 
- - """ - if work == '3D': - mat = 0 - if shape == 'cylinder': - mat = np.zeros((2 * radius + 1, 2 * radius + 1, height)) - for i in range(radius): - h = int(np.sqrt(radius ** 2 - i ** 2)) - for j in range(-h, h + 1): - for k in range(height): - mat[radius + i, radius + j, k] = 1 - mat[radius - i, radius + j, k] = 1 - - else: - mat = np.zeros((2 * radius + 1, 2 * radius + 1, 2 * radius + 1)) - for i in range(radius): - h = int(np.sqrt(radius ** 2 - i ** 2)) - for j in range(-h, h + 1): - h1 = int(np.sqrt(radius ** 2 - i ** 2 - j ** 2)) - for k in range(-h1, h1 + 1): - mat[radius + i, radius + j, radius + k] = 1 - mat[radius - i, radius + j, radius + k] = 1 - else: - mat = 0 - mat = np.zeros((2 * radius + 1, 2 * radius + 1)) - for i in range(radius): - h = int(np.sqrt(radius ** 2 - i ** 2)) - for j in range(-h, h + 1): - mat[radius + i, radius + j] = 1 - mat[radius - i, radius + j] = 1 - return mat - - -def pos_reader(data, work='2D'): - """ - - Parameters - ---------- - data : TYPE - DESCRIPTION. - work : TYPE : String, optional - DESCRIPTION: The dimension. ('2D','3D') - The default is '2D'. Allows working in 2 or 3 dimensions. Switch to '3D' - - Returns - ------- - pos_list : TYPE - DESCRIPTION. 
- - """ - pos_list = [] - if work == '2D': - for i in range(np.shape(data)[0]): - for j in range(np.shape(data)[1]): - if data[i, j] != 0: - pos_list.append((i, j)) - - elif work == '3D': - for i in range(np.shape(data)[0]): - for j in range(np.shape(data)[1]): - for k in range(np.shape(data)[2]): - if data[i, j, k] != 0: - pos_list.append((i, j, k)) - return pos_list - -def AffineSwitch(affine, to='2D'): - if to == '2D' and affine.shape == (4, 4): - affine = np.delete(affine, 2, 0) - affine = np.delete(affine, 2, 1) - elif to == '3D' and affine.shape == (3, 3): - temp = np.zeros((4, 4)) - temp[:2, :2] = affine[:2, :2] - temp[:2, 3] = affine[:2, 2] - temp[2:, 2:] = np.eye(2) - affine = temp - if np.linalg.det(affine) == 0.0: - for i in range(affine.shape[0]): - if affine[i, i] == 0.0: - affine[i, i] = 1 - return affine - diff --git a/elikopy/registration.py b/elikopy/registration.py index d5a1e6f..3184cd6 100644 --- a/elikopy/registration.py +++ b/elikopy/registration.py @@ -18,9 +18,11 @@ import pickle +from elikopy.utils import get_patient_ref, update_status + def getTransform(static_volume_file, moving_volume_file, mask_file=None, onlyAffine=False, - diffeomorph=True, sanity_check=False, DWI=False): + diffeomorph=True, sanity_check=False, DWI=False, affine_map=None): ''' @@ -54,8 +56,9 @@ def getTransform(static_volume_file, moving_volume_file, mask_file=None, onlyAff if sanity_check or onlyAffine: - identity = np.eye(4) - affine_map = AffineMap(identity, + if affine_map is None: + affine_map = np.eye(4) + affine_map = AffineMap(affine_map, static.shape, static_grid2world, moving.shape, moving_grid2world) @@ -136,7 +139,7 @@ def getTransform(static_volume_file, moving_volume_file, mask_file=None, onlyAff return mapping -def applyTransform(file_path, mapping, mapping_2=None, mask_file=None, static_file='', output_path='', binary=False, +def applyTransform(file_path, mapping, mapping_2=None, mapping_3=None, mask_file=None, static_file='', output_path='', 
binary=False, inverse=False, mask_static=None, static_fa_file=''): ''' @@ -162,9 +165,10 @@ def applyTransform(file_path, mapping, mapping_2=None, mask_file=None, static_fi DESCRIPTION. ''' - + print("Applying transform to", file_path) moving = nib.load(file_path) moving_data = moving.get_fdata() + print("Moving data shape:", moving_data.shape) if mask_file is not None: mask, mask_affine = load_nifti(mask_file) @@ -181,19 +185,21 @@ def applyTransform(file_path, mapping, mapping_2=None, mask_file=None, static_fi else: transformed = mapping_2.transform(transformed) + if mapping_3 is not None: + if inverse: + transformed = mapping_3.transform_inverse(transformed) + else: + transformed = mapping_3.transform(transformed) + if binary: transformed[transformed > .5] = 1 transformed[transformed <= .5] = 0 if len(output_path) > 0: - print("OUT", output_path) static = nib.load(static_file) static_fa = nib.load(static_fa_file) - print(static_fa.header) - print(static.header) - if mask_static is not None: mask_static_data, mask_static_affine = load_nifti(mask_static) transformed = applymask(transformed, mask_static_data) @@ -205,7 +211,7 @@ def applyTransform(file_path, mapping, mapping_2=None, mask_file=None, static_fi return transformed -def applyTransformToAllMapsInFolder(input_folder, output_folder, mapping, mapping_2=None, static_file=None, +def applyTransformToAllMapsInFolder(input_folder, output_folder, mapping, mapping_2=None, mapping_3=None, static_file=None, mask_file=None, keywordList=[], inverse=False, mask_static=None, static_fa_file=''): ''' @@ -225,18 +231,106 @@ def applyTransformToAllMapsInFolder(input_folder, output_folder, mapping, mappin ''' for filename in os.listdir(input_folder): - if all(keyword in filename for keyword in keywordList): + + curr_filename = filename + valid = True + for keyword in keywordList: + if keyword in curr_filename: + curr_filename = curr_filename.replace(keyword, '') + else: + valid = False + break + + if valid and all(keyword in 
filename for keyword in keywordList): # print(filename) try: - applyTransform(input_folder + filename, mapping, mapping_2=mapping_2, static_file=static_file, + applyTransform(input_folder + filename, mapping, mapping_2=mapping_2, mapping_3=mapping_3, static_file=static_file, output_path=output_folder + filename, mask_file=mask_file, binary=False, inverse=inverse, mask_static=mask_static, static_fa_file=static_fa_file) except TypeError: continue + +def regToT1fromB0FSL(reg_path, T1_subject, DWI_B0_subject, mask_file, metrics_dic, folderpath, p, mapping_T1_to_T1MNI, T1_MNI, + mask_static, FA_MNI, longitudinal_transform=None): + if os.path.exists(reg_path + 'mapping_DWI_B0FSL_to_T1.p'): + with open(reg_path + 'mapping_DWI_B0FSL_to_T1.p', 'rb') as handle: + mapping_DWI_to_T1 = pickle.load(handle) + else: + if not (os.path.exists(reg_path)): + try: + os.makedirs(reg_path) + except OSError: + print("Creation of the directory %s failed" % reg_path) + + T1_subject_raw = os.path.join(folderpath, 'subjects', p, 'T1', p + '_T1.nii.gz') + T1_brain_subject = os.path.join(folderpath, 'subjects', p, 'T1', p + '_T1_brain.nii.gz') + b0fsl_reg_path = os.path.join(folderpath, 'subjects', p, 'reg', 'B0FSL_to_T1') + os.makedirs(b0fsl_reg_path, exist_ok=True) + cmd = f"epi_reg --epi={DWI_B0_subject} --t1={T1_subject_raw} --t1brain={T1_brain_subject} --out={b0fsl_reg_path}/{p}_B0toT1 " + cmd_part2 = f"c3d_affine_tool -ref {T1_subject_raw} -src {DWI_B0_subject} {b0fsl_reg_path}/{p}_B0toT1.mat -fsl2ras -oitk {b0fsl_reg_path}/{p}_B0toT1_ANTS.txt -o {b0fsl_reg_path}/{p}_B0toT1_ANTS.mat" + cmd += " && " + cmd_part2 + print("Running command (B0FSL): ", cmd) + import subprocess + process = subprocess.Popen(cmd, shell=True, universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = process.communicate() + + affine_map_fslb0 = np.loadtxt(f"{b0fsl_reg_path}/{p}_B0toT1_ANTS.mat") + mapping_DWI_to_T1 = getTransform(T1_subject, DWI_B0_subject, mask_file=mask_file, 
onlyAffine=True, diffeomorph=False, sanity_check=False, DWI=False, affine_map=affine_map_fslb0) + with open(reg_path + 'mapping_DWI_B0FSL_to_T1.p', 'wb') as handle: + pickle.dump(mapping_DWI_to_T1, handle, protocol=pickle.HIGHEST_PROTOCOL) + + if not (os.path.exists(folderpath + "/subjects/" + p + "/masks/reg/")): + try: + os.makedirs(folderpath + "/subjects/" + p + "/masks/reg/") + except OSError: + print("Creation of the directory %s failed" % folderpath + "/subjects/" + p + "/masks/reg/") + + for maskType in ["brain_mask_dilated","brain_mask", "wm_mask_MSMT", "wm_mask_AP", "wm_mask_FSL_T1", + "wm_mask_Freesurfer_T1"]: + in_mask_path = folderpath + "/subjects/" + p + "/masks/" + p + "_" + maskType + ".nii.gz" + if os.path.exists(in_mask_path): + if longitudinal_transform is not None: + reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_B0FSL_" + maskType + "_longitudinal.nii.gz" + applyTransform(in_mask_path, mapping_DWI_to_T1, mapping_2=longitudinal_transform, mapping_3=mapping_T1_to_T1MNI, static_file=T1_MNI, + output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) + else: + reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_B0FSL_" + maskType + ".nii.gz" + applyTransform(in_mask_path, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, static_file=T1_MNI, + output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) + + for key, value in metrics_dic.items(): + input_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '/' + if longitudinal_transform is not None: + output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_B0FSL_longitudinal/' + else: + output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_B0FSL/' + + if not (os.path.exists(output_folder)): + try: + os.makedirs(output_folder) + 
except OSError: + print("Creation of the directory %s failed" % output_folder) + + print("Start of applyTransformToAllMapsInFolder for metrics ", value, ":", key) + if longitudinal_transform is not None: + applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, mapping_2=longitudinal_transform, + mapping_3=mapping_T1_to_T1MNI, static_file=T1_MNI, mask_file=mask_file, + keywordList=[p, key], inverse=False, mask_static=mask_static, + static_fa_file=FA_MNI) + else: + applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, + static_file=T1_MNI, mask_file=mask_file, keywordList=[p, key], inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) + + + + def regToT1fromB0(reg_path, T1_subject, DWI_subject, mask_file, metrics_dic, folderpath, p, mapping_T1_to_T1MNI, T1_MNI, - mask_static, FA_MNI): + mask_static, FA_MNI, longitudinal_transform=None): if os.path.exists(reg_path + 'mapping_DWI_B0_to_T1.p'): with open(reg_path + 'mapping_DWI_B0_to_T1.p', 'rb') as handle: mapping_DWI_to_T1 = pickle.load(handle) @@ -260,15 +354,24 @@ def regToT1fromB0(reg_path, T1_subject, DWI_subject, mask_file, metrics_dic, fol for maskType in ["brain_mask_dilated","brain_mask", "wm_mask_MSMT", "wm_mask_AP", "wm_mask_FSL_T1", "wm_mask_Freesurfer_T1"]: in_mask_path = folderpath + "/subjects/" + p + "/masks/" + p + "_" + maskType + ".nii.gz" - reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_B0_" + maskType + ".nii.gz" if os.path.exists(in_mask_path): - applyTransform(in_mask_path, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, static_file=T1_MNI, - output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, - mask_static=mask_static, static_fa_file=FA_MNI) + if longitudinal_transform is not None: + reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_B0_" + maskType + "_longitudinal.nii.gz" + applyTransform(in_mask_path, mapping_DWI_to_T1, 
mapping_2=longitudinal_transform, mapping_3=mapping_T1_to_T1MNI, static_file=T1_MNI, + output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) + else: + reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_B0_" + maskType + ".nii.gz" + applyTransform(in_mask_path, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, static_file=T1_MNI, + output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) for key, value in metrics_dic.items(): input_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '/' - output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_B0/' + if longitudinal_transform is not None: + output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_B0_longitudinal/' + else: + output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_B0/' if not (os.path.exists(output_folder)): try: @@ -277,13 +380,19 @@ def regToT1fromB0(reg_path, T1_subject, DWI_subject, mask_file, metrics_dic, fol print("Creation of the directory %s failed" % output_folder) print("Start of applyTransformToAllMapsInFolder for metrics ", value, ":", key) - applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, - static_file=T1_MNI, mask_file=mask_file, keywordList=[p, key], inverse=False, - mask_static=mask_static, static_fa_file=FA_MNI) + if longitudinal_transform is not None: + applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, mapping_2=longitudinal_transform, + mapping_3=mapping_T1_to_T1MNI, static_file=T1_MNI, mask_file=mask_file, + keywordList=[p, key], inverse=False, mask_static=mask_static, + static_fa_file=FA_MNI) + else: + applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, 
mapping_2=mapping_T1_to_T1MNI, + static_file=T1_MNI, mask_file=mask_file, keywordList=[p, key], inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) def regToT1fromWMFOD(reg_path, T1_subject, WM_FOD_subject, mask_file, metrics_dic, folderpath, p, mapping_T1_to_T1MNI, - T1_MNI, mask_static, FA_MNI): + T1_MNI, mask_static, FA_MNI, longitudinal_transform=None): if os.path.exists(reg_path + 'mapping_DWI_WMFOD_to_T1.p'): with open(reg_path + 'mapping_DWI_WMFOD_to_T1.p', 'rb') as handle: mapping_DWI_to_T1 = pickle.load(handle) @@ -307,16 +416,25 @@ def regToT1fromWMFOD(reg_path, T1_subject, WM_FOD_subject, mask_file, metrics_di for maskType in ["brain_mask_dilated","brain_mask", "wm_mask_MSMT", "wm_mask_AP", "wm_mask_FSL_T1", "wm_mask_Freesurfer_T1"]: in_mask_path = folderpath + "/subjects/" + p + "/masks/" + p + "_" + maskType + ".nii.gz" - reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_WMFOD_" + maskType + ".nii.gz" if os.path.exists(in_mask_path): - applyTransform(in_mask_path, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, static_file=T1_MNI, - output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, - mask_static=mask_static, static_fa_file=FA_MNI) + if longitudinal_transform is not None: + reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_WMFOD_" + maskType + "_longitudinal.nii.gz" + applyTransform(in_mask_path, mapping_DWI_to_T1, mapping_2=longitudinal_transform, mapping_3=mapping_T1_to_T1MNI, static_file=T1_MNI, + output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) + else: + reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_WMFOD_" + maskType + ".nii.gz" + applyTransform(in_mask_path, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, static_file=T1_MNI, + output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) for key, value in 
metrics_dic.items(): input_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '/' - output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_WMFOD/' + if longitudinal_transform is not None: + output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_WMFOD_longitudinal/' + else: + output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_WMFOD/' if not (os.path.exists(output_folder)): try: @@ -325,13 +443,19 @@ def regToT1fromWMFOD(reg_path, T1_subject, WM_FOD_subject, mask_file, metrics_di print("Creation of the directory %s failed" % output_folder) print("Start of applyTransformToAllMapsInFolder for metrics ", value, ":", key) - applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, - static_file=T1_MNI, mask_file=mask_file, keywordList=[p, key], inverse=False, - mask_static=mask_static, static_fa_file=FA_MNI) + if longitudinal_transform is not None: + applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, mapping_2=longitudinal_transform, + mapping_3=mapping_T1_to_T1MNI, static_file=T1_MNI, mask_file=mask_file, + keywordList=[p, key], inverse=False, mask_static=mask_static, + static_fa_file=FA_MNI) + else: + applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, + static_file=T1_MNI, mask_file=mask_file, keywordList=[p, key], inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) def regToT1fromAP(reg_path, T1_subject, AP_subject, mask_file, metrics_dic, folderpath, p, mapping_T1_to_T1MNI, T1_MNI, - mask_static, FA_MNI): + mask_static, FA_MNI, longitudinal_transform=None): if os.path.exists(reg_path + 'mapping_DWI_AP_to_T1.p'): with open(reg_path + 'mapping_DWI_AP_to_T1.p', 'rb') as handle: mapping_DWI_to_T1 = pickle.load(handle) @@ -355,16 +479,26 @@ def 
regToT1fromAP(reg_path, T1_subject, AP_subject, mask_file, metrics_dic, fold for maskType in ["brain_mask_dilated","brain_mask", "wm_mask_MSMT", "wm_mask_AP", "wm_mask_FSL_T1", "wm_mask_Freesurfer_T1"]: in_mask_path = folderpath + "/subjects/" + p + "/masks/" + p + "_" + maskType + ".nii.gz" - reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_AP_" + maskType + ".nii.gz" + if os.path.exists(in_mask_path): - applyTransform(in_mask_path, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, static_file=T1_MNI, - output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, - mask_static=mask_static, static_fa_file=FA_MNI) + if longitudinal_transform is not None: + reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_AP_" + maskType + "_longitudinal.nii.gz" + applyTransform(in_mask_path, mapping_DWI_to_T1, mapping_2=longitudinal_transform, mapping_3=mapping_T1_to_T1MNI, static_file=T1_MNI, + output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) + else: + reg_mask_path = folderpath + "/subjects/" + p + "/masks/reg/" + p + "_AP_" + maskType + ".nii.gz" + applyTransform(in_mask_path, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, static_file=T1_MNI, + output_path=reg_mask_path, mask_file=None, binary=False, inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) for key, value in metrics_dic.items(): input_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '/' - output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_AP/' + if longitudinal_transform is not None: + output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_AP_longitudinal/' + else: + output_folder = folderpath + '/subjects/' + p + '/dMRI/microstructure/' + value + '_CommonSpace_T1_AP/' if not (os.path.exists(output_folder)): try: @@ -373,12 +507,18 @@ def regToT1fromAP(reg_path, 
T1_subject, AP_subject, mask_file, metrics_dic, fold print("Creation of the directory %s failed" % output_folder) print("Start of applyTransformToAllMapsInFolder for metrics ", value, ":", key) - applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, - static_file=T1_MNI, mask_file=mask_file, keywordList=[p, key], inverse=False, - mask_static=mask_static, static_fa_file=FA_MNI) + if longitudinal_transform is not None: + applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, mapping_2=longitudinal_transform, + mapping_3=mapping_T1_to_T1MNI, static_file=T1_MNI, mask_file=mask_file, + keywordList=[p, key], inverse=False, mask_static=mask_static, + static_fa_file=FA_MNI) + else: + applyTransformToAllMapsInFolder(input_folder, output_folder, mapping_DWI_to_T1, mapping_2=mapping_T1_to_T1MNI, + static_file=T1_MNI, mask_file=mask_file, keywordList=[p, key], inverse=False, + mask_static=mask_static, static_fa_file=FA_MNI) -def regallDWIToT1wToT1wCommonSpace(folder_path, p, DWI_type="AP", maskType=None, T1_filepath=None, T1wCommonSpace_filepath="${FSLDIR}/data/standard/MNI152_T1_1mm_brain.nii.gz", T1wCommonSpaceMask_filepath="${FSLDIR}/data/standard/MNI152_T1_1mm_brain_mask.nii.gz", metrics_dic={'_FA': 'dti', 'RD': 'dti', 'AD': 'dti', 'MD': 'dti'}): +def regallDWIToT1wToT1wCommonSpace(folder_path, p, DWI_type="B0FSL", maskType="brain_mask", T1_filepath=None, T1wCommonSpace_filepath="${FSLDIR}/data/standard/MNI152_T1_1mm_brain.nii.gz", T1wCommonSpaceMask_filepath="${FSLDIR}/data/standard/MNI152_T1_1mm_brain_mask.nii.gz", metrics_dic={'_FA': 'dti', 'RD': 'dti', 'AD': 'dti', 'MD': 'dti'}, longitudinal=False): preproc_folder = folder_path + '/subjects/' + p + '/dMRI/preproc/' T1_CommonSpace = os.path.expandvars(T1wCommonSpace_filepath) FA_MNI = os.path.expandvars('${FSLDIR}/data/standard/FSL_HCP1065_FA_1mm.nii.gz') @@ -386,7 +526,7 @@ def regallDWIToT1wToT1wCommonSpace(folder_path, p, DWI_type="AP", 
maskType=None, assert maskType in ["brain_mask_dilated","brain_mask", "wm_mask_MSMT", "wm_mask_AP", "wm_mask_FSL_T1", "wm_mask_Freesurfer_T1", None], "The mask parameter must be one of the following : brain_mask_dilated, brain_mask, wm_mask_MSMT, wm_mask_AP, wm_mask_FSL_T1, wm_mask_Freesurfer_T1, None" - assert DWI_type in ["AP", "WMFOD", "BO"], "The DWI_type parameter must be one of the following : AP, WMFOD, BO" + assert DWI_type in ["AP", "WMFOD", "B0", "B0FSL"], "The DWI_type parameter must be one of the following : AP, WMFOD, B0, B0FSL" mask_path = "" if maskType is not None and os.path.isfile(folder_path + '/subjects/' + p + "/masks/" + p + '_' + maskType + '.nii.gz'): @@ -409,6 +549,7 @@ def regallDWIToT1wToT1wCommonSpace(folder_path, p, DWI_type="AP", maskType=None, DWI_subject = preproc_folder + p + "_dmri_preproc.nii.gz" AP_subject = folder_path + '/subjects/' + p + '/masks/' + p + '_ap.nii.gz' WM_FOD_subject = folder_path + '/subjects/' + p + '/dMRI/ODF/MSMT-CSD/' + p + "_MSMT-CSD_WM_ODF.nii.gz" + DWI_B0_subject = os.path.join(folder_path, 'subjects', p, 'dMRI', 'preproc', p + '_dwiref.nii.gz') reg_path = folder_path + '/subjects/' + p + '/reg/' if not(os.path.exists(reg_path)): @@ -438,6 +579,48 @@ def regallDWIToT1wToT1wCommonSpace(folder_path, p, DWI_type="AP", maskType=None, output_path=folder_path + '/subjects/' + p + '/T1/' + p + '_T1_MNI_FS.nii.gz', binary=False, inverse=False, static_fa_file=T1_CommonSpace) + + if longitudinal != False and longitudinal>0: + print("[Longitudinal Registration] Start of getTransform for T1 to T1_ref") + p_ref = get_patient_ref(root=folder_path, patient=p, suffix_length=longitudinal) + T1_ref_subject = folder_path + '/subjects/' + p_ref + '/T1/' + p_ref + "_T1_brain.nii.gz" + + if os.path.exists(reg_path + 'mapping_T1w_to_T1wRef.p'): + with open(reg_path + 'mapping_T1w_to_T1wRef.p', 'rb') as handle: + mapping_T1w_to_T1wRef = pickle.load(handle) + else: + if not (os.path.exists(reg_path)): + try: + 
os.makedirs(reg_path) + except OSError: + print("Creation of the directory %s failed" % reg_path) + if p == p_ref: + print("[Longitudinal Registration] Skipping, longitudinal registration with the same subject") + mapping_T1w_to_T1wRef = getTransform(T1_ref_subject, T1_subject, mask_file=mask_file, + onlyAffine=True, diffeomorph=False, + sanity_check=False, DWI=False) + else: + mapping_T1w_to_T1wRef = getTransform(T1_ref_subject, T1_subject, mask_file=mask_file, + onlyAffine=False, diffeomorph=False, + sanity_check=False, DWI=False) + with open(reg_path + 'mapping_T1w_to_T1wRef.p', 'wb') as handle: + pickle.dump(mapping_T1w_to_T1wRef, handle, protocol=pickle.HIGHEST_PROTOCOL) + + applyTransform(T1_subject, mapping_T1w_to_T1wRef, mapping_2=None, mask_file=None, + static_file=T1_ref_subject, + output_path=folder_path + '/subjects/' + p + '/T1/' + p + '_space-T1Ref_type-brain_T1.nii.gz', binary=False, + inverse=False, static_fa_file=T1_ref_subject) + + reg_T1RefToCommonSpace_precomputed = folder_path + '/subjects/' + p_ref + '/reg/' + 'mapping_T1w_to_T1wCommonSpace.p' + if not os.path.exists(reg_T1RefToCommonSpace_precomputed): + raise ValueError("No mapping_T1w_to_T1wCommonSpace.p file found in the reg folder of the reference subject") + with open(reg_T1RefToCommonSpace_precomputed, 'rb') as handle: + mapping_T1w_to_T1wCommonSpace = pickle.load(handle) + else: + mapping_T1w_to_T1wRef = None + + + print("Start of getTransform for DWI to T1") if T1wCommonSpaceMask_filepath is not None: mask_static = os.path.expandvars(T1wCommonSpaceMask_filepath) @@ -445,16 +628,24 @@ def regallDWIToT1wToT1wCommonSpace(folder_path, p, DWI_type="AP", maskType=None, mask_static = None if DWI_type == "B0": - regToT1fromB0(reg_path, T1_subject, DWI_subject, mask_path, metrics_dic, folder_path, p, mapping_T1w_to_T1wCommonSpace, T1_CommonSpace, mask_static, FA_MNI) + regToT1fromB0(reg_path, T1_subject, DWI_subject, mask_path, metrics_dic, folder_path, p, mapping_T1w_to_T1wCommonSpace, 
T1_CommonSpace, mask_static, FA_MNI, longitudinal_transform=mapping_T1w_to_T1wRef) elif DWI_type == "WMFOD": - regToT1fromWMFOD(reg_path, T1_subject, WM_FOD_subject, mask_path, metrics_dic, folder_path, p, mapping_T1w_to_T1wCommonSpace, T1_CommonSpace, mask_static, FA_MNI) + regToT1fromWMFOD(reg_path, T1_subject, WM_FOD_subject, mask_path, metrics_dic, folder_path, p, mapping_T1w_to_T1wCommonSpace, T1_CommonSpace, mask_static, FA_MNI, longitudinal_transform=mapping_T1w_to_T1wRef) elif DWI_type == "AP": - regToT1fromAP(reg_path, T1_subject, AP_subject, mask_path, metrics_dic, folder_path, p, mapping_T1w_to_T1wCommonSpace, T1_CommonSpace, mask_static, FA_MNI) + regToT1fromAP(reg_path, T1_subject, AP_subject, mask_path, metrics_dic, folder_path, p, mapping_T1w_to_T1wCommonSpace, T1_CommonSpace, mask_static, FA_MNI, longitudinal_transform=mapping_T1w_to_T1wRef) + elif DWI_type == "B0FSL": + regToT1fromB0FSL(reg_path, T1_subject, DWI_B0_subject, mask_path, metrics_dic, folder_path, p, + mapping_T1w_to_T1wCommonSpace, T1_CommonSpace, mask_static, FA_MNI, + longitudinal_transform=mapping_T1w_to_T1wRef) + else: print("DWI_type not recognized") print("End of DWI registration") + longitudinal_txt = "" if not longitudinal else "_longitudinal" + update_status(folder_path, p, f'regallDWIToT1wToT1wCommonSpace_{DWI_type}{longitudinal_txt}') + def regallFAToMNI(folderpath, p, metrics_dic={'_FA': 'dti', 'RD': 'dti', 'AD': 'dti', 'MD': 'dti'}): diff --git a/elikopy/utils.py b/elikopy/utils.py index cbce818..2a54a63 100644 --- a/elikopy/utils.py +++ b/elikopy/utils.py @@ -47,6 +47,7 @@ def submit_job(job_info): slurm_cmd.append("--%s=%s" % (key, value)) if script: slurm_cmd.append(job_info["script"]) + slurm_cmd.append("--hint=multithread") print("[INFO] " + datetime.datetime.now().strftime("%d.%b %Y %H:%M:%S") + ": Generated slurm batch command: '%s'" % slurm_cmd) @@ -678,8 +679,7 @@ def synb0DisCo(folder_path, topuppath, patient_path, static_files_path=None, sta # Skull strip T1 - 
bet = "bet " + synb0path + "/T1.nii.gz " + synb0path + \ - "/T1_mask.nii.gz -R" # + " -f 0.4 -g -0.2" + bet = shutil.copyfile(os.path.join(folder_path,"subjects",patient_path,"T1",f"{patient_path}_T1_brain.nii.gz"), synb0path + "/T1_mask.nii.gz") # epi_reg distorted b0 to T1; wont be perfect since B0 is distorted @@ -692,7 +692,7 @@ def synb0DisCo(folder_path, topuppath, patient_path, static_files_path=None, sta "/b0.nii.gz " + synb0path + "/epi_reg_d.mat -fsl2ras -oitk " + \ synb0path + "/epi_reg_d_ANTS.txt" - # ANTs register T1 to atla + # ANTs register T1 to atlas antsRegistrationSyNQuick = "antsRegistrationSyNQuick.sh -d 3 -f " + static_files_path + \ "/atlases/mni_icbm152_t1_tal_nlin_asym_09c.nii.gz -m " + \ synb0path + "/T1.nii.gz -o " + synb0path + "/ANTS" @@ -1402,7 +1402,7 @@ def merge_all_reports(folder_path): :param folder_path: Path to the root folder of the study. """ - from PyPDF2 import PdfFileWriter, PdfFileReader + from pypdf import PdfWriter, PdfReader import json import os @@ -1410,17 +1410,17 @@ def merge_all_reports(folder_path): with open(dest_success, 'r') as f: patient_list = json.load(f) - writer = PdfFileWriter() + writer = PdfWriter() for p in patient_list: patient_path = os.path.splitext(p)[0] pdf_path = folder_path + '/subjects/' + patient_path + '/quality_control.pdf' if (os.path.exists(pdf_path)): - reader = PdfFileReader(pdf_path) - for i in range(reader.numPages): - page = reader.getPage(i) - page.compressContentStreams() - writer.addPage(page) + reader = PdfReader(pdf_path) + for i in range(len(reader.pages)): + page = reader.pages[i] + page.compress_content_streams() + writer.add_page(page) with open(folder_path + '/quality_control_all_tmp.pdf', 'wb') as f: writer.write(f) @@ -1444,7 +1444,7 @@ def merge_all_specific_reports(folder_path, merge_wm_report=False, merge_legacy_ :param merge_wm_report: Select wm report. :param merge_legacy_report: Select legacy report. 
""" - from PyPDF2 import PdfFileWriter, PdfFileReader + from pypdf import PdfWriter, PdfReader import json import os @@ -1453,10 +1453,10 @@ def merge_all_specific_reports(folder_path, merge_wm_report=False, merge_legacy_ patient_list = json.load(f) if merge_wm_report: - wm_writer = PdfFileWriter() + wm_writer = PdfWriter() if merge_legacy_report: - legacy_writer = PdfFileWriter() + legacy_writer = PdfWriter() for p in patient_list: patient_path = os.path.splitext(p)[0] @@ -1465,21 +1465,21 @@ def merge_all_specific_reports(folder_path, merge_wm_report=False, merge_legacy_ pdf_path = folder_path + '/subjects/' + patient_path + \ '/masks/quality_control/qc_report.pdf' if (os.path.exists(pdf_path)): - reader = PdfFileReader(pdf_path) - for i in range(reader.numPages): - page = reader.getPage(i) - page.compressContentStreams() - wm_writer.addPage(page) + reader = PdfReader(pdf_path) + for i in range(len(reader.pages)): + page = reader.pages[i] + page.compress_content_streams() + wm_writer.add_page(page) if merge_legacy_report: pdf_path = folder_path + '/subjects/' + patient_path + \ '/report/report_' + patient_path + '.pdf' if (os.path.exists(pdf_path)): - reader = PdfFileReader(pdf_path) - for i in range(reader.numPages): - page = reader.getPage(i) - page.compressContentStreams() - legacy_writer.addPage(page) + reader = PdfReader(pdf_path) + for i in range(len(reader.pages)): + page = reader.pages[i] + page.compress_content_streams() + legacy_writer.add_page(page) if merge_wm_report: with open(folder_path + '/wm_mask_qc_report_all.pdf', 'wb') as f: @@ -1540,7 +1540,7 @@ def peak_to_tensor(peaks, norm=None, pixdim=[2, 2, 2]): try: if norm is not None: - D = deltas_to_D(dx, dy, dz, vec_len=scaleFactor*norm[xyz]) + D = deltas_to_D(dx, dy, dz, vec_len=scaleFactor/norm[xyz]) else: D = deltas_to_D(dx, dy, dz, vec_len=scaleFactor) except np.linalg.LinAlgError: @@ -1671,6 +1671,7 @@ def mrtrix_fod_to_dipy(sh): return sh + def clean_mask(mask): from skimage.morphology import 
flood @@ -1694,14 +1695,50 @@ def clean_mask(mask): center = tuple([np.average(indices) for indices in np.where(mask == 1)]) center = tuple([int(point) for point in center]) - mask = flood(mask, center) + mask = flood(mask, center, connectivity=1) mask_cleaned = np.zeros((mask.shape)) mask_cleaned[mask] = 1 - + mask_cleaned = mask_cleaned[tuple(slice(1, dim - 1) for dim in mask_cleaned.shape)] return mask_cleaned + +def get_acquisition_view(affine) -> str: + ''' + Returns the acquisition view corresponding to the affine. + + Parameters + ---------- + :param affine: 2-D array of shape (4,4). Array containing the affine information. + :return str: String of the acquisition view, either 'axial', 'sagittal', 'coronal' or 'oblique'. + ''' + + affine = affine[:3, :3].copy() + + sum_whole_aff = np.sum(abs(affine)) + sum_diag_aff = np.sum(np.diag(abs(affine))) + sum_extra_diag_aff = sum_whole_aff - sum_diag_aff + + a = affine[0, 0] + b = affine[1, 1] + c = affine[2, 2] + d = affine[0, 2] + e = affine[1, 0] + f = affine[2, 1] + g = affine[0, 1] + h = affine[1, 2] + i = affine[2, 0] + + if (a != 0 and b != 0 and c != 0 and sum_extra_diag_aff == 0): + return "axial" + elif (d != 0 and e != 0 and f != 0 and sum_diag_aff == 0): + return "sagittal" + elif (g != 0 and h != 0 and i != 0 and sum_diag_aff == 0): + return "coronal" + else: + return "oblique" + def vbm(folder_path, grp1, grp2, randomise_numberofpermutation=5000, metrics_dic={'FA': 'dti_CommonSpace_T1_AP'}, core_count=1, maskType="brain_mask_dilated"): """ @@ -1914,3 +1951,67 @@ def vbm(folder_path, grp1, grp2, randomise_numberofpermutation=5000, metrics_dic vbm_log.close() + +def get_patient_ref(root: str, patient: str, suffix_length: int = 2): + """ + Retrieve the reference patient from the patient list based on input patient ID. + + Args: + root (str): Root directory containing the patient list JSON file. + patient (str): Patient ID.
+ suffix_length (int): Number of characters to trim from the patient ID for non-BIDS matching. + + Returns: + str: Reference patient ID. + """ + # Load patient list + patient_list_path = os.path.join(root, 'subjects', 'subj_list.json') + with open(patient_list_path, 'r') as f: + patient_list = json.load(f) + + # Determine if the input patient uses BIDS format + is_bids = "sub-" in patient and "ses-" in patient + + # Extract ses attributes for sorting + def extract_ses(patient_id): + ses_attr = next((attr for attr in patient_id.split("_") if "ses-" in attr), "") + return ses_attr + + if is_bids: + # Sort patient list by ses attribute + patient_list.sort(key=extract_ses) + patient_sub = next((patient_attr for patient_attr in patient.split("_") if "sub-" in patient_attr), None) + # Process BIDS patient + for p in patient_list: + curr_patient_sub = next((patient_attr for patient_attr in p.split("_") if "sub-" in patient_attr), None) + if patient_sub == curr_patient_sub: # Match exact BIDS ID + return p + raise ValueError(f"No reference found for BIDS patient {patient}.") + else: + # Sort the patient list + patient_list.sort() + + # Find the index of the patient in the list + patient_index = patient_list.index(patient) + # Process non-BIDS patient + trimmed_names = [p[:-suffix_length] if len(p) > suffix_length else p for p in patient_list] + + unique_names, unique_indices = np.unique(trimmed_names, return_index=True) + + relative_positions = unique_indices - patient_index + relative_positions[relative_positions > 0] -= len(patient_list) + ref_idx = np.max(relative_positions) + ref_index = (patient_index + ref_idx) % len(patient_list) + return patient_list[ref_index] + +def update_status(folder_path, p, step): + json_status_file = os.path.join(folder_path, "subjects", p, f"{p}_status.json") + if os.path.exists(json_status_file): + with open(json_status_file, "r") as f: + patient_status = json.load(f) + else: + patient_status = {} + patient_status[step] = True + + with 
open(json_status_file, "w") as f: + json.dump(patient_status, f, indent=6) diff --git a/folder_structure.txt b/folder_structure.txt index 8d7fc70..ff81ac7 100644 --- a/folder_structure.txt +++ b/folder_structure.txt @@ -1,5 +1,6 @@ -CASE/ -CONTROL/ +data_1/ +data_2/ +data_/ T1/ EXPORT_**/ TBSS/ tbss_outputs diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..0e7badb --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,52 @@ +[tool.poetry] +name = "elikopy" +version = "0.4.4" +description = "A set of tools for analysing dMRI" +authors = [ + "qdessain ", + "msimon ", + "ndelinte " +] +license = "AGPL-3.0" # Update this to match the exact SPDX identifier if different +readme = "README.md" +homepage = "https://github.com/Hyedryn/elikopy" +repository = "https://github.com/Hyedryn/elikopy" +documentation = "https://github.com/Hyedryn/elikopy" # Update if you have specific documentation URL + +classifiers = [ + "License :: OSI Approved :: GNU Affero General Public License v3", + "Operating System :: OS Independent", + 'Programming Language :: Python :: 3 :: Only', + "Intended Audience :: Science/Research", +] + + +[tool.poetry.dependencies] +python = ">=3.8,<4.0" +numpy = "^1.24" +dipy = "^1.7.0" +torch = "^2.0.1" +pypdf = "^3.4" +fpdf = "^1.7.2" +matplotlib = "^3.7" +future = ">=0.15" +unravel-python = "^1.4" +scikit-image = ">=0.20" +scipy = "^1.10" +pandas = "^1.3" +torchvision = ">=0.15" +scikit-learn = "^1.3" +nibabel = "^5.0.0" +numba = ">=0.57.0" +surfa = ">=0.5.0" +microstructure-fingerprinting-rensonnetg = { git = "https://github.com/rensonnetg/microstructure_fingerprinting.git" } +dmipy = { git = "https://github.com/Hyedryn/dmipy.git" } + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.poetry.dev-dependencies] +sphinx = "^3.0" # Use the latest version compatible with your project +sphinx-autodoc-typehints = "^1.11" # Optional, for better type hint support +sphinx-rtd-theme = ">=0.5" # 
Optional, for nice docs rendering diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index d29813c..0000000 --- a/requirements.txt +++ /dev/null @@ -1,23 +0,0 @@ -dipy> -dmipy -scipy -numpy -cvxpy -nibabel -numba -pandas -pathos -boto -sphinx_rtd_theme -sphinx -matplotlib -torch -torchvision -future -fpdf -scikit-image -PyPDF2 -lxml -TIME-python -scikit-learn -scikit-image diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index a96f15b..0000000 --- a/setup.cfg +++ /dev/null @@ -1,4 +0,0 @@ -[metadata] -name = elikopy -long_description = file: README.md -long_description_content_type = text/markdown diff --git a/setup.py b/setup.py deleted file mode 100644 index 6b5377c..0000000 --- a/setup.py +++ /dev/null @@ -1,46 +0,0 @@ -import setuptools - -import io - -try: - with io.open("README.md", "r", encoding="utf-8") as fh: - long_description = fh.read() -except (IOError, OSError) as e: - print(e.errno) - -setuptools.setup( - name="elikopy", - packages = ['elikopy'], - license='agpl-3.0', - version="0.2", - author="qdessain, msimon", - author_email="quentin.dessain@student.uclouvain.be, mathieu.simon@student.uclouvain.be", - description="A set of tools for analysing dMRI", - url="https://github.com/Hyedryn/elikopy", - download_url = 'https://github.com/Hyedryn/elikopy/archive/refs/tags/v0.2.2.tar.gz', - #packages=setuptools.find_packages(), - classifiers=[ - "Programming Language :: Python :: 3", - "License :: OSI Approved :: GNU Affero General Public License v3", - "Operating System :: OS Independent", - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - ], - python_requires='>=3.8', - install_requires=[ - 'numpy', - 'dipy', - 'torch', - #'dmipy', - 'lxml', - 'PyPDF2', - 'fpdf', - 'matplotlib', - 'cvxpy', - 'future', - 'TIME-python', - 'scikit-image' - ], -)