From 025c9e88da8b55a057d97794f0d4199bd92cd13e Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Wed, 29 Oct 2025 09:40:05 +1100 Subject: [PATCH 01/16] Major refactoring to match future MRtrix 3.1.0 - Divide the code across multiple source files. - Move source files to new location according to MRtrix 3.1.0 filesystem arrangement. - Build against development branch of MRtrix3 (future version 3.1.0). This includes incorporation of multiple upstream features, such as metadata handling fixes and dwicat dealing with subtle differences in voxel grids. - Make use of dwidenoise2. - Perform gradient non-linearity distortion correction if the requisite deformation field is made available. --- Dockerfile | 348 ++++--- mrtrix3_connectome/__init__.py | 10 + mrtrix3_connectome/anat/__init__.py | 0 mrtrix3_connectome/anat/get.py | 343 +++++++ mrtrix3_connectome/anat/shared.py | 25 + mrtrix3_connectome/citations.py | 294 ++++++ mrtrix3_connectome/execute.py | 119 +++ mrtrix3_connectome/group/__init__.py | 0 mrtrix3_connectome/group/run.py | 522 ++++++++++ mrtrix3_connectome/mrtrix3_connectome.py | 8 + mrtrix3_connectome/participant/__init__.py | 0 mrtrix3_connectome/participant/run.py | 908 +++++++++++++++++ mrtrix3_connectome/participant/shared.py | 401 ++++++++ mrtrix3_connectome/preproc/__init__.py | 0 mrtrix3_connectome/preproc/run.py | 1048 ++++++++++++++++++++ mrtrix3_connectome/preproc/shared.py | 97 ++ mrtrix3_connectome/sessions.py | 113 +++ mrtrix3_connectome/usage.py | 287 ++++++ version | 2 +- 19 files changed, 4398 insertions(+), 127 deletions(-) create mode 100644 mrtrix3_connectome/__init__.py create mode 100644 mrtrix3_connectome/anat/__init__.py create mode 100644 mrtrix3_connectome/anat/get.py create mode 100644 mrtrix3_connectome/anat/shared.py create mode 100644 mrtrix3_connectome/citations.py create mode 100644 mrtrix3_connectome/execute.py create mode 100644 mrtrix3_connectome/group/__init__.py create mode 100644 mrtrix3_connectome/group/run.py create mode 
100644 mrtrix3_connectome/mrtrix3_connectome.py create mode 100644 mrtrix3_connectome/participant/__init__.py create mode 100644 mrtrix3_connectome/participant/run.py create mode 100644 mrtrix3_connectome/participant/shared.py create mode 100644 mrtrix3_connectome/preproc/__init__.py create mode 100644 mrtrix3_connectome/preproc/run.py create mode 100644 mrtrix3_connectome/preproc/shared.py create mode 100644 mrtrix3_connectome/sessions.py create mode 100644 mrtrix3_connectome/usage.py diff --git a/Dockerfile b/Dockerfile index 70a19d1..3bf47c9 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,127 +1,239 @@ -FROM ubuntu:18.04 -MAINTAINER Robert E. Smith +ARG MAKE_JOBS="1" +ARG DEBIAN_FRONTEND="noninteractive" -# Core system capabilities required -RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y \ - bc \ +FROM ubuntu:24.04 AS base + +# Core system capabilities that should be present in all images +RUN apt-get update && apt-get install -y \ build-essential \ curl \ - dc \ - git \ - libegl1-mesa-dev \ - libopenblas-dev \ - nano \ - perl-modules-5.26 \ python3 \ tar \ - tcsh \ - tzdata \ unzip \ wget -# PPA for newer version of nodejs, which is required for bids-validator -RUN curl -sL https://deb.nodesource.com/setup_12.x -o nodesource_setup.sh && \ - bash nodesource_setup.sh && \ - rm -f nodesource_setup.sh && \ - apt-get install -y nodejs - -# NeuroDebian setup -COPY neurodebian.gpg /neurodebian.gpg -RUN wget -qO- http://neuro.debian.net/lists/bionic.au.full | \ - tee /etc/apt/sources.list.d/neurodebian.sources.list && \ - apt-key add /neurodebian.gpg && \ - apt-get update - -# Additional dependencies for MRtrix3 compilation -RUN apt-get update && apt-get install -y \ - libeigen3-dev \ - libfftw3-dev \ - libpng-dev \ - libtiff5-dev \ - zlib1g-dev +FROM base AS aal-downloader +WORKDIR /opt/aal +RUN wget https://data.kg.ebrains.eu/zip?container=https://data-proxy.ebrains.eu/api/v1/buckets/p4791e-ext-d000035_AAL1Atlas_pub?prefix=Release2018_SPM12 \ 
+ -O aal1_for_SPM12.zip && \ + unzip aal1_for_SPM12.zip && \ + rm -f aal1_for_SPM12.zip && \ + unzip aal_for_SPM12.zip && \ + rm -f aal_for_SPM12.zip && \ + mv aal_for_SPM12/* . && \ + rmdir aal_for_SPM12 && \ + rm -rf __MACOSX && \ + wget --no-check-certificate -qO- http://www.gin.cnrs.fr/wp-content/uploads/aal2_for_SPM12.tar.gz | \ + tar zx --strip-components=1 -# Neuroimaging software / data dependencies -RUN wget -qO- https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.1.1/freesurfer-linux-centos8_x86_64-7.1.1.tar.gz | \ - tar zx -C /opt \ - --exclude='freesurfer/trctrain' \ - --exclude='freesurfer/subjects/fsaverage_sym' \ - --exclude='freesurfer/subjects/fsaverage3' \ - --exclude='freesurfer/subjects/fsaverage4' \ - --exclude='freesurfer/subjects/fsaverage6' \ - --exclude='freesurfer/subjects/cvs_avg35' \ - --exclude='freesurfer/subjects/cvs_avg35_inMNI152' \ - --exclude='freesurfer/subjects/bert' \ - --exclude='freesurfer/subjects/V1_average' \ - --exclude='freesurfer/average/mult-comp-cor' \ - --exclude='freesurfer/lib/cuda' \ - --exclude='freesurfer/lib/qt' +FROM base AS adhd200-downloader +WORKDIR /opt +RUN wget -qO- http://www.nitrc.org/frs/download.php/5906/ADHD200_parcellations.tar.gz | \ + tar zx && \ + rm -f ADHD200_parcellations.tar.gz + +FROM base AS ants-installer +WORKDIR /opt/ants +RUN wget -q https://github.com/ANTsX/ANTs/releases/download/v2.6.2/ants-2.6.2-ubuntu18.04-X64-gcc.zip && \ + unzip ants-2.6.2-ubuntu18.04-X64-gcc.zip + +FROM base AS brainnetome-downloader +WORKDIR /opt/brainnetome +RUN \ + # freesurfer/average/rh.BN_Atlas.gcs + #( wget -q "http://ddl.escience.cn/f/IiyU?func=download&rid=8135438" -O rh.BN_Atlas.gcs || \ + ( curl https://pan.cstcloud.cn/unode/stor/downloadByUrl?downloadId=1.eyJidWNrZXQiOiJkZWZhdWx0IiwibGVuIjozODIzMDY1Nywic2l6ZSI6MzgyMzA2NTcsInBvcyI6MCwibmFtZSI6InJoLkJOX0F0bGFzLmdjcyIsImN0aW1lIjoxNzYxNjE3OTkyLCJrZXkiOiJzTEhJaHcxYkZYZ0RTeGRld3I0ZWtxRjJuQ0VBQUFBQ1IxcUIiLCJhZ2UiOjg2NDAwfQ.3670708009 \ + -o 
rh.BN_Atlas.gcs || \ + wget -q "https://osf.io/e6zkg/download" -O rh.BN_Atlas.gcs) && \ + # freesurfer/average/lh.BN_Atlas.gcs + #( wget -q "http://ddl.escience.cn/f/IiyP?func=download&rid=8135433" -O lh.BN_Atlas.gcs || \ + ( curl https://pan.cstcloud.cn/unode/stor/downloadByUrl?downloadId=1.eyJidWNrZXQiOiJkZWZhdWx0IiwibGVuIjozNjQ5NzM5OSwic2l6ZSI6MzY0OTczOTksInBvcyI6MCwibmFtZSI6ImxoLkJOX0F0bGFzLmdjcyIsImN0aW1lIjoxNzYxNjE3OTEwLCJrZXkiOiJsMzFISVJNZ3FScXZlREg4S2RPVjRqUXNVRnNBQUFBQ0xPZjMiLCJhZ2UiOjg2NDAwfQ.4292748252 \ + -o lh.BN_Atlas.gcs || \ + wget -q "https://osf.io/af9ut/download" -O lh.BN_Atlas.gcs ) && \ + # freesurfer/average/BN_Atlas_subcortex.gca + #( wget -q "http://ddl.escience.cn/f/PC7Q?func=download&rid=9882718" -O BN_Atlas_subcortex.gca || \ + ( curl https://pan.cstcloud.cn/unode/stor/downloadByUrl?downloadId=1.eyJidWNrZXQiOiJkZWZhdWx0IiwibGVuIjozNzAxODQwOCwic2l6ZSI6MzcwMTg0MDgsInBvcyI6MCwibmFtZSI6IkJOX0F0bGFzX3N1YmNvcnRleC5nY2EiLCJjdGltZSI6MTc2MTYxODA2Niwia2V5IjoicEhiTXdwbjBKRUd4cmJ2Z0NScDFTdzNsS2RrQUFBQUNOTnNvIiwiYWdlIjo4NjQwMH0.1891919902 \ + -o BN_Atlas_subcortex.gca || \ + wget -q "https://osf.io/k2cd8/download" -O BN_Atlas_subcortex.gca ) && \ + # brainnetome/BN_Atlas_246_LUT.txt + #( wget -q "http://ddl.escience.cn/f/PC7O?func=download&rid=9882716" -O BN_Atlas_246_LUT.txt || \ + ( curl https://pan.cstcloud.cn/unode/stor/downloadByUrl?downloadId=1.eyJidWNrZXQiOiJkZWZhdWx0IiwibGVuIjo1ODAyLCJzaXplIjo1ODAyLCJwb3MiOjAsIm5hbWUiOiJCTl9BdGxhc18yNDZfTFVULnR4dCIsImN0aW1lIjoxNzYxNjE4MDkwLCJrZXkiOiJ1bGV4dG1Kd0hJeGNjdVFDZWwzdTZnUVJXUThBQUJhcSIsImFnZSI6ODY0MDAsInBhcnRPbmUiOnsic2l6ZSI6NTgwMiwiZm4iOiJoa194MmRaLVFYZy0wLTU4MDIiLCJjcmMzMiI6ODk4MzQwOTA0LCJiaWQiOjEsImNpZCI6MX19.3302844208 \ + -o BN_Atlas_246_LUT.txt || \ + wget -q "https://osf.io/eb7pm/download" -O BN_Atlas_246_LUT.txt ) && \ + # brainnetome/BNA_MPM_thr25_1.25mm.nii.gz + #( wget -q "http://ddl.escience.cn/f/Bvhg?func=download&rid=6516020" -O BNA_MPM_thr25_1.25mm.nii.gz || \ + ( curl 
https://pan.cstcloud.cn/unode/stor/downloadByUrl?downloadId=1.eyJidWNrZXQiOiJkZWZhdWx0IiwibGVuIjoxNzEyMzMsInNpemUiOjE3MTIzMywicG9zIjowLCJuYW1lIjoiQk5BX01QTV90aHIyNV8xLjI1bW0ubmlpLmd6IiwiY3RpbWUiOjE3NjE2MTk3NzcsImtleSI6IlFrbzBFajRIRTEyS1U4N0tBOXdVZlVMX1RIY0FBcHpoIiwiYWdlIjo4NjQwMCwicGFydE9uZSI6eyJzaXplIjoxNzEyMzMsImZuIjoibDlsX3ZVVkRRMDAtMC0xNzEyMzMiLCJjcmMzMiI6MjY1NjI1NTM0NywiYmlkIjoxLCJjaWQiOjF9fQ.2692886444 \ + -o BNA_MPM_thr25_1.25mm.nii.gz || \ + wget -q "https://osf.io/dbqep/download" -O BNA_MPM_thr25_1.25mm.nii.gz ) + # cp /opt/brainnetome/BN_Atlas_246_LUT.txt /opt/freesurfer/ + +FROM base AS freesurfer-installer +RUN wget -qO- https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.4.1/freesurfer-linux-centos8_x86_64-7.4.1.tar.gz | \ + tar zx -C /opt \ + --exclude='freesurfer/trctrain' \ + --exclude='freesurfer/subjects/fsaverage_sym' \ + --exclude='freesurfer/subjects/fsaverage3' \ + --exclude='freesurfer/subjects/fsaverage4' \ + --exclude='freesurfer/subjects/fsaverage6' \ + --exclude='freesurfer/subjects/cvs_avg35' \ + --exclude='freesurfer/subjects/cvs_avg35_inMNI152' \ + --exclude='freesurfer/subjects/bert' \ + --exclude='freesurfer/subjects/V1_average' \ + --exclude='freesurfer/average/mult-comp-cor' \ + --exclude='freesurfer/lib/cuda' \ + --exclude='freesurfer/lib/qt' +RUN wget -q "https://s3-eu-west-1.amazonaws.com/pfigshare-u-files/5528816/lh.HCPMMP1.annot" \ + -O /opt/freesurfer/subjects/fsaverage/label/lh.HCPMMP1.annot && \ + wget -q "https://s3-eu-west-1.amazonaws.com/pfigshare-u-files/5528819/rh.HCPMMP1.annot" \ + -O /opt/freesurfer/subjects/fsaverage/label/rh.HCPMMP1.annot RUN echo "cHJpbnRmICJyb2JlcnQuc21pdGhAZmxvcmV5LmVkdS5hdVxuMjg1NjdcbiAqQ3FLLjFwTXY4ZE5rXG4gRlNvbGRZRXRDUFZqNlxuIiA+IC9vcHQvZnJlZXN1cmZlci9saWNlbnNlLnR4dAo=" | base64 -d | sh -RUN apt-get install -y ants=2.2.0-1ubuntu1 -# FSL installer appears to now be ready for use with version 6 -# eddy is also now included in FSL6 + +FROM base AS fsl-installer +# Installer script needs to 
not reside in destination installation location +WORKDIR / RUN wget -q http://fsl.fmrib.ox.ac.uk/fsldownloads/fslinstaller.py && \ - chmod 775 fslinstaller.py && \ - /fslinstaller.py -d /opt/fsl -V 6.0.6 && \ - rm -f /fslinstaller.py -#RUN which immv || ( echo "FSLPython not properly configured; re-running" && rm -rf /opt/fsl/fslpython && /opt/fsl/etc/fslconf/fslpython_install.sh -f /opt/fsl || ( cat /tmp/fslpython*/fslpython_miniconda_installer.log && exit 1 ) ) + chmod 775 fslinstaller.py && \ + python3 /fslinstaller.py -d /opt/fsl -V 6.0.7.18 + +FROM base AS mni512-downloader +WORKDIR /opt +RUN wget -q https://github.com/AlistairPerry/CCA/raw/master/parcellations/512inMNI.nii + +#FROM base AS mrtrix-30x-builder +#WORKDIR /opt/mrtrix3 +#RUN apt-get install -y \ +# build-essential \ +# git \ +# libeigen3-dev \ +# libfftw3-dev \ +# zlib1g-dev +## Commitish is 3.0.5 plus relevant changes for dwicat and -export_grad_fsl hotfix +#RUN git clone https://github.com/MRtrix3/mrtrix3.git . && \ +# git checkout 906730011b5e21f1449cc7d60ec145375de07479 && \ +# python3 configure -nogui && \ +# python3 build -persistent -nopaginate && \ +# git describe --tags > /mrtrix3_version && \ +# rm -rf .git/ cmd/ core/ src/ testing/ tmp/ && \ +# # Some extra lookup tables for parcellations in use +# wget -q "https://osf.io/v8n5g/download" -O share/mrtrix3/labelconvert/Yeo2011_7N_split.txt && \ +# wget -q "https://osf.io/ug2ef/download" -O share/mrtrix3/labelconvert/Yeo2011_17N_split.txt +# +#FROM base AS mrtrix-31x-builder +#WORKDIR /opt/dev +#RUN apt-get install -y \ +# build-essential \ +# cmake \ +# git \ +# libfftw3-dev \ +# ninja-build \ +# pkg-config \ +# zlib1g-dev +## Committish is tip of MRtrix3 Issue#3029 as at 2025-06-19 +## TODO Is it possible to do a limited build followed by a cmake --install? +## TODO This branch doesn't yet have targets per Python command; +## just remove all unwanted Python commands +#RUN git clone https://github.com/MRtrix3/mrtrix3.git . 
&& \ +# git checkout 3bb025a0f8b9edbed187510fa81c9cce422311d3 && \ +# cmake -Bbuild -GNinja --preset=release -DMRTRIX_BUILD_GUI=OFF && \ +# cmake --build build --target dwidenoise2 && \ +# #cmake --build build --target dwibiasnormmask && \ +# #cmake --build build --target dwicat +# rm -f build/bin/5ttgen build/bin/dwi2response build/bin/dwibiascorrect build/bin/dwinormalise build/bin/population_template build/bin/dwifslpreproc build/bin/dwigradcheck build/bin/dwishellmath build/bin/for_each build/bin/labelsgmfirst build/bin/mask2glass build/bin/mrtrix_cleanup build/bin/responsemean + +FROM base AS mrtrix-builder +WORKDIR /opt/mrtrix3 +RUN apt-get install -y \ + build-essential \ + cmake \ + git \ + libfftw3-dev \ + ninja-build \ + pkg-config \ + zlib1g-dev +# Committish is tip of MRtrix3 #3029 as at 2025-10-21 +RUN git clone https://github.com/MRtrix3/mrtrix3.git . && \ + git checkout 26965d57b374a733ac0c583d3b92bad17923128a +# Main tip as at 2025-10-21 +RUN git clone https://github.com/Lestropie/dwidenoise2.git dwidenoise2 && \ + cd dwidenoise2 && \ + git checkout 37c9b70cf7e1c67ac846311ddd0df925424797f5 && \ + cd ../ && \ + cp -r dwidenoise2/cpp . 
+# Since external project compilation may not yet be working on 3.1.0, +# just dump the code contents of this repository into the appropriate location, +# and the build process of MRtrix3 itself should deal with the issue +# TODO Could also make use of cmake --install +COPY mrtrix3_connectome/ /opt/mrtrix3/python/mrtrix3/commands/mrtrix3_connectome +RUN cmake -Bbuild -GNinja --preset=release -DMRTRIX_BUILD_GUI=OFF && \ + cmake --build build + +FROM base AS robex-installer +WORKDIR /opt/robex RUN wget -qO- "https://www.nitrc.org/frs/download.php/5994/ROBEXv12.linux64.tar.gz//?i_agree=1&download_now=1" | \ - tar zx -C /opt -RUN npm install -gq bids-validator@1.5.3 + tar zx -# apt cleanup to recover as much space as possible -RUN apt-get remove -y libegl1-mesa-dev && \ +FROM base AS yeo2011-downloader +WORKDIR /opt/Yeo2011 +RUN wget -qO- "https://github.com/ThomasYeoLab/CBIG/archive/v0.11.1-Wu2017_RegistrationFusion.tar.gz" | \ + tar zx && \ + mkdir -p freesurfer/subjects/fsaverage5/label && \ + cp CBIG-0.11.1-Wu2017_RegistrationFusion/stable_projects/brain_parcellation/Yeo2011_fcMRI_clustering/1000subjects_reference/Yeo_JNeurophysiol11_SplitLabels/fsaverage5/label/*h.Yeo2011_*Networks_N1000.split_components.annot freesurfer/subjects/fsaverage5/label/ && \ + cp CBIG-0.11.1-Wu2017_RegistrationFusion/stable_projects/brain_parcellation/Yeo2011_fcMRI_clustering/1000subjects_reference/Yeo_JNeurophysiol11_SplitLabels/project_to_individual/Yeo2011_*networks_Split_Components_LUT.txt freesurfer/ && \ + cp CBIG-0.11.1-Wu2017_RegistrationFusion/stable_projects/brain_parcellation/Yeo2011_fcMRI_clustering/1000subjects_reference/Yeo_JNeurophysiol11_SplitLabels/MNI152/Yeo2011_*Networks_N1000.split_components.FSL_MNI152_*mm.nii.gz . && \ + cp CBIG-0.11.1-Wu2017_RegistrationFusion/stable_projects/brain_parcellation/Yeo2011_fcMRI_clustering/1000subjects_reference/Yeo_JNeurophysiol11_SplitLabels/MNI152/*Networks_ColorLUT_freeview.txt . 
&& \ + rm -rf CBIG-0.11.1-Wu2017_RegistrationFusion + +FROM base AS final +# Install runtime system dependencies +RUN apt-get -qq update && \ + apt-get install -yq --no-install-recommends \ + bc \ + dc \ + libfftw3-single3 \ + libfftw3-double3 \ + nano \ + nodejs \ + npm \ + python3 \ + tcsh && \ + # apt cleanup to recover as much space as possible apt-get autoremove -y && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* -# Download additional data for neuroimaging software, e.g. templates / atlases -RUN wget -q https://object.cscs.ch/v1/AUTH_4791e0a3b3de43e2840fe46d9dc2b334/ext-d000035_AAL1Atlas_pub/Release2018_SPM12/aal_for_SPM12.zip && \ - unzip aal_for_SPM12.zip -d /opt && \ - rm -f aal_for_SPM12.zip && \ - wget --no-check-certificate -qO- http://www.gin.cnrs.fr/wp-content/uploads/aal2_for_SPM12.tar.gz | \ - tar zx -C /opt -#RUN wget -q http://www.nitrc.org/frs/download.php/4499/sri24_anatomy_nifti.zip -O sri24_anatomy_nifti.zip && \ -# unzip -qq -o sri24_anatomy_nifti.zip -d /opt/ && \ -# rm -f sri24_anatomy_nifti.zip -#RUN wget -q http://www.nitrc.org/frs/download.php/4502/sri24_anatomy_unstripped_nifti.zip -O sri24_anatomy_unstripped_nifti.zip && \ -# unzip -qq -o sri24_anatomy_unstripped_nifti.zip -d /opt/ && \ -# rm -f sri24_anatomy_unstripped_nifti.zip -#RUN wget -q http://www.nitrc.org/frs/download.php/4508/sri24_labels_nifti.zip -O sri24_labels_nifti.zip && \ -# unzip -qq -o sri24_labels_nifti.zip -d /opt/ && \ -# rm -f sri24_labels_nifti.zip -RUN wget -q https://github.com/AlistairPerry/CCA/raw/master/parcellations/512inMNI.nii -O /opt/512inMNI.nii -#RUN wget -q https://ndownloader.figshare.com/files/3133832 -O oasis.zip && \ -# unzip -qq oasis.zip -d /opt/ && \ -# rm -f oasis.zip -RUN wget -qO- http://www.nitrc.org/frs/download.php/5906/ADHD200_parcellations.tar.gz | \ - tar zx -C /opt -RUN wget -q "https://s3-eu-west-1.amazonaws.com/pfigshare-u-files/5528816/lh.HCPMMP1.annot" \ - -O 
/opt/freesurfer/subjects/fsaverage/label/lh.HCPMMP1.annot && \ - wget -q "https://s3-eu-west-1.amazonaws.com/pfigshare-u-files/5528819/rh.HCPMMP1.annot" \ - -O /opt/freesurfer/subjects/fsaverage/label/rh.HCPMMP1.annot -RUN mkdir /opt/brainnetome && \ - ( wget -q "http://ddl.escience.cn/f/IiyU?func=download&rid=8135438" -O /opt/freesurfer/average/rh.BN_Atlas.gcs || \ - wget -q "https://osf.io/e6zkg/download" -O /opt/freesurfer/average/rh.BN_Atlas.gcs ) && \ - ( wget -q "http://ddl.escience.cn/f/IiyP?func=download&rid=8135433" -O /opt/freesurfer/average/lh.BN_Atlas.gcs || \ - wget -q "https://osf.io/af9ut/download" -O /opt/freesurfer/average/lh.BN_Atlas.gcs ) && \ - ( wget -q "http://ddl.escience.cn/f/PC7Q?func=download&rid=9882718" -O /opt/freesurfer/average/BN_Atlas_subcortex.gca || \ - wget -q "https://osf.io/k2cd8/download" -O /opt/freesurfer/average/BN_Atlas_subcortex.gca ) && \ - ( wget -q "http://ddl.escience.cn/f/PC7O?func=download&rid=9882716" -O /opt/brainnetome/BN_Atlas_246_LUT.txt || \ - wget -q "https://osf.io/eb7pm/download" -O /opt/brainnetome/BN_Atlas_246_LUT.txt ) && \ - ( wget -q "http://ddl.escience.cn/f/Bvhg?func=download&rid=6516020" -O /opt/brainnetome/BNA_MPM_thr25_1.25mm.nii.gz || \ - wget -q "https://osf.io/dbqep/download" -O /opt/brainnetome/BNA_MPM_thr25_1.25mm.nii.gz ) && \ - cp /opt/brainnetome/BN_Atlas_246_LUT.txt /opt/freesurfer/ -RUN wget -qO- "https://github.com/ThomasYeoLab/CBIG/archive/v0.11.1-Wu2017_RegistrationFusion.tar.gz" | \ - tar zx -C /opt && \ - cp /opt/CBIG-0.11.1-Wu2017_RegistrationFusion/stable_projects/brain_parcellation/Yeo2011_fcMRI_clustering/1000subjects_reference/Yeo_JNeurophysiol11_SplitLabels/fsaverage5/label/*h.Yeo2011_*Networks_N1000.split_components.annot /opt/freesurfer/subjects/fsaverage5/label/ && \ - cp 
/opt/CBIG-0.11.1-Wu2017_RegistrationFusion/stable_projects/brain_parcellation/Yeo2011_fcMRI_clustering/1000subjects_reference/Yeo_JNeurophysiol11_SplitLabels/project_to_individual/Yeo2011_*networks_Split_Components_LUT.txt /opt/freesurfer/ && \ - mkdir /opt/Yeo2011 && \ - cp /opt/CBIG-0.11.1-Wu2017_RegistrationFusion/stable_projects/brain_parcellation/Yeo2011_fcMRI_clustering/1000subjects_reference/Yeo_JNeurophysiol11_SplitLabels/MNI152/Yeo2011_*Networks_N1000.split_components.FSL_MNI152_*mm.nii.gz /opt/Yeo2011/ && \ - cp /opt/CBIG-0.11.1-Wu2017_RegistrationFusion/stable_projects/brain_parcellation/Yeo2011_fcMRI_clustering/1000subjects_reference/Yeo_JNeurophysiol11_SplitLabels/MNI152/*Networks_ColorLUT_freeview.txt /opt/Yeo2011/ && \ - rm -rf /opt/CBIG-0.11.1-Wu2017_RegistrationFusion +COPY --from=aal-downloader /opt/aal /opt/aal +COPY --from=adhd200-downloader /opt/ADHD200_parcellate_200.nii.gz /opt/ADHD200_parcellate_200.nii.gz +COPY --from=adhd200-downloader /opt/ADHD200_parcellate_400.nii.gz /opt/ADHD200_parcellate_400.nii.gz +COPY --from=ants-installer /opt/ants/ants-2.6.2 /opt/ants +COPY --from=brainnetome-downloader /opt/brainnetome/rh.BN_Atlas.gcs /opt/freesurfer/average/rh.BN_Atlas.gcs +COPY --from=brainnetome-downloader /opt/brainnetome/lh.BN_Atlas.gcs /opt/freesurfer/average/lh.BN_Atlas.gcs +COPY --from=brainnetome-downloader /opt/brainnetome/BN_Atlas_subcortex.gca /opt/freesurfer/average/BN_Atlas_subcortex.gca +COPY --from=brainnetome-downloader /opt/brainnetome/BN_Atlas_246_LUT.txt /opt/brainnetome/BN_Atlas_246_LUT.txt +COPY --from=brainnetome-downloader /opt/brainnetome/BN_Atlas_246_LUT.txt /opt/freesurfer/BN_Atlas_246_LUT.txt +COPY --from=brainnetome-downloader /opt/brainnetome/BNA_MPM_thr25_1.25mm.nii.gz /opt/brainnetome/BNA_MPM_thr25_1.25mm.nii.gz +COPY --from=freesurfer-installer /opt/freesurfer /opt/freesurfer +COPY --from=fsl-installer /opt/fsl /opt/fsl +COPY --from=mni512-downloader /opt/512inMNI.nii /opt/512inMNI.nii +#COPY 
--from=mrtrix-30x-builder /opt/mrtrix3 /opt/mrtrix3 +#COPY --from=mrtrix-31x-builder /opt/dev/build /opt/dev +COPY --from=mrtrix-builder /opt/mrtrix3 /opt/mrtrix3 +COPY --from=robex-installer /opt/robex /opt/robex +COPY --from=yeo2011-downloader /opt/Yeo2011 /opt/Yeo2011 + +RUN mv /opt/Yeo2011/freesurfer/subjects/fsaverage5/label/* /opt/freesurfer/subjects/fsaverage5/label && \ + mv /opt/Yeo2011/freesurfer/*.* /opt/freesurfer/ && \ + rm -rf /opt/Yeo2011/freesurfer + +# PPA for newer version of nodejs, which is required for bids-validator +#RUN curl -sL https://deb.nodesource.com/setup_12.x -o nodesource_setup.sh && \ +# bash nodesource_setup.sh && \ +# rm -f nodesource_setup.sh && \ +# apt-get install -y nodejs && \ +# npm install -gq bids-validator@1.5.3 +RUN npm install -gq bids-validator@1.15.0 # Setup envvars -ENV ANTSPATH=/usr/lib/ants \ +ENV ANTSPATH=/opt/ants \ FREESURFER_HOME=/opt/freesurfer \ FMRI_ANALYSIS_DIR=/opt/freesurfer/fsfast \ FSF_OUTPUT_FORMAT=nii.gz \ @@ -140,29 +252,13 @@ ENV ANTSPATH=/usr/lib/ants \ FSLMULTIFILEQUIT=TRUE \ FSLTCLSH=/opt/fsl/bin/fsltclsh \ FSLWISH=/opt/fsl/bin/fslwish \ - PATH=/opt/mrtrix3/bin:/usr/lib/ants:/opt/freesurfer/bin:/opt/freesurfer/mni/bin:/opt/fsl/bin:/opt/ROBEX:$PATH \ + PATH=/opt/mrtrix3/build/bin:/opt/ants/bin:/opt/freesurfer/bin:/opt/freesurfer/mni/bin:/opt/fsl/bin:/opt/ROBEX:$PATH \ PYTHONPATH=/opt/mrtrix3/lib -# MRtrix3 setup -# Commitish is 3.0.5 plus relevant changes for dwicat and -export_grad_fsl hotfix -RUN git clone https://github.com/MRtrix3/mrtrix3.git /opt/mrtrix3 && \ - cd /opt/mrtrix3 && \ - git checkout 906730011b5e21f1449cc7d60ec145375de07479 && \ - python3 configure -nogui && \ - python3 build -persistent -nopaginate && \ - git describe --tags > /mrtrix3_version && \ - rm -rf .git/ cmd/ core/ src/ testing/ tmp/ && \ - cd / - -# Acquire extra MRtrix3 data -RUN wget -q "https://osf.io/v8n5g/download" -O /opt/mrtrix3/share/mrtrix3/labelconvert/Yeo2011_7N_split.txt && \ - wget -q 
"https://osf.io/ug2ef/download" -O /opt/mrtrix3/share/mrtrix3/labelconvert/Yeo2011_17N_split.txt - -# Acquire script to be executed -COPY mrtrix3_connectome.py /mrtrix3_connectome.py -RUN chmod 775 /mrtrix3_connectome.py +## Acquire script to be executed +#COPY mrtrix3_connectome.py /mrtrix3_connectome.py +#RUN chmod 775 /mrtrix3_connectome.py COPY version /version -ENTRYPOINT ["/mrtrix3_connectome.py"] - +ENTRYPOINT ["/opt/mrtrix3/build/bin/mrtrix3_connectome"] diff --git a/mrtrix3_connectome/__init__.py b/mrtrix3_connectome/__init__.py new file mode 100644 index 0000000..c35a84b --- /dev/null +++ b/mrtrix3_connectome/__init__.py @@ -0,0 +1,10 @@ +import os +IS_CONTAINER = os.path.exists('/version') \ + and os.path.exists('/mrtrix3_version') +OPTION_PREFIX = '--' if IS_CONTAINER else '-' + +# pylint: disable=consider-using-with +__version__ = 'BIDS-App \'MRtrix3_connectome\'' \ + f'version {open("/version", "r", encoding="utf-8").read()}' \ + if IS_CONTAINER \ + else 'BIDS-App \'MRtrix3_connectome\' standalone' diff --git a/mrtrix3_connectome/anat/__init__.py b/mrtrix3_connectome/anat/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mrtrix3_connectome/anat/get.py b/mrtrix3_connectome/anat/get.py new file mode 100644 index 0000000..fc08fdb --- /dev/null +++ b/mrtrix3_connectome/anat/get.py @@ -0,0 +1,343 @@ +import json +import pathlib +from mrtrix3 import MRtrixError +from mrtrix3 import app +from mrtrix3 import fsl +from mrtrix3 import run + +# Regardless of the source of T1-weighted image information, +# scratch directory will contain at completion of this function: +# - Either: +# - T1w.mif +# or +# - T1w_premasked.mif +# , depending on software used +# (full unmasked T1-weighted image data may not be available) +# - T1w_mask.mif +# TODO Proposing modifications where this is not necessarily guaranteed to be the case; +# DWI pre-processing could proceed omitting the inter-modal registration step, +# though a big fat warning would need to be 
issued +# that the participant-level analysis is not immediately applicable +def get_t1w_preproc_images(import_path, + session, + t1w_shared, + t1w_preproc): + + session_label = '_'.join(session) + preproc_image_path = None + preproc_image_is_masked = None + preproc_mask_path = None + raw_image_path = None + + if t1w_preproc: + + # Multiple possibilities for how such data may have been provided: + # - Raw path to the image itself + # - Path to anat/ directory + # - Path to subject directory within BIDS Derivatives dataset + # - Path to BIDS Derivatives dataset + if t1w_preproc.is_file(): + preproc_image_path = t1w_preproc + else: + expected_image_basename = session_label + '*_T1w.nii*' + for candidate in [ + pathlib.Path(''), + pathlib.Path('anat'), + pathlib.Path(*session, 'anat') + ]: + + glob_result = list((t1w_preproc / candidate).glob(expected_image_basename)) + if glob_result: + if len(glob_result) == 1: + preproc_image_path = glob_result[0] + break + glob_refined_result = \ + [item for item in glob_result \ + if not '_space-' in item] + if len(glob_refined_result) == 1: + preproc_image_path = glob_refined_result[0] + break + raise MRtrixError( + 'Unable to unambiguously select ' + 'pre-processed T1-weighted image ' + 'due to multiple candidates ' + f'in location "{t1w_preproc / candidate}": ' + f'{";".join(glob_result)}') + + if preproc_image_path is None: + raise MRtrixError( + 'No pre-processed T1w image found ' + f'from specified path "{t1w_preproc}" ' + f'for session {session_label}') + + else: + + # Look inside of import_path to see if there is a pre-processed + # T1w image already there + glob_result = list(pathlib.Path(import_path, *session, 'anat') + .glob(f'{session_label}*_desc-preproc*_T1w.nii*')) + if glob_result: + if len(glob_result) == 1: + preproc_image_path = glob_result[0] + else: + raise MRtrixError( + 'Multiple pre-processed T1w images found ' + f'in import directory "{import_path}": ' + f'{";".join(glob_result)}') + + # Same checks 
regardless of whether the existing pre-processed image + comes from the output directory or a user-specified location + if preproc_image_path: + + if '_desc-preproc' not in preproc_image_path.name: + raise MRtrixError( + f'Selected T1-weighted image "{preproc_image_path}" ' + 'not flagged as pre-processed') + + # Check to see if there's a JSON file along with the T1-weighted + # image; if one exists, parse it to find out whether or not the + # pre-processed image has been brain-extracted + expected_json_path = pathlib.Path(preproc_image_path) + while expected_json_path.suffix: + expected_json_path = expected_json_path.with_suffix('') + expected_json_path = expected_json_path.with_suffix('.json') + try: + with open(expected_json_path, 'r', encoding='utf-8') as t1_json_file: + t1_json_data = json.load(t1_json_file) + preproc_image_is_masked = t1_json_data.get('SkullStripped', None) + except IOError: + pass + if preproc_image_is_masked is None: + # Try to assess whether or not skull-stripping has occurred + # based on the prevalence of NaNs or zero values + # - Obtain mask that contains: + # - All voxels with non-finite value + # and: + # - All voxels with a value of zero + # - Feed to mrstats, extracting the mean + # - If this is > 25% of the image, it's skull-stripped + frac_voxels_outside_mask = \ + float(run.command(f'mrcalc {preproc_image_path} 0 -eq 1 ' + f'{preproc_image_path} -finite -sub -add - | ' + ' mrstats - -output mean').stdout) + preproc_image_is_masked = frac_voxels_outside_mask > 0.25 + app.warn('No sidecar information for pre-processed ' + f'T1-weighted image "{preproc_image_path}" regarding skull-stripping; ' + 'image has been inferred' + f' to {"be" if preproc_image_is_masked else "not be"} ' + 'pre-masked based on image data ' + f'({int(round(100.0 * frac_voxels_outside_mask))}% of voxels' + ' contain no data)') + + # Copy pre-processed T1-weighted image into scratch directory + run.command(['mrconvert', + preproc_image_path, + 'T1w_premasked.mif' 
if preproc_image_is_masked else 'T1w.mif']) + + # If we have been provided with a pre-processed T1-weighted image + # (regardless of where it has come from), check to see if there + # is a corresponding mask image + preproc_mask_path = preproc_image_path.parent / \ + preproc_image_path.name \ + .replace('_desc-preproc', '_desc-brain') \ + .replace('_T1w.nii', '_mask.nii') + if preproc_mask_path.is_file(): + run.command(['mrconvert', + preproc_mask_path, + 'T1w_mask.mif', + '-datatype', 'bit']) + elif preproc_image_is_masked: + run.command(f'mrcalc {preproc_image_path} 0 -gt T1w_mask.mif ' + '-datatype bit') + # No pre-existing mask image, but we also don't want to + # run our own brain extraction + preproc_mask_path = '' + else: + app.console('No brain mask image found alongside ' + f'pre-processed T1-weighted image "{preproc_image_path}"; ' + 'will generate one manually') + preproc_mask_path = None + + else: + + # Check input path for raw un-processed T1w image + glob_result = list(pathlib.Path(import_path, *session, 'anat') + .glob(f'{session_label}*_T1w.nii*')) + if not glob_result: + raise MRtrixError('No raw or pre-processed T1-weighted images ' + f'could be found in input directory "{import_path}" ' + f'for session {session_label}') + if len(glob_result) > 1: + raise MRtrixError('Multiple raw T1w images found in ' + f'input directory "{import_path}" ' + f'for session {session_label}: ' + f'{";".join(glob_result)}') + raw_image_path = glob_result[0] + + # Do we need to do any pre-processing of our own at all? 
+ if preproc_mask_path is None: + + app.console('Performing requisite processing of T1-weighted data') + + # A pre-processed T1-weighted image is present, + # it's just the mask that is absent + if preproc_image_path: + + if t1w_shared.robex_cmd: + app.console(f'Using ROBEX for brain extraction for session {session_label}, ' + 'operating on existing pre-processed T1-weighted image') + elif t1w_shared.fsl_anat_path: + app.console(f'Using fsl_anat for brain extraction for session {session_label} ' + '(due to ROBEX not being installed), ' + 'operating on existing pre-processed T1-weighted image') + else: + raise MRtrixError(f'Unable to continue processing for session {session_label}: ' + 'no pre-processed T1-weighted image mask available / provided, ' + 'and no appropriate brain masking software installed') + + run.command(['mrconvert', + preproc_image_path, + 'T1w.nii', + '-strides', '+1,+2,+3' if t1w_shared.robex_cmd else '-1,+2,+3']) + + if t1w_shared.robex_cmd: + run.command(f'{t1w_shared.robex_cmd} T1w.nii T1w_brain.nii T1w_mask.nii') + run.command(['mrconvert', + 'T1w_mask.nii', + 'T1w_mask.mif', + '-datatype', 'bit']) + elif t1w_shared.fsl_anat_cmd: + run.command(f'{t1w_shared.fsl_anat_cmd} -i T1w.nii' + ' --noseg --nosubcortseg --nobias') + run.command(['mrconvert', + fsl.find_image(pathlib.PurePath('T1w.anat', 'T1_brain_mask')), + 'T1w_mask.mif', + '-datatype', 'bit']) + else: + assert False + + # No pre-processed T1-weighted image available: + # do everything based on the raw T1-weighted image + else: + + # TODO If we're doing pre-processing of the T1w from scratch, + # check to see if GDC has already been applied to the raw data, + # and if not, whether we have the right warp field available; + # if we do, then we'll perform GDC before anything else + gdc_already_applied = None + gdc_to_be_applied = None + raw_json_path = pathlib.Path(raw_image_path) + while raw_json_path.suffix: + raw_json_path = raw_json_path.with_suffix('') + raw_json_path = 
raw_json_path.with_suffix('.json') + if raw_json_path.is_file(): + with open(raw_json_path, 'r', encoding='utf-8') as json_file: + json_data = json.load(json_file) + if any(item in json_data for item in ('ImageType', 'ImageTypeText')): + image_type = json_data['ImageType'] \ + if 'ImageType' in json_data \ + else 'ImageTypeText' + gdc_already_applied = any(item in image_type for item in ('DIS2D', 'DIS3D')) + if not gdc_already_applied: + # Only now in the scenario where we would like to be applying GDC ourselves + # will we see if we can find the corresponding warp field + gdc_to_be_applied = 'ManufacturersModelName' in json_data \ + and json_data['ManufacturersModelName'] in t1w_shared.gdc_images + + else: + app.warn('Unable to establish whether GDC has already been applied ' + f'to raw T1-weighted image "{raw_image_path}" ' + 'from contents of sidecar JSON file; ' + 'will not apply this correction') + gdc_to_be_applied = False + + else: + app.warn('Unable to establish whether GDC has already been applied ' + f'to raw T1-weighted image "{raw_image_path}" ' + 'due to absence of sidecar JSON file; ' + 'will not apply this correction') + gdc_to_be_applied = False + + if t1w_shared.robex_cmd and t1w_shared.n4_cmd: + app.console('No pre-processed T1-weighted image ' + f'found for session {session_label}; ' + 'will use ROBEX and N4 for ' + 'iterative brain extraction and bias field ' + 'correction from raw T1-weighted image input') + elif t1w_shared.fsl_anat_cmd: + app.console('No pre-processed T1-weighted image ' + f'found for session {session_label}; ' + 'will use fsl_anat for brain extraction and ' + 'bias field correction from raw T1-weighted ' + 'image input' + f'{"" if t1w_shared.robex_cmd else " (ROBEX not installed)"}' + f'{"" if t1w_shared.n4_cmd else " (N4 not installed)"}') + else: + # TODO Make this acceptable in preproc-level analysis with adequate warning + return + #raise MRtrixError('Cannot complete processing for session ' + # + session_label + # + ': 
no pre-processed T1-weighted image ' + # + 'available, and software tools for ' + # + 'processing raw T1-weighted image ' + # + 'not installed') + + if gdc_to_be_applied: + run.command(['mrtransform', raw_image_path, 'T1w_raw.nii', + '-template', raw_image_path, + '-warp', t1w_shared.gdc_images[json_data['ManufacturersModelName']], + '-interp', 'linear', + '-modulate', 'jac', + '-strides', '+1,+2,+3']) + else: + run.command(['mrconvert', raw_image_path, 'T1w_raw.nii', + '-strides', '+1,+2,+3']) + + if t1w_shared.robex_cmd and t1w_shared.n4_cmd: + + # Do a semi-iterative approach here: + # Get an initial brain mask, use that mask to estimate a + # bias field, then re-compute the brain mask + # TODO Consider making this fully iterative, just like the + # approach in preproc with dwi2mask and mtnormalise + run.command([t1w_shared.robex_cmd, + 'T1w_raw.nii', + 'T1w_initial_brain.nii', + 'T1w_initial_mask.nii']) + app.cleanup('T1w_initial_brain.nii') + run.command([t1w_shared.n4_cmd, + '-i', 'T1w_raw.nii', + '-w', 'T1w_initial_mask.nii', + '-o', 'T1w_biascorr.nii']) + app.cleanup('T1w_initial_mask.nii') + run.command([t1w_shared.robex_cmd, + 'T1w_biascorr.nii', + 'T1w_biascorr_brain.nii', + 'T1w_biascorr_brain_mask.nii']) + app.cleanup('T1w_biascorr_brain.nii') + run.command(['mrconvert', + 'T1w_biascorr.nii', + 'T1w.mif']) + app.cleanup('T1w_biascorr.nii') + run.command(['mrconvert', + 'T1w_biascorr_brain_mask.nii', + 'T1w_mask.mif', + '-datatype', 'bit']) + app.cleanup('T1w_biascorr_brain_mask.nii') + + elif t1w_shared.fsl_anat_cmd: + + run.command(f'{t1w_shared.fsl_anat_cmd} -i T1w_raw.nii --noseg --nosubcortseg') + run.command(['mrconvert', + fsl.find_image(pathlib.PurePath('T1w_raw.anat', + 'T1_biascorr')), + 'T1w_premasked.mif']) + run.command(['mrconvert', + fsl.find_image(pathlib.PurePath('T1w_raw.anat', + 'T1_biascorr_brain_mask')), + 'T1w_mask.mif', + '-datatype', 'bit']) + app.cleanup('T1w_raw.anat') + + else: + assert False diff --git 
a/mrtrix3_connectome/anat/shared.py b/mrtrix3_connectome/anat/shared.py new file mode 100644 index 0000000..93de3e7 --- /dev/null +++ b/mrtrix3_connectome/anat/shared.py @@ -0,0 +1,25 @@ +import shutil +from mrtrix3 import MRtrixError +from mrtrix3 import app +from mrtrix3 import fsl + +class Shared(object): #pylint: disable=useless-object-inheritance + def __init__(self, gdc_images): + self.gdc_images = gdc_images + # TODO If synthstrip is available, + # prioritise using that for brain extraction + try: + self.fsl_anat_cmd = shutil.which(fsl.exe_name('fsl_anat')) + except MRtrixError: + self.fsl_anat_cmd = None + robex_cmd = shutil.which('ROBEX') + self.robex_cmd = robex_cmd if robex_cmd else shutil.which('runROBEX.sh') + self.n4_cmd = shutil.which('N4BiasFieldCorrection') + + if not self.fsl_anat_cmd and not self.robex_cmd: + app.warn('No commands for T1w image processing found; ' + 'command can only proceed if either ' + 'existing pre-processed T1w image data can be found, ' + 'or if doing preproc-level analysis ' + 'where registration of DWI to T1-weighted image data ' + 'will be excluded from processing') diff --git a/mrtrix3_connectome/citations.py b/mrtrix3_connectome/citations.py new file mode 100644 index 0000000..e69a2ac --- /dev/null +++ b/mrtrix3_connectome/citations.py @@ -0,0 +1,294 @@ +def add_citations(cmdline, option_prefix='-'): + cmdline.add_citation( + 'Smith, R. E.; Connelly, A. ' + 'MRtrix3_connectome: A BIDS Application for quantitative structural ' + 'connectome construction. ' + 'In Proc OHBM, 2019, W610', + is_external=False) + cmdline.add_citation( + 'Andersson, J. L.; Skare, S. & Ashburner, J. ' + 'How to correct susceptibility distortions in spin-echo echo-planar ' + 'images: application to diffusion tensor imaging. ' + 'NeuroImage, 2003, 20, 870-888', + condition='If performing preproc-level analysis', + is_external=True) + cmdline.add_citation( + 'Andersson, J. L. R.; Jenkinson, M. & Smith, S. 
' + 'Non-linear registration, aka spatial normalisation. ' + 'FMRIB technical report, 2010, TR07JA2', + condition='If performing participant-level analysis ' + + f'using {option_prefix}template_reg fsl', + is_external=True) + cmdline.add_citation( + 'Andersson, J. L. & Sotiropoulos, S. N. ' + 'An integrated approach to correction for off-resonance effects and ' + 'subject movement in diffusion MR imaging. ' + 'NeuroImage, 2015, 125, 1063-1078', + condition='If performing preproc-level analysis', + is_external=True) + cmdline.add_citation( + 'Andersson, J. L. R.; Graham, M. S.; Zsoldos, E. ' + '& Sotiropoulos, S. N. ' + 'Incorporating outlier detection and replacement into a ' + 'non-parametric framework for movement and distortion correction of ' + 'diffusion MR images. ' + 'NeuroImage, 2016, 141, 556-572', + condition='If performing preproc-level analysis', + is_external=True) + cmdline.add_citation( + 'Andersson, J. L. R.; Graham, M. S.; Drobnjak, I.; Zhang, H.; ' + 'Filippini, N. & Bastiani, M. ' + 'Towards a comprehensive framework for movement and distortion ' + 'correction of diffusion MR images: Within volume movement. ' + 'NeuroImage, 2017, 152, 450-466', + condition='If performing preproc-level analysis ' + 'utilising slice-to-volume motion correction', + is_external=True) + cmdline.add_citation( + 'Andersson, J. L. R.; Graham,, M. S.; Drobnjak, I.; Zhang, H. & ' + 'Campbell, J. ' + 'Susceptibility-induced distortion that varies due to motion: ' + 'Correction in diffusion MR without acquiring additional data. ' + 'NeuroImage, 2018, 171, 277-295', + condition='If performing preproc-level analysis', + is_external=True) + cmdline.add_citation( + 'Avants, B. B.; Epstein, C. L.; Grossman, M.; Gee, J. C. ' + 'Symmetric diffeomorphic image registration with cross-correlation: ' + 'Evaluating automated labeling of elderly and neurodegenerative brain. 
' + 'Medical Image Analysis, 2008, 12, 26-41', + condition='If performing participant-level analysis, ' + f'using {option_prefix}parcellation [ aal, aal2, ' + 'brainnetome246mni, craddock200, craddock400, perry512, ' + 'yeo7mni or yeo17mni ], ' + f'and not using {option_prefix}template_reg fsl', + is_external=True) + cmdline.add_citation( + 'Bhushan, C.; Haldar, J. P.; Choi, S.; Joshi, A. A.; Shattuck, D. W. & ' + 'Leahy, R. M. ' + 'Co-registration and distortion correction of diffusion and anatomical ' + 'images based on inverse contrast normalization. ' + 'NeuroImage, 2015, 115, 269-280', + condition='If performing preproc-level analysis', + is_external=True) + cmdline.add_citation( + 'Craddock, R. C.; James, G. A.; Holtzheimer, P. E.; Hu, X. P.; ' + 'Mayberg, H. S. ' + 'A whole brain fMRI atlas generated via spatially constrained ' + 'spectral clustering. ' + 'Human Brain Mapping, 2012, 33(8), 1914-1928', + condition='If performing participant-level analysis and ' + f'using {option_prefix}parcellation ' + '[ craddock200 or craddock400 ]', + is_external=True) + cmdline.add_citation( + 'Dale, A. M.; Fischl, B. & Sereno, M. I. ' + 'Cortical Surface-Based Analysis: ' + 'I. Segmentation and Surface Reconstruction. ' + 'NeuroImage, 1999, 9, 179-194', + condition='If performing participant-level analysis and ' + f'using {option_prefix}parcellation ' + '[ brainnetome246fs, desikan, destrieux, hcpmmp1, ' + 'yeo7fs or yeo17fs ]', + is_external=True) + cmdline.add_citation( + 'Desikan, R. S.; Segonne, F.; Fischl, B.; Quinn, B. T.; ' + 'Dickerson, B. C.; Blacker, D.; Buckner, R. L.; Dale, A. M.; ' + 'Maguire, R. P.; Hyman, B. T.; Albert, M. S. & Killiany, R. J. ' + 'An automated labeling system for subdividing the human cerebral ' + 'cortex on MRI scans into gyral based regions of interest. 
' + 'NeuroImage, 2006, 31, 968-980', + condition='If performing participant-level analysis and ' + f'using {option_prefix}parcellation desikan', + is_external=True) + cmdline.add_citation( + 'Destrieux, C.; Fischl, B.; Dale, A. & Halgren, E. ' + 'Automatic parcellation of human cortical gyri and sulci using ' + 'standard anatomical nomenclature. ' + 'NeuroImage, 2010, 53, 1-15', + condition='If performing participant-level analysis and ' + f'using {option_prefix}parcellation destrieux', + is_external=True) + cmdline.add_citation( + 'Fan, L.; Li, H.; Zhuo, J.; Zhang, Y.; Wang, J.; Chen, L.; Yang, Z.; ' + 'Chu, C.; Xie, S.; Laird, A.R.; Fox, P.T.; Eickhoff, S.B.; Yu, C.; ' + 'Jiang, T. ' + 'The Human Brainnetome Atlas: ' + 'A New Brain Atlas Based on Connectional Architecture. ' + 'Cerebral Cortex, 2016, 26 (8), 3508-3526', + condition='If performing participant-level analysis and ' + f'using {option_prefix}parcellation ' + '[ brainnetome246fs brainnetome246mni ]', + is_external=True) + cmdline.add_citation( + 'Glasser, M. F.; Coalson, T. S.; Robinson, E. C.; Hacker, C. D.; ' + 'Harwell, J.; Yacoub, E.; Ugurbil, K.; Andersson, J.; Beckmann, C. F.; ' + 'Jenkinson, M.; Smith, S. M. & Van Essen, D. C. ' + 'A multi-modal parcellation of human cerebral cortex. ' + 'Nature, 2016, 536, 171-178', + condition='If performing participant-level analysis and ' + f'using {option_prefix}parcellation hcpmmp1', + is_external=True) + cmdline.add_citation( + 'Iglesias, J. E.; Liu, C. Y.; Thompson, P. M. & Tu, Z. ' + 'Robust Brain Extraction Across Datasets and ' + 'Comparison With Publicly Available Methods. ' + 'IEEE Transactions on Medical Imaging, 2011, 30, 1617-1634', + condition='If performing either preproc-level or participant-level ' + 'analysis and ROBEX is used for T1w brain extraction', + is_external=True) + cmdline.add_citation( + 'Jeurissen, B; Tournier, J-D; Dhollander, T; Connelly, A & Sijbers, J. 
' + 'Multi-tissue constrained spherical deconvolution for improved ' + 'analysis of multi-shell diffusion MRI data. ' + 'NeuroImage, 2014, 103, 411-426', + condition='If performing either preproc-level ' + 'or participant-level analysis', + is_external=False) + cmdline.add_citation( + 'Kellner, E.; Dhital, B.; Kiselev, V. G.; Reisert, M. ' + 'Gibbs-ringing artifact removal based on local subvoxel-shifts. ' + 'Magnetic Resonance in Medicine, 2006, 76(5), 1574-1581', + condition='If performing preproc-level analysis', + is_external=True) + cmdline.add_citation( + 'Patenaude, B.; Smith, S. M.; Kennedy, D. N. & Jenkinson, M. ' + 'A Bayesian model of shape and appearance for ' + 'subcortical brain segmentation. ' + 'NeuroImage, 2011, 56, 907-922', + condition='If performing participant-level analysis and ' + f'not using {option_prefix}parcellation ' + '[ brainnetome246fs or brainnetome246mni ]', + is_external=True) + cmdline.add_citation( + 'Perry, A.; Wen, W.; Kochan, N. A.; Thalamuthu, A.; Sachdev, P. S.; ' + 'Breakspear, M. ' + 'The independent influences of age and education on functional brain ' + 'networks and cognition in healthy older adults. ' + 'Human Brain Mapping, 2017, 38(10), 5094-5114', + condition='If performing participant-level analysis and ' + f'using {option_prefix}parcellation perry512', + is_external=True) + cmdline.add_citation( + 'Smith, S. M. ' + 'Fast robust automated brain extraction. ' + 'Human Brain Mapping, 2002, 17, 143-155', + condition='If performing either preproc-level or participant-level ' + 'analysis and FSL is used for T1w brain extraction', + is_external=True) + cmdline.add_citation( + 'Smith, R. E.; Tournier, J.-D.; Calamante, F. & Connelly, A. ' + 'Anatomically-constrained tractography: ' + 'Improved diffusion MRI streamlines tractography through ' + 'effective use of anatomical information. 
' + 'NeuroImage, 2012, 62, 1924-1938', + condition='If performing participant-level analysis', + is_external=False) + cmdline.add_citation( + 'Smith, R. E.; Tournier, J.-D.; Calamante, F. & Connelly, A. ' + 'The effects of SIFT on the reproducibility and biological ' + 'accuracy of the structural connectome. ' + 'NeuroImage, 2015a, 104, 253-265', + condition='If performing participant-level analysis', + is_external=False) + cmdline.add_citation( + 'Smith, R. E.; Tournier, J.-D.; Calamante, F. & Connelly, A. ' + 'SIFT2: Enabling dense quantitative assessment of brain white matter ' + 'connectivity using streamlines tractography. ' + 'NeuroImage, 2015b, 119, 338-351', + condition='If performing participant-level analysis', + is_external=False) + cmdline.add_citation( + 'Smith, R. E.; Raffelt, D.; Tournier, J.-D.; Connelly, A. ' + 'Quantitative streamlines tractography: Methods and inter-subject ' + 'normalisation. ' + 'OHBM Aperture, 2022, doi: 10.52294/ApertureNeuro.2022.2.NEOD9565', + condition='If performing group-level analysis', + is_external=False) + cmdline.add_citation( + 'Tournier, J.-D.; Calamante, F., Gadian, D.G. & Connelly, A. ' + 'Direct estimation of the fiber orientation density function from ' + 'diffusion-weighted MRI data using spherical deconvolution. ' + 'NeuroImage, 2004, 23, 1176-1185', + condition='If performing either preproc-level or participant-level ' + 'analysis', + is_external=False) + cmdline.add_citation( + 'Tournier, J.-D.; Calamante, F. & Connelly, A. ' + 'Improved probabilistic streamlines tractography by 2nd order ' + 'integration over fibre orientation distributions. ' + 'Proceedings of the International Society for Magnetic ' + 'Resonance in Medicine, 2010, 1670', + condition='If performing participant-level analysis ' + f'and {option_prefix}streamlines 0 is not set', + is_external=False) + cmdline.add_citation( + 'Tustison, N.; Avants, B.; Cook, P.; Zheng, Y.; Egan, A.; ' + 'Yushkevich, P. & Gee, J. 
' + 'N4ITK: Improved N3 Bias Correction. ' + 'IEEE Transactions on Medical Imaging, 2010, 29, 1310-1320', + condition='If performing either preproc-level or participant-level ' + 'analysis, and N4 is used for either DWI or T1w bias field correction', + is_external=True) + cmdline.add_citation( + 'Tustison, N.; Avants, B. ' + 'Explicit B-spline regularization in diffeomorphic image ' + 'registration. ' + 'Frontiers in Neuroinformatics, 2013, 7, 39', + condition='If performing participant-level analysis, ' + f'using {option_prefix}parcellation [ aal, aal2, ' + 'brainnetome246mni, craddock200, craddock400, perry512, ' + 'yeo7mni or yeo17mni ], ' + f'and not using {option_prefix}template_reg fsl', + is_external=True) + cmdline.add_citation( + 'Tzourio-Mazoyer, N.; Landeau, B.; Papathanassiou, D.; Crivello, F.; ' + 'Etard, O.; Delcroix, N.; Mazoyer, B. & Joliot, M. ' + 'Automated Anatomical Labeling of activations in SPM using a ' + 'Macroscopic Anatomical Parcellation of the MNI MRI ' + 'single-subject brain. ' + 'NeuroImage, 15(1), 273-289', + condition='If performing participant-level analysis ' + f'and using {option_prefix}parcellation [ aal or aal2 ]', + is_external=True) + cmdline.add_citation( + 'Veraart, J.; Novikov, D. S.; Christiaens, D.; Ades-aron, B.; ' + 'Sijbers, J. & Fieremans, E. ' + 'Denoising of diffusion MRI using random matrix theory. ' + 'NeuroImage, 2016, 142, 394-406', + condition='If performing preproc-level analysis', + is_external=False) + cmdline.add_citation( + 'Yeh, C.H.; Smith, R.E.; Liang, X.; Calamante, F.; Connelly, A. ' + 'Correction for diffusion MRI fibre tracking biases: ' + 'The consequences for structural connectomic metrics. 
' + 'Neuroimage, 2016, 142, 150-162', + condition='If utilising connectome outputs from ' + 'participant-level analysis', + is_external=False) + cmdline.add_citation( + 'Yeo, B.T.; Krienen, F.M.; Sepulcre, J.; Sabuncu, M.R.; Lashkari, D.; ' + 'Hollinshead, M.; Roffman, J.L.; Smoller, J.W.; Zollei, L.; ' + 'Polimeni, J.R.; Fischl, B.; Liu, H. & Buckner, R.L. ' + 'The organization of the human cerebral cortex estimated by ' + 'intrinsic functional connectivity. ' + 'J Neurophysiol, 2011, 106(3), 1125-1165', + condition='If performing participant-level analysis ' + f'and using {option_prefix}parcellation ' + '[ yeo7fs, yeo7mni, yeo17fs or yeo17mni ]', + is_external=False) + cmdline.add_citation( + 'Zalesky, A.; Fornito, A.; Harding, I. H.; Cocchi, L.; Yucel, M.; ' + 'Pantelis, C. & Bullmore, E. T. ' + 'Whole-brain anatomical networks: Does the choice of nodes matter? ' + 'NeuroImage, 2010, 50, 970-983', + condition='If performing participant-level analysis ' + 'and using {option_prefix}parcellation perry512', + is_external=True) + cmdline.add_citation( + 'Zhang, Y.; Brady, M. & Smith, S. ' + 'Segmentation of brain MR images through a hidden Markov random ' + 'field model and the expectation-maximization algorithm. ' + 'IEEE Transactions on Medical Imaging, 2001, 20, 45-57', + condition='If performing participant-level analysis', + is_external=True) diff --git a/mrtrix3_connectome/execute.py b/mrtrix3_connectome/execute.py new file mode 100644 index 0000000..0f46258 --- /dev/null +++ b/mrtrix3_connectome/execute.py @@ -0,0 +1,119 @@ +import os +import pathlib +import shutil +from mrtrix3 import CONFIG +from mrtrix3 import MRtrixError +from mrtrix3 import app +from mrtrix3 import run +from mrtrix3 import utils +from . import IS_CONTAINER +from . 
import OPTION_PREFIX +from .preproc.shared import Shared as PreprocShared +from .preproc.run import run_preproc +from .participant.shared import Shared as ParticipantShared +from .participant.run import run_participant +from .group.run import run_group +from .sessions import get_sessions + +def execute(): #pylint: disable=unused-variable + + # TODO Should be able to use pathlib.Path.resolve() + app.ARGS.bids_dir = pathlib.Path(os.path.abspath(app.ARGS.bids_dir)) + app.ARGS.output_dir = pathlib.Path(os.path.abspath(app.ARGS.output_dir)) + app.ARGS.t1w_preproc = pathlib.Path(os.path.abspath(app.ARGS.t1w_preproc)) \ + if app.ARGS.t1w_preproc \ + else None + + # If running within a container, and the --debug option has been + # provided, modify the interlly-stored MRtrix3 configuration + # contents, so that any temporary directories will be constructed + # within the mounted output directory, and therefore temporary + # directory contents will not be lost upon container instance + # destruction if the script fails at any point. 
+ if IS_CONTAINER and app.ARGS.debug: + app.DO_CLEANUP = False + if 'ScriptScratchDir' not in CONFIG and not app.ARGS.scratch: + CONFIG['ScriptScratchDir'] = str(app.ARGS.output_dir) + + if utils.is_windows(): + raise MRtrixError( + 'Script cannot be run on Windows due to FSL dependency') + + if app.ARGS.skipbidsvalidator: + app.console('Skipping BIDS validation based on user request') + elif shutil.which('bids-validator'): + run.command(['bids-validator', app.ARGS.bids_dir]) + else: + app.warn('BIDS validator script not installed; ' + 'proceeding without validation of input data') + + if app.ARGS.output_verbosity < 1 or app.ARGS.output_verbosity > 4: + raise MRtrixError(f'Valid values for {OPTION_PREFIX}output_verbosity' + ' option are from 1 to 4') + + # At output verbosity level 4 we retain all data and move the + # scratch directory to the output + if app.ARGS.output_verbosity == 4: + app.DO_CLEANUP = False + + if app.ARGS.output_dir.name.lower() == f'mrtrix3_connectome-{app.ARGS.analysis_level}': + output_app_path = app.ARGS.output_dir.parent + else: + output_app_path = app.ARGS.output_dir + if output_app_path.is_file(): + raise MRtrixError('Output path cannot be an existing file') + + sessions_to_analyze = None + if app.ARGS.analysis_level in ['preproc', 'participant']: + sessions_to_analyze = get_sessions( + app.ARGS.bids_dir, + participant_label=app.ARGS.participant_label, + session_label=app.ARGS.session_label) + + # TODO All three stages may involve an invocation of dwi2mask; + # need to integrate into Shared the ability to store which algorithm is used, + # and ideally also a command-line option to control what is used + + if app.ARGS.analysis_level == 'preproc': + + preproc_shared = PreprocShared(app.ARGS.gdc) + + for session_to_process in sessions_to_analyze: + app.console(f'Commencing execution for session: {"_".join(session_to_process)}') + run_preproc(app.ARGS.bids_dir, + session_to_process, + preproc_shared, + app.ARGS.t1w_preproc, + 
app.ARGS.concat_denoise, + app.ARGS.output_verbosity, + output_app_path) + + if app.ARGS.analysis_level == 'participant': + + participant_shared = \ + ParticipantShared(getattr(app.ARGS, 'atlas_path', None), + app.ARGS.parcellation, + app.ARGS.streamlines, + app.ARGS.template_reg) + + for session_to_process in sessions_to_analyze: + app.console(f'Commencing execution for session: {"_".join(session_to_process)}') + run_participant(app.ARGS.bids_dir, + session_to_process, + participant_shared, + app.ARGS.t1w_preproc, + app.ARGS.output_verbosity, + output_app_path) + + elif app.ARGS.analysis_level == 'group': + + if app.ARGS.participant_label: + raise MRtrixError(f'Cannot use {OPTION_PREFIX}participant_label option ' + 'when performing group-level analysis') + if app.ARGS.session_label: + raise MRtrixError(f'Cannot use {OPTION_PREFIX}session_label option ' + + 'when performing group-level analysis') + + run_group(app.ARGS.bids_dir, + app.ARGS.output_verbosity, + output_app_path) diff --git a/mrtrix3_connectome/group/__init__.py b/mrtrix3_connectome/group/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mrtrix3_connectome/group/run.py b/mrtrix3_connectome/group/run.py new file mode 100644 index 0000000..166a6b3 --- /dev/null +++ b/mrtrix3_connectome/group/run.py @@ -0,0 +1,522 @@ +import json +import math +import os +import pathlib +import re +import shutil +from mrtrix3 import MRtrixError +from mrtrix3 import app +from mrtrix3 import image +from mrtrix3 import matrix +from mrtrix3 import run +from .. 
import IS_CONTAINER +from ..sessions import get_sessions + +GROUP_BRAINMASKS_DIR = pathlib.Path('brainmasks') +GROUP_BZEROS_DIR = pathlib.Path('bzeros') +GROUP_CONNECTOMES_DIR = pathlib.Path('connectomes') +GROUP_FA_DIR = pathlib.Path('fa') +GROUP_RESPONSES_DIR = pathlib.Path('responses') +GROUP_WMVOXELS_DIR = pathlib.Path('wmvoxels') +GROUP_WARPS_DIR = pathlib.Path('warps') + +NORMALISATION_JSON_DATA = { + 'rf': { + 'Description': 'Multiplication term based on the difference in ' + 'magnitude between the white matter response ' + 'function used during independent participant-' + 'level analysis, and the group average white ' + 'matter response function generated during group-' + 'level analysis' + }, + 'mu': { + 'Description': 'Value of the "proportionality coefficient" ' + 'within the SIFT model', + 'Units': 'FD/mm' + }, + 'vol': { + 'Description': 'Volume of DWI voxels', + 'Units': 'mm^3' + }, + 'wmb0': { + 'Description': 'Multiplication term based on the median ' + 'intensity of the b=0 image within white matter, ' + 'compared to the mean of this value across ' + 'subjects' + }, + 'norm': { + 'Description': 'Normalisation factor applied to session ' + 'connectome data, calculated as the product of ' + 'the "rf", "mu", "vol" and "wmb0" terms' + } +} + +def run_group(bids_dir, output_verbosity, output_app_dir): + + preproc_dir = output_app_dir / 'MRtrix3_connectome-preproc' + participant_dir = output_app_dir / 'MRtrix3_connectome-participant' + group_dir = output_app_dir / 'MRtrix3_connectome-group' + + # Participant-level analysis no longer generates FA and mean b=0 images + # These really should not be that expensive to compute in series, + # and will keep the output directory cleaner + + # Check presence of all required input files before proceeding + # Pre-calculate paths of all files since many will be used in + # more than one location + class SessionPaths: + def __init__(self, session): + session_label = '_'.join(session) + preproc_root = 
pathlib.Path(preproc_dir, *session) + participant_root = pathlib.Path(participant_dir, *session) + group_root = pathlib.Path(group_dir, *session) + # Get input DWI path here rather than in function + in_dwi_image_list = (preproc_root / 'dwi').glob('*_dwi.nii*') + if not in_dwi_image_list: + raise MRtrixError(f'No DWI data found for session "{session_label}" output') + if len(in_dwi_image_list) > 1: + raise MRtrixError('More than one DWI mage found' + f' in session "{session_label}" output') + self.in_dwi = in_dwi_image_list[0] + if not '_desc-preproc_' in self.in_dwi: + raise MRtrixError(f'DWI image in output directory for session "{session_label}"' + ' not flagged as pre-processed') + in_dwi_prefix = pathlib.Path(self.in_dwi) + while in_dwi_prefix.suffix: + in_dwi_prefix = in_dwi_prefix.with_suffix('') + self.in_bvec = in_dwi_prefix.with_suffix('.bvec') + self.in_bval = in_dwi_prefix.with_suffix('.bval') + self.in_rf = participant_root / 'dwi' / f'{session_label}_tissue-WM_response.txt' + connectome_files = list((participant_root / 'connectome') + .glob(f'{session_label}_desc-*_connectome.csv')) + if not connectome_files: + raise MRtrixError('No participant-level connectome file ' + f'found for session "{session_label}"') + if len(connectome_files) > 1: + raise MRtrixError('Connectomes from multiple parcellations' + f' detected for session "{session_label}";' + ' this is not yet supported') + self.in_connectome = connectome_files[0] + self.in_mu = participant_root / 'tractogram' / f'{session_label}_mu.txt' + + for entry in vars(self).values(): + if not entry.exists(): + raise MRtrixError('Unable to find critical data ' + f'for session "{session_label}"' + f'(expected location: {entry})') + + self.grad_import_option = ['-fslgrad', self.in_bvec, self.in_bval] + + self.bvalues = [float(value) for value in \ + run.command(['mrinfo', + self.in_dwi, + '-shell_bvalues'] + + self.grad_import_option).stdout.split()] + + self.parcellation = \ + 
re.findall('(?<=_desc-)[a-zA-Z0-9]*', self.in_connectome.name)[0] + + # Permissible for this to not exist at either location + self.in_mask = preproc_root / 'dwi' / f'{session_label}_desc-brain_mask.nii.gz' + if not self.in_mask.is_file(): + self.in_mask = participant_root / 'dwi' / f'{session_label}_desc-brain_mask.nii.gz' + if not self.in_mask.exists(): + self.in_mask = None + + # Not guaranteed to exist + # Also needs to not just be a directory present, but also + # have the "eddy_quad" contents present (if EddyQC is not + # installed, that directory will still be constructed, it + # just will only contain contents from "eddy" itself) + self.in_eddyqc_dir = preproc_root / 'dwi' / 'eddyqc' + in_eddyqc_file = self.in_eddyqc_dir / 'qc.json' + if not in_eddyqc_file.is_file(): + self.in_eddyqc_dir = None + + self.mu = matrix.load_vector(self.in_mu)[0] + self.rf = matrix.load_matrix(self.in_rf) + + self.temp_mask = GROUP_BRAINMASKS_DIR / f'{session_label}.mif' + self.temp_fa = GROUP_FA_DIR / f'{session_label}.mif' + self.temp_bzero = GROUP_BZEROS_DIR / f'{session_label}.mif' + self.temp_warp = GROUP_WARPS_DIR / f'{session_label}.mif' + self.temp_voxels = GROUP_WMVOXELS_DIR / f'{session_label}.mif' + self.temp_rf = GROUP_RESPONSES_DIR / f'{session_label}.txt' + self.median_bzero = 0.0 + self.dwiintensitynorm_factor = 1.0 + self.rf_multiplier = 1.0 + self.volume_multiplier = 1.0 + self.global_multiplier = 1.0 + self.temp_connectome = GROUP_CONNECTOMES_DIR / f'{session_label}.csv' + self.out_dir = group_root + self.out_connectome_data = \ + group_root / 'connectome' / self.in_connectome.name + self.out_connectome_json = \ + self.out_connectome_data.with_suffix('.json') + + self.session_label = session_label + + session_list = get_sessions(participant_dir) + if not session_list: + raise MRtrixError( + 'No processed session data found' + f' in output directory "{participant_dir}"' + ' for group analysis') + if len(session_list) == 1: + app.warn('Only one session present in 
participant directory; ' + 'some group-level analysis steps will be skipped') + if os.path.exists(group_dir): + app.warn('Output directory for group-level analysis already exists;' + ' all contents will be erased when this execution completes') + + + bids_session_list = get_sessions(bids_dir) + not_processed = [session for session in bids_session_list \ + if session not in session_list] + if not_processed: + app.warn(f'{len(not_processed)} session{"s" if len(not_processed) > 1 else ""}' + ' present in BIDS directory' + f' {"have" if len(not_processed) > 1 else "has"}' + ' not yet undergone participant-level processing:' + f' {", ".join("_".join(session) for session in not_processed)}') + + sessions = [] + for session in session_list: + sessions.append(SessionPaths(session)) + + + + # Connectome-based calculations can only be performed if the + # parcellation is consistent across all sessions + parcellation = sessions[0].parcellation + consistent_parcellation = \ + all(s.parcellation == parcellation for s in sessions) + out_connectome_path = group_dir / f'desc-{parcellation}_connectome.csv' \ + if consistent_parcellation \ + else None + + app.activate_scratch_dir() + + # Before proceeding, compile session b-values and make sure that: + # - the number of shells is equivalent across sessions + # - the b-values don't vary too much within those shells across sessions + if not all(len(session.bvalues) == len(sessions[0].bvalues) + for session in sessions): + raise MRtrixError('Not all sessions DWI data contain the same ' + 'number of b-value shells') + all_bvalues = [[session.bvalues[index] for session in sessions] + for index in range(0, len(sessions[0].bvalues))] + for shell in all_bvalues: + shell_mean = sum(shell) / len(shell) + if max([max(shell)-shell_mean, shell_mean-min(shell)]) > 50.0: + raise MRtrixError('Excessive deviation of b-values:' + f' mean across subjects b={shell_mean};' + f' range {min(shell)}-{max(shell)}') + + + # First pass through subject data in 
group analysis: + # Generate mask and FA image directories to be used in + # population template generation. + # If output_verbosity >= 2 then a mask is already provided; + # if not, then one can be quickly calculated from the + # mean b=0 image, which must be provided + progress = app.ProgressBar('Importing and preparing session data', + len(sessions)) + run.function(os.makedirs, GROUP_BRAINMASKS_DIR) + run.function(os.makedirs, GROUP_BZEROS_DIR) + run.function(os.makedirs, GROUP_FA_DIR) + run.function(os.makedirs, GROUP_RESPONSES_DIR) + for s in sessions: + # We need three images for each session: + # - Brain mask: Convert if present, otherwise generate from DWI + # - Mean b=0 image (for scaling): Generate from DWI + # - FA image (for registration): Generate from DWI + if s.in_mask is None: + run.command(['dwi2mask', + 'legacy', + s.in_dwi, + s.temp_mask] + + s.grad_import_option) + else: + run.command(['mrconvert', + s.in_mask, + s.temp_mask, + '-datatype', 'bit']) + run.command(['dwiextract', s.in_dwi, '-bzero', '-'] + + s.grad_import_option + + ['|', + 'mrmath', '-', 'mean', s.temp_bzero, + '-axis', '3']) + run.command(['dwi2tensor', s.in_dwi, '-', + '-mask', s.temp_mask] + + s.grad_import_option + + ['|', + 'tensor2metric', '-', + '-fa', s.temp_fa, + '-mask', s.temp_mask]) + run.function(shutil.copy, s.in_rf, s.temp_rf) + progress.increment() + progress.done() + + # First group-level calculation: + # Generate the population FA template + if len(sessions) == 1: + app.console('Duplicating single-subject FA image as ' + 'population template image') + run.function(shutil.copyfile, + sessions[0].temp_fa, + 'template.mif') + else: + app.console('Generating population template for ' + 'intensity normalisation WM mask derivation') + run.command(['population_template', + GROUP_FA_DIR, + 'template.mif', + '-mask_dir', GROUP_BRAINMASKS_DIR, + '-warp_dir', GROUP_WARPS_DIR, + '-type', 'rigid_affine_nonlinear', + '-rigid_scale', '0.25,0.5,0.8,1.0', + '-affine_scale', 
'0.7,0.8,1.0,1.0', + '-nl_scale', '0.5,0.75,1.0,1.0,1.0', + '-nl_niter', '5,5,5,5,5', + '-linear_no_pause']) + app.cleanup(GROUP_FA_DIR) + app.cleanup(GROUP_BRAINMASKS_DIR) + + # Generate the group average response function + if len(sessions) == 1: + app.console('Duplicating single-subject WM response function' + ' as group-average response function') + run.function(shutil.copyfile, + sessions[0].temp_rf, + 'response.txt') + else: + app.console('Calculating group-average WM response function') + run.command(['responsemean', + [s.temp_rf for s in sessions], + 'response.txt']) + app.cleanup(GROUP_RESPONSES_DIR) + mean_rf = matrix.load_matrix('response.txt') + mean_rf_lzero = [line[0] for line in mean_rf] + + # Second pass through subject data in group analysis: + # - Warp template FA image back to subject space & + # threshold to define a WM mask in subject space + # - Calculate the median subject b=0 value within this mask + # - Store this in a file, and contribute to calculation of the + # mean of these values across subjects + # - Contribute to the group average response function + if len(sessions) == 1: + app.console('Calculating N=1 intensity normalisation factor') + run.command('mrthreshold template.mif voxels.mif -abs 0.4') + sessions[0].median_bzero = image.statistics(sessions[0].temp_bzero, + mask='voxels.mif').median + app.cleanup(sessions[0].temp_bzero) + sum_median_bzero = sessions[0].median_bzero + app.cleanup('voxels.mif') + else: + progress = app.ProgressBar('Generating intensity normalisation factors', + len(sessions)) + run.function(os.makedirs, GROUP_WMVOXELS_DIR) + sum_median_bzero = 0.0 + for s in sessions: + run.command(['mrtransform', 'template.mif', '-', + '-warp_full', s.temp_warp, + '-from', '2', + '-template', s.temp_bzero, + '|', + 'mrthreshold', '-', s.temp_voxels, + '-abs', '0.4']) + s.median_bzero = image.statistics(s.temp_bzero, + mask=s.temp_voxels).median + app.cleanup(s.temp_bzero) + app.cleanup(s.temp_voxels) + 
app.cleanup(s.temp_warp) + sum_median_bzero += s.median_bzero + progress.increment() + progress.done() + + app.cleanup(GROUP_BZEROS_DIR) + app.cleanup(GROUP_WMVOXELS_DIR) + app.cleanup(GROUP_WARPS_DIR) + app.cleanup('template.mif') + + + # Second group-level calculation: + # - Calculate the mean of median b=0 values + mean_median_bzero = sum_median_bzero / len(sessions) + + # Third pass through session data in group analysis: + # - Scaling factors for connectome strengths: + # - Multiply by SIFT proportionality coefficient mu + # - Multiply by (mean median b=0) / (subject median b=0) + # - Multiply by (subject RF size) / (mean RF size) + # (needs to account for multi-shell data) + # - Multiply by voxel volume + progress = app.ProgressBar('Computing normalisation scaling factors' + ' for subject connectomes', + len(sessions)) + run.function(os.makedirs, GROUP_CONNECTOMES_DIR) + # Determine, from the minimum connectivity value that can be represented + # in a streamlines-based representation, the maximum across sessions + min_connectivity = 0.0 + for s in sessions: + rf_lzero = [line[0] for line in s.rf] + s.rf_multiplier = 1.0 + for (mean, subj) in zip(mean_rf_lzero, rf_lzero): + s.rf_multiplier = s.rf_multiplier * subj / mean + # Don't want to be scaling connectome independently for + # differences in RF l=0 terms across all shells; + # use the geometric mean of the per-shell scale factors + s.rf_multiplier = math.pow(s.rf_multiplier, 1.0 / len(mean_rf_lzero)) + + s.bzero_multiplier = mean_median_bzero / s.median_bzero + + # Calculate voxel volume + for spacing in image.Header(s.in_dwi).spacing()[0:3]: + s.volume_multiplier *= spacing + + s.global_multiplier = s.mu \ + * s.bzero_multiplier \ + * s.rf_multiplier \ + * s.volume_multiplier + + # Minimum connectivity value that can be reasonably represented is + # 1 streamline prior to scaling + min_connectivity = max(min_connectivity, s.global_multiplier) + + progress.increment() + progress.done() + + # Third 
group-level calculation: + # Compute normalised connectomes, and generate the group mean connectome + # Can only do this if the parcellation is identical across subjects; + # this needs to be explicitly checked + # Use geometric mean for averaging across subjects, since variance + # across sessions is closer to multiplicative than additive + if consistent_parcellation: + progress = app.ProgressBar('Normalising subject connectomes, ' + 'applying group-wise minimum connectivity, ' + 'and calculating group mean connectome', + len(sessions)+1) + mean_connectome = [] + for s in sessions: + connectome_prenorm = matrix.load_matrix(s.in_connectome) + connectome_postnorm = [[max(v*s.global_multiplier, + min_connectivity) + for v in line] + for line in connectome_prenorm] + matrix.save_matrix(s.temp_connectome, connectome_postnorm) + + if mean_connectome: + mean_connectome = [[c1+math.log(c2) + for c1, c2 in zip(r1, r2)] + for r1, r2 in zip(mean_connectome, + connectome_postnorm)] + else: + mean_connectome = [[math.log(v) + for v in row] + for row in connectome_postnorm] + progress.increment() + + mean_connectome = [[math.exp(v/len(sessions)) + for v in row] + for row in mean_connectome] + progress.done() + else: + app.warn('Different parcellations across sessions, ' + 'cannot calculate a group mean connectome; ' + 'normalising and applying minimum connectivity ' + 'independently for each session') + connectome_prenorm = matrix.load_matrix(s.in_connectome) + connectome_postnorm = [[max(v, 1.0)*s.global_multiplier + for v in line] + for line in connectome_prenorm] + matrix.save_matrix(s.temp_connectome, connectome_postnorm) + + + # Run EddyQC group-level analysis if available + # Do this LAST, as it writes back to the preproc EddyQC directories + # if successful + do_squad = bool(shutil.which('eddy_squad')) + if do_squad: + quad_dirs = [s.in_eddyqc_dir for s in sessions if s.in_eddyqc_dir] + missing_sessions = [s.session_label for s in sessions \ + if not s.in_eddyqc_dir] + 
if quad_dirs: + if missing_sessions: + app.warn('Some sessions do not contain EddyQC subject data, ' + 'and will be omitted from the group-level analysis: ' + f'{missing_sessions}') + run.command(['eddy_squad', quad_dirs]) + else: + app.warn('No pre-processed sessions contain EddyQC data; ' + '"eddy_squad" skipped') + do_squad = False + else: + app.warn('EddyQC command "eddy_squad" not available; skipped') + + # Write results of interest back to the output directory; + # both per-subject and group information + progress = app.ProgressBar('Writing results to output directory', + len(sessions)+3) + if group_dir.exists(): + run.function(shutil.rmtree, group_dir) + run.function(os.makedirs, group_dir) + for s in sessions: + run.function(os.makedirs, + s.out_dir / 'connectome') + run.function(shutil.copyfile, + s.temp_connectome, + s.out_connectome_data) + json_data = {'Contributions': { + 'RFMagnitude': s.rf_multiplier, + 'SIFTMu': s.mu, + 'VoxelVolume': s.volume_multiplier, + 'WMIntensity': s.dwiintensitynorm_factor}, + 'Multiplier': s.global_multiplier} + with open(s.out_connectome_json, 'w', encoding='utf-8') as json_file: + json.dump(json_data, json_file) + progress.increment() + app.cleanup(GROUP_CONNECTOMES_DIR) + + matrix.save_matrix(group_dir / 'tissue-WM_response.txt', + mean_rf, + force=IS_CONTAINER) + progress.increment() + if consistent_parcellation: + matrix.save_matrix(out_connectome_path, + mean_connectome, + force=IS_CONTAINER) + with open(group_dir / 'normalisation.tsv', 'w', encoding='utf-8') as tsv_file: + tsv_file.write('session_id\trf\tmu\tvol\twmb0\tnorm\n') + for s in sessions: + tsv_file.write(f'{s.session_label}\t' + f'{s.rf_multiplier}\t' + f'{s.mu}\t' + f'{s.volume_multiplier}\t' + f'{s.dwiintensitynorm_factor}\t' + f'{s.global_multiplier}\n') + with open(group_dir / 'normalisation.json', + 'w', + encoding='utf-8') as json_file: + json.dump(NORMALISATION_JSON_DATA, json_file) + progress.increment() + if do_squad: + run.function(os.makedirs, + 
group_dir / 'eddyqc') + for filename in ['group_db.json', 'group_qc.pdf']: + run.function(shutil.copyfile, + filename, + group_dir / 'eddyqc' / filename) + progress.done() + + # For group-level analysis, function is only executed once, so + # no need to bypass the default scratch cleanup + # Only exception is if we want to capture the whole scratch directory + # in the output path + if output_verbosity == 4: + app.console('Copying scratch directory to output location') + run.function(shutil.copytree, + app.SCRATCH_DIR, + group_dir / 'scratch') diff --git a/mrtrix3_connectome/mrtrix3_connectome.py b/mrtrix3_connectome/mrtrix3_connectome.py new file mode 100644 index 0000000..5a77eca --- /dev/null +++ b/mrtrix3_connectome/mrtrix3_connectome.py @@ -0,0 +1,8 @@ +from .usage import usage as my_usage +from .execute import execute as my_execute + +def usage(cmdline): #pylint: disable=unused-variable + return my_usage(cmdline) + +def execute(): #pylint: disable=unused-variable + return my_execute() diff --git a/mrtrix3_connectome/participant/__init__.py b/mrtrix3_connectome/participant/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/mrtrix3_connectome/participant/run.py b/mrtrix3_connectome/participant/run.py new file mode 100644 index 0000000..acd9712 --- /dev/null +++ b/mrtrix3_connectome/participant/run.py @@ -0,0 +1,908 @@ +import glob +import json +import os +import pathlib +import shutil +from collections import namedtuple +from mrtrix3 import MRtrixError +from mrtrix3 import app +from mrtrix3 import fsl +from mrtrix3 import image +from mrtrix3 import run +from ..anat.get import get_t1w_preproc_images + +OUT_5TT_JSON_DATA = {'LabelMap': ['CGM', 'SGM', 'WM', 'CSF', 'Path']} + +def run_participant(bids_dir, session, shared, + t1w_preproc_path, output_verbosity, output_app_dir): + + session_label = '_'.join(session) + output_analysis_level_path = output_app_dir / 'MRtrix3_connectome-participant' + output_subdir = 
pathlib.Path(output_analysis_level_path, *session) + + if output_subdir.exists(): + app.warn('Participant-level output directory' + f' for session "{session_label}" already exists;' + ' all contents will be erased when this execution completes') + + # Check paths of individual output files before script completion + # by building a database of what files are to be written to output + parc_string = f'_desc-{shared.parcellation}' + OutputItem = \ + namedtuple( + 'OutputItem', + 'is_image min_verbosity needs_multishell options path') + output_items = { + 'response_wm.txt': \ + OutputItem(False, 1, False, None, + pathlib.PurePath('dwi', f'{session_label}_tissue-WM_response.txt')), + 'T1w_mask.mif': \ + OutputItem(True, 1, False, + '-strides +1,+2,+3 -datatype uint8', + pathlib.PurePath('anat', f'{session_label}_desc-brain_mask.nii.gz')), + '5TT.mif': \ + OutputItem(True, 2, False, + '-strides +1,+2,+3,+4', + pathlib.PurePath('anat', f'{session_label}_desc-5tt_probseg.nii.gz')), + '5TT.json': \ + OutputItem(False, 2, False, None, + pathlib.PurePath('anat', f'{session_label}_desc-5tt_probseg.json')), + 'vis.mif': \ + OutputItem(True, 2, False, '-strides +1,+2,+3', + pathlib.PurePath('anat', f'{session_label}_desc-vis_probseg.nii.gz')), + 'FOD_WM.mif': \ + OutputItem(True, 2, False, '-strides +1,+2,+3,+4', + pathlib.PurePath('dwi', f'{session_label}_tissue-WM_ODF.nii.gz')), + 'response_gm.txt': \ + OutputItem(False, 2, True, None, + pathlib.PurePath('dwi', f'{session_label}_tissue-GM_response.txt')), + 'response_csf.txt': \ + OutputItem(False, 2, True, None, + pathlib.PurePath('dwi', f'{session_label}_tissue-CSF_response.txt')), + 'FOD_GM.mif': \ + OutputItem(True, 2, True, '-strides +1,+2,+3,+4', + pathlib.PurePath('dwi', f'{session_label}_tissue-GM_ODF.nii.gz')), + 'FOD_CSF.mif': \ + OutputItem(True, 2, True, '-strides +1,+2,+3,+4', + pathlib.PurePath('dwi', f'{session_label}_tissue-CSF_ODF.nii.gz')), + 'tissues.mif': \ + OutputItem(True, 2, True, '-strides 
+1,+2,+3,+4', + pathlib.PurePath('dwi', f'{session_label}_tissue-all_probseg.nii.gz')) + } + + if shared.parcellation != 'none': + output_items['connectome.csv'] = \ + OutputItem(False, 1, False, None, + pathlib.PurePath('connectome', + f'{session_label}{parc_string}_connectome.csv')) + output_items['mu.txt'] = \ + OutputItem(False, 1, False, None, + pathlib.PurePath('tractogram', + f'{session_label}_mu.txt')) + output_items['parc.mif'] = \ + OutputItem(True, 2, False, '-strides +1,+2,+3', + pathlib.PurePath('anat', + f'{session_label}{parc_string}_dseg.nii.gz')) + output_items['meanlength.csv'] = \ + OutputItem(False, 2, False, None, + pathlib.PurePath('connectome', + f'{session_label}{parc_string}_meanlength.csv')) + output_items['assignments.csv'] = \ + OutputItem(False, 3, False, None, + pathlib.PurePath('connectome', + f'{session_label}{parc_string}_assignments.csv')) + output_items['nodes_smooth.obj'] = \ + OutputItem(False, 3, False, None, + pathlib.PurePath('anat', + f'{session_label}{parc_string}_dseg.obj')) + output_items['exemplars.tck'] = \ + OutputItem(False, 3, False, None, + pathlib.PurePath('connectome', + f'{session_label}{parc_string}_exemplars.tck')) + output_items['parcRGB.mif'] = \ + OutputItem(True, 3, False, '-strides +1,+2,+3,+4', + pathlib.PurePath('anat', + f'{session_label}{parc_string}_desc-rgb_dseg.nii.gz')) + + if shared.streamlines or shared.parcellation != 'none': + output_items['tractogram.tck'] = \ + OutputItem(False, 3, False, None, + pathlib.PurePath('tractogram', + f'{session_label}_tractogram.tck')) + output_items['weights.csv'] = \ + OutputItem(False, 3, False, None, + pathlib.PurePath('tractogram', + f'{session_label}_weights.csv')) + output_items['tdi_dwi.mif'] = \ + OutputItem(True, 3, False, '-strides +1,+2,+3', + pathlib.PurePath('tractogram', + f'{session_label}_space-dwi_tdi.nii.gz')) + output_items['tdi_T1w.mif'] = \ + OutputItem(True, 3, False, '-strides +1,+2,+3', + pathlib.PurePath('tractogram', + 
f'{session_label}_space-T1w_tdi.nii.gz')) + output_items['tdi_hires.mif'] = \ + OutputItem(True, 3, False, '-strides +1,+2,+3', + pathlib.PurePath('tractogram', + f'{session_label}_space-superres_tdi.nii.gz')) + + + def do_import(import_path): + in_dwi_image_list = list(pathlib.Path(import_path, *session, 'dwi').glob('*_dwi.nii*')) + if not in_dwi_image_list: + raise MRtrixError(f'No DWIs found for session "{session_label}"') + if len(in_dwi_image_list) > 1: + raise MRtrixError('To run participant-level analysis, ' + 'input directory should contain only one DWI image file; ' + f'session "{session_label}" contains {len(in_dwi_image_list)}') + in_dwi_path = in_dwi_image_list[0] + if not '_desc-preproc_' in in_dwi_path: + raise MRtrixError(f'Input DWI image "{in_dwi_path}" not flagged as pre-processed data') + in_dwi_path_prefix = pathlib.Path(in_dwi_path) + while in_dwi_path_prefix.suffix: + in_dwi_path_prefix = in_dwi_path_prefix.with_suffix('') + # Don't look for bvec / bval in a lower directory in this case + in_bvec_path = in_dwi_path_prefix.with_suffix('.bvec') + in_bval_path = in_dwi_path_prefix.with_suffix('.bval') + if not in_bvec_path.is_file() \ + or not in_bval_path.is_file(): + raise MRtrixError('Did not find bvec / bval pair ' + f'corresponding to image {in_dwi_path} ' + f'(expected locations: "{in_bvec_path}" "{in_bval_path}")') + # JSON isn't compulsory in this case + in_dwi_json_path = in_dwi_path_prefix.with_suffix('.json') + in_dwi_json_import_option = ['-json_import', in_dwi_json_path] \ + if in_dwi_json_path.is_file() \ + else [] + # Is there a mask present? 
+ in_dwi_mask_image_list = list(pathlib.Path(import_path, *session, 'dwi') + .glob('*_desc-brain*_mask.nii*')) + if len(in_dwi_mask_image_list) > 1: + raise MRtrixError(f'More than one DWI mask found for session "{session_label}"') + in_dwi_mask_path = in_dwi_mask_image_list[0] \ + if in_dwi_mask_image_list \ + else None + if not in_dwi_mask_path: + output_items['dwi_mask.mif'] = \ + OutputItem(True, 1, False, + '-strides +1,+2,+3 -datatype uint8', + pathlib.PurePath('dwi', f'{session_label}_desc-brain_mask.nii.gz')) + + app.console('Importing pre-processed data into scratch directory') + + run.command(['mrconvert', + in_dwi_path, + 'dwi.mif', + '-fslgrad', in_bvec_path, in_bval_path, + '-strides', '0,0,0,1'] + + in_dwi_json_import_option) + + if in_dwi_mask_path: + run.command(['mrconvert', + in_dwi_mask_path, + 'dwi_mask.mif', + '-datatype', 'bit']) + + get_t1w_preproc_images(import_path, + session, + shared.t1w_shared, + t1w_preproc_path) + if pathlib.Path('T1w_premasked.mif').is_file(): + t1w_is_premasked = True + elif pathlib.Path('T1w.mif').is_file(): + t1w_is_premasked = False + else: + raise MRtrixError('No T1-weighted image found ' + f'for session {session_label}; ' + 'cannot perform participant-level analysis') + if shared.do_freesurfer and t1w_is_premasked: + raise MRtrixError('Cannot execute FreeSurfer for obtaining parcellation:' + ' input T1-weighted image is already skull-stripped') + # End of do_import() function + + + # We first make an attempt at loading all requisite data from + # "bids_dir" (since the user may have used that path to request + # that the pre-processed data be utilised from some path other than + # "mrtrix3_connectome-preproc/"); if that doesn't work, we wipe the + # scratch directory and try again based on the latter + cwd = pathlib.Path.cwd() + app.activate_scratch_dir() + try: + do_import(bids_dir) + except MRtrixError as e_frombids: + for item in app.SCRATCH_DIR.iterdir(): + os.remove(item) + try: + preproc_dir = 
output_app_dir / 'MRtrix3_connectome-preproc' + do_import(preproc_dir) + except MRtrixError as e_fromoutput: + err = 'Unable to import requisite pre-processed data' \ + ' from either specified input directory' \ + ' or MRtrix3_connectome output directory\n' + err += f'Error when attempting load from "{bids_dir}":\n' + err += str(e_frombids) + '\n' + err += f'Error when attempting load from "{preproc_dir}":\n' + err += str(e_fromoutput) + raise MRtrixError(err) # pylint: disable=raise-missing-from + + + # T1-weighted data are always written to output directory regardless; + # output paths can only be constructed now + t1w_image = pathlib.Path('T1w_premasked.mif') + t1w_is_premasked = True + if not t1w_image.is_file(): + t1w_image = pathlib.Path('T1w.mif') + assert t1w_image.is_file() + t1w_is_premasked = False + output_items[str(t1w_image)] = \ + OutputItem(True, 1, False, ' -strides +1,+2,+3', + pathlib.PurePath('anat', f'{session_label}_desc-preproc_T1w.nii.gz')) + t1w_json_data = {"SkullStripped": t1w_is_premasked} + t1w_json_path = pathlib.Path(t1w_image) + while t1w_json_path.suffix: + t1w_json_path = t1w_json_path.with_suffix('') + t1w_json_path = t1w_json_path.with_suffix('.json') + with open(t1w_json_path, 'w', encoding='utf-8') as t1w_json_file: + json.dump(t1w_json_data, t1w_json_file) + output_items[str(t1w_json_path)] = \ + OutputItem(False, 1, False, None, + pathlib.PurePath('anat', f'{session_label}_desc-preproc_T1w.json')) + + # Before we can begin: Are there any data we require + # that were not imported from the output directory? 
+ if not pathlib.Path('dwi_mask.mif').is_file(): + app.console('Generating DWI brain mask ' + '(was not already present in pre-processing directory)') + run.command(f'dwi2mask {shared.dwi2mask_algo} dwi.mif dwi_mask.mif') + + # Step 1: Estimate response functions for spherical deconvolution + app.console('Estimating tissue response functions for ' + 'spherical deconvolution') + run.command('dwi2response dhollander dwi.mif ' + 'response_wm.txt response_gm.txt response_csf.txt ' + '-mask dwi_mask.mif') + + # Determine whether we are working with single-shell or multi-shell data + bvalues = [ + int(round(float(value))) + for value in image.mrinfo('dwi.mif', 'shell_bvalues') \ + .strip().split()] + multishell = len(bvalues) > 2 + + # Step 2: Perform spherical deconvolution + # Don't even use a processing mask: + # ACT should be responsible for stopping streamlines before they + # reach the edge of the DWI mask + # Also means that any subsequent manual use of the FOD + # images can't possibly be detrimentally affected by + # bad masking + deconvolution_msg = 'multi-tissue ODF images' \ + if multishell \ + else 'Fibre Orientation Distribution image' + app.console(f'Estimating {deconvolution_msg}') + # TODO Update to use similar code to preproc? + # Would have consequences for group-level analysis... 
+ if multishell: + run.command('dwi2fod msmt_csd dwi.mif ' + 'response_wm.txt FOD_WM.mif ' + 'response_gm.txt FOD_GM.mif ' + 'response_csf.txt FOD_CSF.mif ' + '-lmax 10,0,0') + run.command('mrconvert FOD_WM.mif - -coord 3 0 | ' + 'mrcat FOD_CSF.mif FOD_GM.mif - tissues.mif -axis 3') + else: + # Still use the msmt_csd algorithm with single-shell data: + # Use hard non-negativity constraint + # Also incorporate the CSF response to provide some fluid attenuation + run.command('dwi2fod msmt_csd dwi.mif ' + 'response_wm.txt FOD_WM.mif ' + 'response_csf.txt FOD_CSF.mif ' + '-lmax 10,0') + app.cleanup('FOD_CSF.mif') + + # Step 3: Generate 5TT image for ACT + # Use T1w brain mask generated from elsewhere: + # don't particularly trust the raw "bet" call inside + # 5ttgen fsl + app.console('Generating five-tissue-type (5TT) image for ' + 'Anatomically-Constrained Tractography (ACT)') + run.command(['5ttgen', 'fsl', + t1w_image, + '5TT.mif'] + + (['-premasked'] \ + if t1w_is_premasked \ + else ['-mask', 'T1w_mask.mif'])) + if output_verbosity > 1: + with open('5TT.json', 'w', encoding='utf-8') as out_5tt_json_file: + json.dump(OUT_5TT_JSON_DATA, out_5tt_json_file) + run.command('5tt2vis 5TT.mif vis.mif') + + # Step 4: Generate the grey matter parcellation + # The necessary steps here will vary significantly depending on + # the parcellation scheme selected + if shared.do_freesurfer: + app.console('Getting grey matter parcellation in ' + 'subject space using FreeSurfer') + + # Since we're instructing recon-all to use a different subject + # directory, we need to construct softlinks to a number of + # directories provided by FreeSurfer that recon-all will + # expect to find in the same directory as the overridden + # subject path + subdirs = ['fsaverage', 'lh.EC_average', 'rh.EC_average'] + if shared.parcellation in ['yeo7fs', 'yeo17fs']: + subdirs.append('fsaverage5') + for subdir in subdirs: + run.function(shared.freesurfer_template_link_function, + 
shared.freesurfer_subjects_dir / subdir, + subdir) + + # Run FreeSurfer pipeline on this subject's T1w image + # If the pre-processed T1-weighted image is not brain-extracted, + # we'll use that here; but if it is, fingers crossed we have the + # raw T1-weighted image that was used to generate it... + # TODO Improve on this + if t1w_is_premasked: + freesurfer_t1w_input = pathlib.Path('T1w_raw.nii') + if not os.path.isfile('T1w_raw.nii'): + raise MRtrixError( + 'Cannot run FreeSurfer: ' + 'pre-processed T1-weighted image is skull-stripped') + else: + freesurfer_t1w_input = 'T1w.nii' + run.command(f'mrconvert T1w.mif {freesurfer_t1w_input} -strides +1,+2,+3') + run.command(['recon-all', + '-sd', app.SCRATCH_DIR, + '-subjid', 'freesurfer', + '-i', freesurfer_t1w_input]) + run.command(['recon-all', + '-sd', app.SCRATCH_DIR, + '-subjid', 'freesurfer', + '-all'] + + shared.reconall_multithread_options) + + # Grab the relevant parcellation image and + # target lookup table for conversion + parc_image_path = pathlib.Path(app.SCRATCH_DIR, 'freesurfer', 'mri') + if shared.parcellation == 'desikan': + parc_image_path = parc_image_path / 'aparc+aseg.mgz' + elif shared.parcellation == 'destrieux': + parc_image_path = parc_image_path / 'aparc.a2009s+aseg.mgz' + else: + # Non-standard parcellations are not applied as part of + # the recon-all command; need to explicitly map them to + # the subject + # This requires SUBJECTS_DIR to be set; + # commands don't have a corresponding -sd option like recon-all + env = run.shared.env + env['SUBJECTS_DIR'] = str(app.SCRATCH_DIR) + if shared.parcellation == 'brainnetome246fs': + for index, hemi in enumerate(['l', 'r']): + run.command([ + 'mris_ca_label', + '-l', + pathlib.Path(app.SCRATCH_DIR, + 'freesurfer', + 'label', + f'{hemi}h.cortex.label'), + 'freesurfer', + f'{hemi}h', + pathlib.Path(app.SCRATCH_DIR, + 'freesurfer', + 'surf', + f'{hemi}h.sphere.reg'), + shared.brainnetome_cortex_gcs_paths[index], + pathlib.Path(app.SCRATCH_DIR, + 
'freesurfer', + 'label', + f'{hemi}h.BN_Atlas.annot')], + env=env) + run.command([ + 'mri_label2vol', + '--annot', + pathlib.Path(app.SCRATCH_DIR, + 'freesurfer', + 'label', + f'{hemi}h.BN_Atlas.annot'), + '--temp', + pathlib.Path(app.SCRATCH_DIR, + 'freesurfer', + 'mri', + 'brain.mgz'), + '--o', + pathlib.Path(app.SCRATCH_DIR, + 'freesurfer', + 'mri', + f'{hemi}h.BN_Atlas.mgz'), + '--subject', 'freesurfer', + '--hemi', f'{hemi}h', + '--identity', + '--proj', 'frac', '0', '1', '.1'], + env=env) + run.command([ + 'mri_ca_label', + pathlib.Path(app.SCRATCH_DIR, + 'freesurfer', + 'mri', + 'brain.mgz'), + pathlib.Path(app.SCRATCH_DIR, + 'freesurfer', + 'mri', + 'transforms', + 'talairach.m3z'), + shared.brainnetome_sgm_gca_path, + pathlib.Path(app.SCRATCH_DIR, + 'freesurfer', + 'mri', + 'BN_Atlas_subcortex.mgz')], + env=env) + parc_image_path = parc_image_path / 'aparc.BN_Atlas+aseg.mgz' + # Need to deal with prospect of overlapping mask labels + # - Any overlap between the two hemisphere ribbons + # = set to zero + # - Any overlap between cortex and sub-cortical + # = retain cortex + run.command(['mrcalc', + [pathlib.Path('freesurfer', + 'mri', + f'{hemi}h.BN_Atlas.mgz') + for hemi in ['l', 'r']], + '-mult', + 'cortex_overlap.mif', + '-datatype', 'bit']) + run.command(['mrcalc', + [pathlib.Path('freesurfer', + 'mri', + f'{hemi}h.BN_Atlas.mgz') + for hemi in ['l', 'r']], + '-add', + pathlib.Path('freesurfer', + 'mri', + 'BN_Atlas_subcortex.mgz'), + '-mult', + 'sgm_overlap.mif', + '-datatype', 'bit']) + run.command(['mrcalc', + [pathlib.Path('freesurfer', + 'mri', + f'{hemi}h.BN_Atlas.mgz') + for hemi in ['l', 'r']], + '-add', + '1.0', + 'cortex_overlap.mif', + '-sub', + '-mult', + pathlib.Path('freesurfer', + 'mri', + 'BN_Atlas_subcortex.mgz'), + '1.0', + 'sgm_overlap.mif', + '-sub', + '-mult', + '-add', + parc_image_path]) + app.cleanup('cortex_overlap.mif') + app.cleanup('sgm_overlap.mif') + + elif shared.parcellation == 'hcpmmp1': + parc_image_path = parc_image_path 
/ 'aparc.HCPMMP1+aseg.mgz' + for index, hemi in enumerate(['l', 'r']): + run.command(['mri_surf2surf', + '--srcsubject', 'fsaverage', + '--trgsubject', 'freesurfer', + '--hemi', f'{hemi}h', + '--sval-annot', + shared.hcpmmp1_annot_paths[index], + '--tval', + pathlib.Path(app.SCRATCH_DIR, + 'freesurfer', + 'label', + f'{hemi}h.HCPMMP1.annot')], + env=env) + run.command(['mri_aparc2aseg', + '--s', 'freesurfer', + '--old-ribbon', + '--annot', 'HCPMMP1', + '--o', parc_image_path], + env=env) + elif shared.parcellation in ['yeo7fs', 'yeo17fs']: + num = '7' if shared.parcellation == 'yeo7fs' else '17' + parc_image_path = parc_image_path / f'aparc.Yeo{num}+aseg.mgz' + for index, hemi in enumerate(['l', 'r']): + run.command(['mri_surf2surf', + '--srcsubject', 'fsaverage5', + '--trgsubject', 'freesurfer', + '--hemi', f'{hemi}h', + '--sval-annot', + shared.yeo_annot_paths[index], + '--tval', + pathlib.Path(app.SCRATCH_DIR, + 'freesurfer', + 'label', + f'{hemi}h.Yeo{num}.annot')], + env=env) + run.command(['mri_aparc2aseg', + '--s', 'freesurfer', + '--old-ribbon', + '--annot', f'Yeo{num}', + '--o', parc_image_path], + env=env) + else: + assert False + + if shared.mrtrix_lut_file: + # If necessary: + # Perform the index conversion + run.command(['labelconvert', + parc_image_path, + shared.parc_lut_file, + shared.mrtrix_lut_file, + 'parc_init.mif']) + # Substitute the sub-cortical grey matter parcellations + # with estimates from FSL FIRST + run.command(['labelsgmfix', + 'parc_init.mif', + freesurfer_t1w_input, + shared.mrtrix_lut_file, + 'parc.mif']) + app.cleanup('parc_init.mif') + else: + # Non-standard sub-cortical parcellation; + # labelsgmfix not applicable + run.command(f'mrconvert {parc_image_path} parc.mif ' + '-datatype uint32') + app.cleanup('freesurfer') + + + elif shared.do_mni: + app.console('Registering to MNI template and transforming grey ' + 'matter parcellation back to subject space') + + # Use non-dilated brain masks for performing + # histogram matching & 
linear registration + t1w_histmatched_path = pathlib.Path('T1w_histmatch.nii') + histmatched_strides = '-1,+2,+3' \ + if shared.template_registration_software == 'fsl' \ + else '+1,+2,+3' + run.command(f'mrhistmatch linear {t1w_image} {shared.template_image_path}' + ' -mask_input T1w_mask.mif' + f' -mask_target {shared.template_mask_path} - | ' + f'mrconvert - {t1w_histmatched_path}' + f' -strides {histmatched_strides}') + + assert shared.template_registration_software + if shared.template_registration_software == 'ants': + + # Use ANTs SyN for registration to template + # From Klein and Avants, Frontiers in Neuroinformatics 2013: + ants_prefix = 'template_to_t1_' + run.command(['antsRegistration', + '--dimensionality', '3', + '--output', ants_prefix, + '--use-histogram-matching', '1', + '--initial-moving-transform', + f'[{t1w_histmatched_path},{shared.template_image_path},1]', + '--transform', 'Rigid[0.1]', + '--metric', + f'MI[{t1w_histmatched_path},{shared.template_image_path},1,32,Regular,0.25]', + '--convergence', '1000x500x250x100', + '--smoothing-sigmas', '3x2x1x0', + '--shrink-factors', '8x4x2x1', + '--transform', 'Affine[0.1]', + '--metric', + f'MI[{t1w_histmatched_path},{shared.template_image_path},1,32,Regular,0.25]', + '--convergence', '1000x500x250x100', + '--smoothing-sigmas', '3x2x1x0', + '--shrink-factors', '8x4x2x1', + '--transform', 'BSplineSyN[0.1,26,0,3]', + '--metric', + f'CC[{t1w_histmatched_path},{shared.template_image_path},1,4]', + '--convergence', '100x70x50x20', + '--smoothing-sigmas', '3x2x1x0', + '--shrink-factors', '6x4x2x1']) + transformed_atlas_path = 'atlas_transformed.nii' + run.command(['antsApplyTransforms', + '--dimensionality', '3', + '--input', shared.parc_image_path, + '--reference-image', t1w_histmatched_path, + '--output', transformed_atlas_path, + '--n', 'GenericLabel', + '--transform', f'{ants_prefix}1Warp.nii.gz', + '--transform', f'{ants_prefix}0GenericAffine.mat', + '--default-value', '0']) + 
app.cleanup(glob.glob(f'{ants_prefix}*')) + + elif shared.template_registration_software == 'fsl': + + # Subject T1w, brain masked; for flirt -in + flirt_in_path = pathlib.Path(t1w_histmatched_path) + if not t1w_is_premasked: + while flirt_in_path.suffix: + flirt_in_path = flirt_in_path.with_suffix('') + flirt_in_path = f'{flirt_in_path}_masked.nii' + run.command(f'mrcalc {t1w_histmatched_path} T1w_mask.mif -mult {flirt_in_path}') + # Template T1w, brain masked; for flirt -ref + flirt_ref_path = pathlib.Path('template_masked.nii') + run.command(f'mrcalc {shared.template_image_path} {shared.template_mask_path}' + ' -mult - | ' + f'mrconvert - {flirt_ref_path} -strides -1,+2,+3') + # Now have data required to run flirt + run.command([shared.flirt_cmd, + '-ref', flirt_ref_path, + '-in', flirt_in_path, + '-omat', 'T1w_to_template.mat', + '-dof', '12', + '-cost', 'leastsq']) + if not t1w_is_premasked: + app.cleanup(flirt_in_path) + app.cleanup(flirt_ref_path) + + # If possible, use dilated brain masks for non-linear + # registration to mitigate mask edge effects; + # if T1-weighted image is premasked, can't do this + fnirt_in_path = t1w_histmatched_path + fnirt_ref_path = shared.template_image_path + if t1w_is_premasked: + fnirt_in_mask_path = pathlib.Path('T1w_mask.nii') + run.command(f'mrconvert T1w_mask.mif {fnirt_in_mask_path} -strides -1,+2,+3') + fnirt_ref_mask_path = shared.template_mask_path + else: + fnirt_in_mask_path = pathlib.Path('T1w_mask_dilated.nii') + run.command('maskfilter T1w_mask.mif dilate - -npass 3 | ' + f'mrconvert - {fnirt_in_mask_path} -strides -1,+2,+3') + fnirt_ref_mask_path = pathlib.Path('template_mask_dilated.nii') + run.command(f'maskfilter {shared.template_mask_path} dilate {fnirt_ref_mask_path}' + ' -npass 3') + + run.command([shared.fnirt_cmd, + f'--config={shared.fnirt_config_basename}', + f'--ref={fnirt_ref_path}', + f'--in={fnirt_in_path}', + '--aff=T1w_to_template.mat', + f'--refmask={fnirt_ref_mask_path}', + 
f'--inmask={fnirt_in_mask_path}', + '--cout=T1w_to_template_warpcoef.nii']) + app.cleanup(fnirt_in_mask_path) + if not t1w_is_premasked: + app.cleanup(fnirt_ref_mask_path) + app.cleanup('T1w_to_template.mat') + fnirt_warp_subject2template_path = \ + fsl.find_image('T1w_to_template_warpcoef') + + # Use result of registration to transform atlas + # parcellation to subject space + run.command([shared.invwarp_cmd, + f'--ref={t1w_histmatched_path}', + f'--warp={fnirt_warp_subject2template_path}', + '--out=template_to_T1w_warpcoef.nii']) + app.cleanup(fnirt_warp_subject2template_path) + fnirt_warp_template2subject_path = \ + fsl.find_image('template_to_T1w_warpcoef') + run.command([shared.applywarp_cmd, + f'--ref={t1w_histmatched_path}', + f'--in={shared.parc_image_path}', + f'--warp={fnirt_warp_template2subject_path}', + '--out=atlas_transformed.nii', + '--interp=nn']) + app.cleanup(fnirt_warp_template2subject_path) + transformed_atlas_path = fsl.find_image('atlas_transformed') + + app.cleanup(t1w_histmatched_path) + + if shared.parc_lut_file and shared.mrtrix_lut_file: + run.command(['labelconvert', + transformed_atlas_path, + shared.parc_lut_file, + shared.mrtrix_lut_file, + 'parc.mif']) + else: + # Not all parcellations need to go through the labelconvert step; + # they may already be numbered incrementally from 1 + run.command(f'mrconvert {transformed_atlas_path} parc.mif') + app.cleanup(transformed_atlas_path) + + + if output_verbosity > 2 and shared.parcellation != 'none': + if shared.mrtrix_lut_file: + label2colour_lut_option = ['-lut', shared.mrtrix_lut_file] + elif shared.parc_lut_file: + label2colour_lut_option = ['-lut', shared.parc_lut_file] + else: + # Use random colouring if no LUT available, but + # still generate the image + label2colour_lut_option = [] + run.command(['label2colour', 'parc.mif', 'parcRGB.mif'] + + label2colour_lut_option) + + # If no parcellation is requested, it is still possible to + # generate a whole-brain tractogram by explicitly 
providing + # the -streamlines option + num_streamlines = None + if shared.streamlines: + num_streamlines = shared.streamlines + elif shared.parcellation != 'none': + # If not manually specified, determine the appropriate + # number of streamlines based on the number of nodes + # in the parcellation: + # mean edge weight of 1,000 streamlines + num_nodes = int(image.statistics('parc.mif').max) + num_streamlines = 500 * num_nodes * (num_nodes-1) + if num_streamlines: + + # Step 5: Generate the tractogram + app.console('Performing whole-brain fibre-tracking') + tractogram_filepath = pathlib.Path(f'tractogram_{num_streamlines}.tck') + run.command(f'tckgen FOD_WM.mif {tractogram_filepath}' + ' -act 5TT.mif -backtrack -crop_at_gmwmi' + ' -maxlength 250' + ' -power 0.33' + f' -select {num_streamlines}' + ' -seed_dynamic FOD_WM.mif') + + # Step 6: Use SIFT2 to determine streamline weights + app.console('Running the SIFT2 algorithm to assign ' + 'weights to individual streamlines') + fd_scale_gm_option = [] + if not multishell: + fd_scale_gm_option = ['-fd_scale_gm'] + # If SIFT2 fails, reduce number of streamlines and try again + while num_streamlines: + try: + run.command(['tcksift2', + tractogram_filepath, + 'FOD_WM.mif', + 'weights.csv', + '-act', '5TT.mif', + '-out_mu', 'mu.txt'] + + fd_scale_gm_option) + break + except run.MRtrixCmdError: + app.warn('SIFT2 failed, likely due to running out of RAM; ' + 'reducing number of streamlines and trying again') + num_streamlines = int(num_streamlines // 2) + new_tractogram_filepath = \ + pathlib.Path(f'tractogram_{num_streamlines}.tck') + run.command(f'tckedit {tractogram_filepath} {new_tractogram_filepath}' + f' -number {num_streamlines}') + app.cleanup(tractogram_filepath) + tractogram_filepath = new_tractogram_filepath + if not num_streamlines: + raise MRtrixError('Unable to run SIFT2 algorithm for ' + 'any number of streamlines') + run.function(shutil.move, tractogram_filepath, 'tractogram.tck') + tractogram_filepath = 
pathlib.Path('tractogram.tck') + + + if output_verbosity > 2: + # Generate TDIs: + # - A TDI at DWI native resolution, with SIFT mu scaling, + # and precise mapping + # (for comparison to WM ODF l=0 term, to + # verify that SIFT2 has worked correctly) + app.console('Producing Track Density Images (TDIs)') + # TODO This may have a header + with open('mu.txt', 'r', encoding='utf-8') as f: + mu = float(f.read()) + # In the space of the DWI image + run.command(f'tckmap {tractogram_filepath} -' + ' -tck_weights_in weights.csv' + ' -template FOD_WM.mif' + ' -precise' + ' | ' + f'mrcalc - {mu} -mult tdi_dwi.mif') + # In the space of the T1-weighted image + run.command(f'tckmap {tractogram_filepath} -' + ' -tck_weights_in weights.csv' + f' -template {t1w_image}' + ' -precise' + ' | ' + f'mrcalc - {mu} -mult tdi_T1w.mif') + # - Conventional TDI at super-resolution + # (mostly just because we can) + run.command(f'tckmap {tractogram_filepath} tdi_hires.mif' + ' -tck_weights_in weights.csv' + ' -vox 0.25' + ' -datatype uint16') + + + if shared.parcellation != 'none': + # Step 7: Generate the connectome + # Also get the mean length for each edge; + # this is the most likely alternative contrast to be useful + app.console('Combining whole-brain tractogram with grey matter ' + 'parcellation to produce the connectome') + assignment_option = \ + ['-assignment_radial_search', '5'] \ + if shared.parcellation in ['yeo7mni', 'yeo17mni'] \ + else [] + run.command(['tck2connectome', tractogram_filepath, 'parc.mif', 'connectome.csv', + '-tck_weights_in', 'weights.csv', + '-out_assignments', 'assignments.csv'] + + assignment_option) + run.command(['tck2connectome', tractogram_filepath, 'parc.mif', 'meanlength.csv', + '-tck_weights_in', 'weights.csv', + '-scale_length', + '-stat_edge', 'mean'] + + assignment_option) + + if output_verbosity > 2: + # Produce additional data that can be used for + # visualisation within mrview's connectome toolbar + app.console('Generating geometric data for ' 
+ 'enhanced connectome visualisation') + run.command(f'connectome2tck {tractogram_filepath} assignments.csv exemplars.tck' + ' -tck_weights_in weights.csv' + ' -exemplars parc.mif' + ' -files single') + run.command('label2mesh parc.mif nodes.obj') + run.command('meshfilter nodes.obj smooth nodes_smooth.obj') + app.cleanup('nodes.obj') + + + + # Prepare output path for writing + app.console(f'Processing for session "{session_label}" completed;' + ' writing results to output directory') + subdirs_to_make = ['anat', 'dwi', 'tractogram'] + if shared.parcellation != 'none': + subdirs_to_make.insert(1, 'connectome') + for subdir in subdirs_to_make: + full_subdir_path = output_subdir / subdir + if full_subdir_path.exists(): + run.function(shutil.rmtree, full_subdir_path) + run.function(os.makedirs, full_subdir_path) + + # Generate a copy of the lookup table file: + # - Use the post-labelconvert file if it's used; + # otherwise, if the atlas itself comes with a lookup table + # that didn't require conversion, write that; + # - In the group directory rather than the subject directory; + # - If it doesn't already exist. 
+ lut_export_file = shared.mrtrix_lut_file \ + if shared.mrtrix_lut_file \ + else shared.parc_lut_file + if lut_export_file: + lut_export_path = \ + output_analysis_level_path / \ + f'{parc_string[1:]}_lookup{lut_export_file.suffix}' + try: + shutil.copy(lut_export_file, lut_export_path) + except OSError: + pass + + # Copy / convert necessary files to output directory + for scratch_file, output_item in output_items.items(): + if output_verbosity >= output_item.min_verbosity \ + and (multishell or not output_item.needs_multishell): + full_output_path = output_subdir / output_item.path + if output_item.is_image: + run.command(f'mrconvert {scratch_file} {full_output_path}' + ' -clear_property comments' + + (' ' + output_item.options + if output_item.options + else ''), + force=full_output_path.exists()) + else: + run.function(shutil.copyfile, + scratch_file, + full_output_path) + + # Manually wipe and zero the scratch directory + # (since we might be processing more than one subject) + os.chdir(cwd) + if app.DO_CLEANUP: + app.console(f'Deleting scratch directory {app.SCRATCH_DIR}') + # Can't use run.function() here; + # it'll try to write to the log file that resides + # in the scratch directory just deleted + app.cleanup(app.SCRATCH_DIR) + elif output_verbosity == 4: + app.console('Copying scratch directory to output location') + run.function(shutil.copytree, + app.SCRATCH_DIR, + output_subdir / 'scratch') + else: + app.console('Contents of scratch directory kept; ' + f'location: {app.SCRATCH_DIR}') + app.SCRATCH_DIR = None diff --git a/mrtrix3_connectome/participant/shared.py b/mrtrix3_connectome/participant/shared.py new file mode 100644 index 0000000..b36277b --- /dev/null +++ b/mrtrix3_connectome/participant/shared.py @@ -0,0 +1,401 @@ +import os +import pathlib +import shutil +from mrtrix3 import MRtrixError +from mrtrix3 import app +from mrtrix3 import fsl +from .. 
import OPTION_PREFIX
from ..anat.shared import Shared as T1wShared

class Shared(object): #pylint: disable=useless-object-inheritance
    """Configuration shared across participant-level analysis of all sessions.

    On construction this validates the requested parcellation, locates all
    external software (FSL, and depending on the parcellation, ANTs and/or
    FreeSurfer) and all atlas / lookup-table files that the participant-level
    pipeline will need, raising MRtrixError immediately on anything missing
    so that failures occur before any per-session processing begins.
    """

    def __init__(self, atlas_path, parcellation,
                 streamlines, template_reg):
        """
        Parameters
        ----------
        atlas_path : pathlib.Path or None
            User-specified alternative location in which to search for atlas
            files that are not found at their default install locations.
        parcellation : str
            Name of the parcellation to use (or 'none').
        streamlines : int or None
            User-specified streamline count; if None, a count is derived later
            from the number of parcellation nodes.
        template_reg : str or None
            Requested template registration software ('ants' or 'fsl');
            only relevant for MNI-space parcellations.

        Raises
        ------
        MRtrixError
            If the parcellation is absent, required environment variables are
            unset, or any required executable / atlas / LUT file is missing.
        """

        if not parcellation:
            raise MRtrixError(
                'For participant-level analysis, '
                'desired parcellation must be provided using the '
                f'{OPTION_PREFIX}parcellation option')
        self.parcellation = parcellation

        self.streamlines = streamlines

        # FSL is a hard requirement regardless of parcellation choice
        fsl_path = os.environ.get('FSLDIR', None)
        if fsl_path:
            fsl_path = pathlib.Path(fsl_path)
            if not fsl_path.is_dir():
                raise MRtrixError(
                    'Environment variable FSLDIR does not point'
                    ' to an existing directory')
        else:
            raise MRtrixError(
                'Environment variable FSLDIR is not set; '
                'please run appropriate FSL configuration script')

        # Prefer SynthStrip-based DWI brain masking if FreeSurfer provides it;
        # otherwise fall back to the legacy dwi2mask algorithm
        self.dwi2mask_algo = 'synthstrip'
        if not shutil.which('mri_synthstrip'):
            self.dwi2mask_algo = 'legacy'
            app.warn('FreeSurfer command mri_synthstrip not present;'
                     ' legacy dwi2mask algorithm will be used if necessary'
                     ' (ie. if pre-processed data do not provide a brain mask)')

        # No GDC data provided;
        # won't be doing any T1w pre-processing
        self.t1w_shared = T1wShared({})

        # Partition parcellations by the mechanism through which they are
        # brought into subject space: FreeSurfer surface-based, or volumetric
        # registration to an MNI template
        self.do_freesurfer = parcellation in ['brainnetome246fs',
                                              'desikan',
                                              'destrieux',
                                              'hcpmmp1',
                                              'yeo7fs',
                                              'yeo17fs']
        self.do_mni = parcellation in ['aal',
                                       'aal2',
                                       'brainnetome246mni',
                                       'craddock200',
                                       'craddock400',
                                       'perry512',
                                       'yeo7mni',
                                       'yeo17mni']
        if parcellation != 'none':
            assert self.do_freesurfer or self.do_mni

        # Template registration software is only meaningful for MNI-space
        # parcellations; default to ANTs when not explicitly requested
        if template_reg:
            if self.do_mni:
                self.template_registration_software = template_reg
            else:
                app.warn('Volumetric template registration '
                         'not being performed; '
                         f'{OPTION_PREFIX}template_reg option ignored')
                self.template_registration_software = ''
        else:
            self.template_registration_software = 'ants' if self.do_mni else ''
        if self.template_registration_software == 'ants':
            if not shutil.which('antsRegistration') \
                    or not shutil.which('antsApplyTransforms'):
                raise MRtrixError(
                    'Commands \'antsRegistration\' and \'antsApplyTransforms\' '
                    'must be present in PATH to use '
                    'ANTs software for template registration')
        elif self.template_registration_software == 'fsl':
            self.flirt_cmd = fsl.exe_name('flirt')
            self.fnirt_cmd = fsl.exe_name('fnirt')
            self.invwarp_cmd = fsl.exe_name('invwarp')
            self.applywarp_cmd = fsl.exe_name('applywarp')
            self.fnirt_config_basename = 'T1_2_MNI152_2mm.cnf'
            self.fnirt_config_path = fsl_path / \
                                     'etc' / \
                                     'flirtsch' / \
                                     self.fnirt_config_basename
            if not self.fnirt_config_path.is_file():
                # BUGFIX: message previously read "FNI FNIRT"
                raise MRtrixError(
                    'Unable to find configuration file for FSL FNIRT '
                    f'(expected location: {self.fnirt_config_path})')

        self.template_image_path = None
        self.template_mask_path = None
        self.parc_image_path = None
        self.parc_lut_file = None
        self.mrtrix_lut_file = None

        # TODO This might need to change if MRtrix3 is installed
        # rather than accessing the build directory
        mrtrix_lut_dir = pathlib.Path(app.__file__).parents[2] / \
                         'share' / \
                         'mrtrix3' / \
                         'labelconvert'

        if self.do_freesurfer:
            self.freesurfer_home = os.environ.get('FREESURFER_HOME', None)
            if not self.freesurfer_home:
                raise MRtrixError(
                    'Environment variable FREESURFER_HOME not set; '
                    'please verify FreeSurfer installation')
            self.freesurfer_home = pathlib.Path(self.freesurfer_home)
            if not self.freesurfer_home.is_dir():
                raise MRtrixError(
                    'Environment variable FREESURFER_HOME'
                    ' does not point to an existing directory'
                    f' ({self.freesurfer_home})'
                )
            # FIX: recon-all was previously located twice (a bare existence
            # check followed by a second which() for self.reconall_path);
            # perform the lookup exactly once
            self.reconall_path = shutil.which('recon-all')
            if not self.reconall_path:
                raise MRtrixError(
                    'Could not find FreeSurfer script "recon-all"; '
                    'please verify FreeSurfer installation')
            self.freesurfer_subjects_dir = pathlib.Path(os.environ['SUBJECTS_DIR']) \
                                           if 'SUBJECTS_DIR' in os.environ \
                                           else (self.freesurfer_home / 'subjects')
            if not self.freesurfer_subjects_dir.is_dir():
                raise MRtrixError(
                    'Could not find FreeSurfer subjects directory '
                    f'(expected location: {self.freesurfer_subjects_dir})')
            # Template subjects required by the surface-based parcellations
            for subdir in ['fsaverage',
                           'fsaverage5',
                           'lh.EC_average',
                           'rh.EC_average']:
                if not (self.freesurfer_subjects_dir / subdir).is_dir():
                    raise MRtrixError(
                        'Could not find requisite FreeSurfer subject '
                        f'directory "{subdir}" '
                        f'(expected location: {self.freesurfer_subjects_dir / subdir})')
            if parcellation in ['hcpmmp1', 'yeo7fs', 'yeo17fs']:
                if parcellation == 'hcpmmp1':

                    def hcpmmp_annot_path(hemi):
                        # Per-hemisphere HCPMMP1 annotation file on fsaverage
                        return self.freesurfer_subjects_dir / \
                               'fsaverage' / \
                               'label' / \
                               f'{hemi}h.HCPMMP1.annot'

                    self.hcpmmp1_annot_paths = [hcpmmp_annot_path(hemi)
                                                for hemi in ['l', 'r']]
                    if not all(path.is_file() for path in self.hcpmmp1_annot_paths):
                        raise MRtrixError(
                            'Could not find necessary annotation labels '
                            'for applying HCPMMP1 parcellation '
                            f'(expected location: {hcpmmp_annot_path("?")})')
                else: # yeo7fs, yeo17fs

                    def yeo_annot_path(hemi):
                        # Per-hemisphere Yeo2011 annotation file on fsaverage5
                        return self.freesurfer_subjects_dir / \
                               'fsaverage5' / \
                               'label' / \
                               f'{hemi}h.Yeo2011_' \
                               f'{"7" if parcellation == "yeo7fs" else "17"}' \
                               'Networks_N1000.split_components.annot'

                    self.yeo_annot_paths = [yeo_annot_path(hemi)
                                            for hemi in ['l', 'r']]
                    if not all(path.is_file() for path in self.yeo_annot_paths):
                        raise MRtrixError(
                            'Could not find necessary annotation labels '
                            'for applying Yeo2011 parcellation '
                            f'(expected location: {yeo_annot_path("?")})')
                for cmd in ['mri_surf2surf', 'mri_aparc2aseg']:
                    if not shutil.which(cmd):
                        raise MRtrixError(
                            f'Could not find FreeSurfer command {cmd} '
                            '(necessary for applying HCPMMP1 parcellation); '
                            'please verify FreeSurfer installation')
            elif parcellation == 'brainnetome246fs':

                def brainnetome_gcs_path(hemi):
                    # Per-hemisphere Brainnetome cortical classifier file
                    return self.freesurfer_home / 'average' / f'{hemi}h.BN_Atlas.gcs'

                self.brainnetome_cortex_gcs_paths = [
                    brainnetome_gcs_path(hemi)
                    for hemi in ['l', 'r']]
                if not all(path.is_file() for path in self.brainnetome_cortex_gcs_paths):
                    raise MRtrixError(
                        'Could not find necessary GCS files for applying '
                        'Brainnetome cortical parcellation via FreeSurfer '
                        f'(expected location: {brainnetome_gcs_path("?")})')
                self.brainnetome_sgm_gca_path = \
                    self.freesurfer_home / 'average' / 'BN_Atlas_subcortex.gca'
                if not self.brainnetome_sgm_gca_path.is_file():
                    raise MRtrixError(
                        'Could not find necessary GCA file for applying '
                        'Brainnetome sub-cortical parcellation '
                        'via FreeSurfer '
                        f'(expected location: {self.brainnetome_sgm_gca_path})')
                for cmd in ['mri_label2vol',
                            'mri_ca_label',
                            'mris_ca_label']:
                    if not shutil.which(cmd):
                        raise MRtrixError(
                            f'Could not find FreeSurfer command {cmd} '
                            '(necessary for applying Brainnetome parcellation); '
                            'please verify FreeSurfer installation')

            # Query contents of recon-all script,
            # looking for "-openmp" and "-parallel" occurrences
            # Add options to end of recon-all -all call,
            # based on which of these options are available
            # as well as the value of app.numThreads
            # - In 5.3.0, just the -openmp option is available
            # - In 6.0.0, -openmp needs to be preceded by -parallel
            self.reconall_multithread_options = []
            if app.NUM_THREADS is None or app.NUM_THREADS > 1:
                with open(self.reconall_path, 'r', encoding='utf-8') as f:
                    reconall_text = f.read().splitlines()
                for line in reconall_text:
                    line = line.strip()
                    if line == 'case "-parallel":':
                        self.reconall_multithread_options = \
                            ['-parallel'] + self.reconall_multithread_options
                    # If number of threads in this script is not being
                    # explicitly controlled, allow recon-all to use
                    # its own default number of threads
                    elif line == 'case "-openmp":' \
                            and app.NUM_THREADS is not None:
                        self.reconall_multithread_options.extend(
                            ['-openmp', str(app.NUM_THREADS)])
            app.debug(self.reconall_multithread_options)

            # Select the atlas LUT (and, where conversion is required,
            # the corresponding MRtrix3-format LUT) per parcellation
            if parcellation == 'brainnetome246fs':
                self.parc_lut_file = self.freesurfer_home / 'BN_Atlas_246_LUT.txt'
                self.mrtrix_lut_file = None
            elif parcellation == 'desikan':
                self.parc_lut_file = self.freesurfer_home / 'FreeSurferColorLUT.txt'
                self.mrtrix_lut_file = mrtrix_lut_dir / 'fs_default.txt'
            elif parcellation == 'destrieux':
                self.parc_lut_file = self.freesurfer_home / 'FreeSurferColorLUT.txt'
                self.mrtrix_lut_file = mrtrix_lut_dir / 'fs_a2009s.txt'
            elif parcellation == 'hcpmmp1':
                self.parc_lut_file = mrtrix_lut_dir / 'hcpmmp1_original.txt'
                self.mrtrix_lut_file = mrtrix_lut_dir / 'hcpmmp1_ordered.txt'
            elif parcellation in ['yeo7fs', 'yeo17fs']:
                self.parc_lut_file = \
                    self.freesurfer_home / \
                    'Yeo2011_' \
                    f'{"7" if parcellation == "yeo7fs" else "17"}' \
                    'networks_Split_Components_LUT.txt'
                self.mrtrix_lut_file = \
                    mrtrix_lut_dir / \
                    'Yeo2011_' \
                    f'{"7" if parcellation == "yeo7fs" else "17"}' \
                    'N_split.txt'
            else:
                assert False

            # If running in a container environment, and --debug is used
            # (resulting in the scratch directory being a mounted drive),
            # it's possible that attempting to construct a softlink may
            # lead to an OSError
            # As such, run a test to determine whether or not it is
            # possible to construct a softlink within the scratch
            # directory; if it is not possible, revert to performing
            # deep copies of the relevant FreeSurfer template directories
            self.freesurfer_template_link_function = os.symlink
            try:
                self.freesurfer_template_link_function(
                    self.freesurfer_subjects_dir,
                    'test_softlink')
                os.remove('test_softlink')
                app.debug('Using softlinks to FreeSurfer template directories')
            except OSError:
                app.debug('Unable to create softlinks; '
                          'will perform deep copies of FreeSurfer '
                          'template directories')
                self.freesurfer_template_link_function = shutil.copytree

        elif self.do_mni:
            # Volumetric MNI-space parcellations: template + mask come from
            # FSL; atlas images live at fixed container install locations
            self.template_image_path = \
                fsl_path / 'data' / 'standard' / 'MNI152_T1_2mm.nii.gz'
            self.template_mask_path = \
                fsl_path / 'data' / 'standard' / 'MNI152_T1_2mm_brain_mask.nii.gz'
            if parcellation == 'aal':
                self.parc_image_path = pathlib.Path(pathlib.Path().root,
                                                    'opt',
                                                    'aal',
                                                    'ROI_MNI_V4.nii')
                self.parc_lut_file = self.parc_image_path.with_suffix('.txt')
                self.mrtrix_lut_file = mrtrix_lut_dir / 'aal.txt'
            elif parcellation == 'aal2':
                self.parc_image_path = pathlib.Path(pathlib.Path().root,
                                                    'opt',
                                                    'aal',
                                                    'ROI_MNI_V5.nii')
                self.parc_lut_file = self.parc_image_path.with_suffix('.txt')
                self.mrtrix_lut_file = mrtrix_lut_dir / 'aal2.txt'
            elif parcellation == 'brainnetome246mni':
                self.parc_image_path = \
                    pathlib.Path(pathlib.Path().root,
                                 'opt',
                                 'brainnetome',
                                 'BNA_MPM_thr25_1.25mm.nii.gz')
                self.parc_lut_file = \
                    pathlib.Path(pathlib.Path().root,
                                 'opt',
                                 'brainnetome',
                                 'BN_Atlas_246_LUT.txt')
                self.mrtrix_lut_file = None
            elif parcellation == 'craddock200':
                self.parc_image_path = \
                    pathlib.Path(pathlib.Path().root,
                                 'opt',
                                 'ADHD200_parcellate_200.nii.gz')
                self.parc_lut_file = None
                self.mrtrix_lut_file = None
            elif parcellation == 'craddock400':
                self.parc_image_path = \
                    pathlib.Path(pathlib.Path().root,
                                 'opt',
                                 'ADHD200_parcellate_400.nii.gz')
                self.parc_lut_file = None
                self.mrtrix_lut_file = None
            elif parcellation == 'perry512':
                self.parc_image_path = \
                    pathlib.Path(pathlib.Path().root,
                                 'opt',
                                 '512inMNI.nii')
                self.parc_lut_file = None
                self.mrtrix_lut_file = None
            elif parcellation == 'yeo7mni':
                self.parc_image_path = \
                    pathlib.Path(
                        pathlib.Path().root,
                        'opt',
                        'Yeo2011',
                        'Yeo2011_7Networks_N1000.split_components.FSL_MNI152_1mm.nii.gz')
                self.parc_lut_file = \
                    pathlib.Path(
                        pathlib.Path().root,
                        'opt',
                        'Yeo2011',
                        '7Networks_ColorLUT_freeview.txt')
                self.mrtrix_lut_file = None
            elif parcellation == 'yeo17mni':
                self.parc_image_path = \
                    pathlib.Path(
                        pathlib.Path().root,
                        'opt',
                        'Yeo2011',
                        'Yeo2011_17Networks_N1000.split_components.FSL_MNI152_1mm.nii.gz')
                self.parc_lut_file = \
                    pathlib.Path(
                        pathlib.Path().root,
                        'opt',
                        'Yeo2011',
                        '17Networks_ColorLUT_freeview.txt')
                self.mrtrix_lut_file = None
            else:
                assert False

        def find_atlas_file(filepath, description):
            # Resolve an atlas file: accept the default location if present,
            # otherwise look alongside the user-supplied atlas_path;
            # raise MRtrixError with all tested locations on failure
            if not filepath:
                return None
            if filepath.is_file():
                return filepath
            if not atlas_path:
                raise MRtrixError(f'Could not find {description} '
                                  f'(expected location: {filepath})')
            newpath = atlas_path.parent / filepath.name
            if newpath.is_file():
                return newpath
            raise MRtrixError(f'Could not find {description} '
                              f'(tested locations: "{filepath}", '
                              f'"{newpath}")')

        self.template_image_path = \
            find_atlas_file(self.template_image_path,
                            'template image')
        self.template_mask_path = \
            find_atlas_file(self.template_mask_path,
                            'template brain mask image')
        self.parc_image_path = \
            find_atlas_file(self.parc_image_path,
                            'parcellation image')
        self.parc_lut_file = \
            find_atlas_file(self.parc_lut_file,
                            'parcellation lookup table file')

        if self.mrtrix_lut_file and not self.mrtrix_lut_file.is_file():
            raise MRtrixError(
                'Could not find MRtrix3 connectome lookup table file '
                f'(expected location: {self.mrtrix_lut_file})')
present, need to go down the directory listing + # Only try to import JSON file if it's actually present + # direction in the acquisition they'll need to be split + # across multiple files + # May need to concatenate more than one input DWI, since if there's + # more than one phase-encode direction in the acquired DWIs + # (i.e. not just those used for estimating the inhomogeneity field), + # they will need to be stored as separate NIfTI files in the + # 'dwi/' directory. + app.console('Importing DWI data into scratch directory') + in_dwi_path = pathlib.Path(bids_dir, *session, 'dwi') + in_dwi_image_list = sorted(in_dwi_path.glob('*_dwi.nii*')) + if not in_dwi_image_list: + raise MRtrixError(f'No DWI data found for session {session_label} ' + f'(search location: {in_dwi_path}') + dwi_index = 0 + + re_is_complex = re.compile(r'_part-(mag|phase)_') + + # TODO Check the contents of each JSON: + # - Check to see if gradient non-linearity distortion correction + # has already been applied prior; + # if it has, then denoising and Gibbs ringing removal + # should be omitted from the pipeline + # (For now, don't attempt to handle this on a per-image basis; + # just make sure whatever the result is is consistent + # across all input images for the session) + # - If no gradient non-linearity distortion correction + # has yet been applied, + # and the user has specified a directory + # containing gradient non-linearity warp fields, + # then load the name of the scanner, + # and check to see if a warp field is available + scanner_name = None + gdc_already_applied = None + gdc_to_be_applied = None + + for entry in in_dwi_image_list: + phase_rescale_factor = None + # Is this one image in a magnitude-phase pair? 
+ is_complex = re_is_complex.search(entry.name) + if is_complex: + matching_text = is_complex.group(1) + complex_part = matching_text.strip('_').split('-')[-1] + assert complex_part in ['mag', 'phase'] + if complex_part == 'mag': + # Find corresponding phase image + in_phase_image = entry.replace('_part-mag_', '_part-phase_') + if in_phase_image not in in_dwi_image_list: + raise MRtrixError( + f'Image {entry} does not have corresponding phase image') + # Check if phase image is stored in radians + phase_stats = image.statistics(in_phase_image, allvolumes=True) + if abs(2.0*math.pi - (phase_stats.max - phase_stats.min)) \ + > 0.01: + app.console(f'Phase image {in_phase_image} is not stored in radian units ' + f'(values from {phase_stats.min} to {phase_stats.max}); ' + 'data will be rescaled automatically') + # Are the values stored as integers? If so, assume that + # taking the maximum phase value observed in the image + # intensities, incrementing it by one, and having it + # offset + scaled based on the header properties, would + # result in a phase that is 2pi greater than the minimum + # It would be better to do this rescaling based on testing + # whether the image datatype is an integer; but there + # does not yet exist a module for interpreting MRtrix3 + # data types + phase_header = image.Header(in_phase_image) + add_to_max_phase = phase_header.intensity_scale() \ + if phase_stats.min.is_integer() \ + and phase_stats.max.is_integer() \ + else 0 + phase_rescale_factor = 2.0 * math.pi / \ + (phase_stats.max + + add_to_max_phase + - phase_stats.min) + else: + phase_rescale_factor = None + + else: + # Make sure we also have the corresponding magnitude image + if entry.replace('_part-phase_', + '_part-mag_') not in in_dwi_image_list: + raise MRtrixError( + f'Image {entry} does not have corresponding mag image') + # Do nothing for the second image in the pair + continue + else: + + in_phase_image = None + + # Find sidecar files + # dcm2bids will have separate bvecs 
/ bvals / json for the + # magnitude and phase images; + # for proper BIDS compliance, these sidecar files will be stored + # without the "_part-[mag|phase]" part + + dwi_prefix = pathlib.Path(entry) + while dwi_prefix.suffix: + dwi_prefix = dwi_prefix.with_suffix('') + sidecar_prefixes = [dwi_prefix, + dwi_prefix.parent / dwi_prefix.name.replace('_part-mag_', '_'), + bids_dir / dwi_prefix, + bids_dir / \ + dwi_prefix.parent / \ + dwi_prefix.name.replace('_part-mag_', '_')] + entry_bval = None + entry_bvec = None + entry_json = None + for prefix in sidecar_prefixes: + candidate_bval = prefix.with_suffix('.bval') + candidate_bvec = prefix.with_suffix('.bvec') + candidate_json = prefix.with_suffix('.json') + if not entry_bvec and \ + candidate_bval.is_file() and \ + candidate_bvec.is_file(): + entry_bval = candidate_bval + entry_bvec = candidate_bvec + if not entry_json and \ + candidate_json.is_file(): + entry_json = candidate_json + if not entry_bvec: + raise MRtrixError( + 'Unable to locate valid diffusion gradient table ' + f'for image "{entry}"') + if not entry_json: + raise MRtrixError( + 'Unable to locate valid JSON sidecar file ' + f'for image "{entry}"') + + grad_import_option = ['-fslgrad', entry_bvec, entry_bval] + json_import_option = ['-json_import', entry_json] + + # Import the data + dwi_index += 1 + if in_phase_image: + run.command(['mrconvert', entry, '-'] + + grad_import_option + + json_import_option + + ['|', 'mrcalc', '-', in_phase_image] + + ([str(phase_rescale_factor), '-mult'] \ + if phase_rescale_factor \ + else []) + + ['-polar', f'dwi{dwi_index}.mif']) + else: + run.command(['mrconvert', entry, f'dwi{dwi_index}.mif'] + + grad_import_option + + json_import_option) + + # Load the JSON explicitly as we need to check coupla things + with open(entry_json, 'r', encoding='utf-8') as json_file: + json_data = json.load(json_file) + + if any(item in json_data for item in ('ImageType', 'ImageTypeText')): + # Load whichever field is present, and convert 
into list-of-strings if necessary + # TODO Does this ever come in a form other than a list-of-strings? + image_type = json_data['ImageTypeText'] \ + if 'ImageTypeText' in json_data \ + else json_data['ImageType'] + this_gdc_already_applied = any(item in image_type for item in ('DIS2D', 'DIS3D')) + if gdc_already_applied is None: + gdc_already_applied = this_gdc_already_applied + elif this_gdc_already_applied != gdc_already_applied: + raise MRtrixError('Inconsistency in prior application ' + 'of gradient non-linearity distortion correction ' + f'for DWI data for session {session_label}') + + if 'ManufacturersModelName' in json_data: + if scanner_name is None: + scanner_name = json_data['ManufacturersModelName'] + if shared.gdc_dir is not None and gdc_to_be_applied is None: + if scanner_name in shared.gdc_images: + gdc_to_be_applied = True + else: + app.warn('No gradient non-linearity warp file ' + f'for scanner "{scanner_name}" ' + f'found in GDC directory {shared.gdc_dir}; ' + 'no gradient non-linearity distortion correction to be applied ' + f'for session {session_label}') + gdc_to_be_applied = False + elif json_data['ManufacturersModelName'] != scanner_name: + raise MRtrixError('Inconsistent "ManufacturersModelName" value in input DWI data') + elif scanner_name is not None: + raise MRtrixError('Inconsistent appearance of "ManufacturersModelName" in input DWI data') + else: + gdc_to_be_applied = False + + + if gdc_already_applied: + app.warn('Gradient non-linearity distortion correction already applied to input DWI data; ' + 'some pre-processing steps will need to be omitted accordingly') + gdc_to_be_applied = False + if gdc_to_be_applied: + app.console(f'Scanner model "{scanner_name}" matches file "{shared.gdc_images[scanner_name]}",' + ' and no prior application of gradient non-linearity distortion correction indicated in image metadata;' + ' correction to be applied after dwifslpreproc') + + dwi_image_list = [pathlib.Path(f'dwi{index}.mif') for index in 
range(1, dwi_index+1)] + + # Go hunting for reversed phase-encode data + # dedicated to field map estimation + in_fmap_image_list = [] + fmap_dir = pathlib.Path(bids_dir, *session, 'fmap') + fmap_index = 0 + fmap_image_list = [] + if fmap_dir.is_dir(): + app.console('Importing fmap data into scratch directory') + in_fmap_image_list = sorted(fmap_dir.glob('*_dir-*_epi.nii*')) + for entry in in_fmap_image_list: + prefix = pathlib.Path(entry) + while prefix.suffix: + prefix = prefix.with_suffix('') + json_path = prefix.with_suffix('.json') + try: + with open(json_path, 'r', encoding='utf-8') as json_file: + json_elements = json.load(json_file) + except OSError: + app.warn(f'No JSON file found for image "{entry}"; not importing') + continue + if 'IntendedFor' in json_elements: + if isinstance(json_elements['IntendedFor'], list): + if not any(any(str(i).endswith(target) for i in in_dwi_image_list) + for target in json_elements['IntendedFor']): + app.console(f'Image "{entry}" is not intended ' + 'for use with DWIs; skipping') + continue + elif not any(str(i).endswith(json_elements['IntendedFor']) + for i in in_dwi_image_list): + app.console(f'Image "{entry}" is not intended ' + 'for use with DWIs; skipping') + continue + # Verify that the presence / absence + # of prior gradient non-linearity distortion correction + # is consistent between DWIs and fmap/ data + if any(item in json_elements for item in ('ImageType', 'ImageTypeText')) \ + and gdc_already_applied is not None: + image_type = json_elements['ImageType'] \ + if 'ImageType' in json_elements \ + else json_elements['ImageTypeText'] + if any(item in image_type for item in ('DIS2D', 'DIS3D')) != gdc_already_applied: + raise MRtrixError('Inconsistency in prior application ' + 'of gradient non-linearity distortion correction ' + 'between dwi/ and fmap/ data ' + f'for session {session_label}') + # fmap files will not come with any gradient encoding in the JSON; + # therefore we need to add it manually ourselves so that 
+ # mrcat / mrconvert can appropriately handle the table once + # these images are concatenated with the DWIs + fmap_index += 1 + fmap_image_size = image.Header(entry).size() + fmap_image_num_volumes = \ + 1 if len(fmap_image_size) == 3 else fmap_image_size[3] + fmap_dwscheme_file = pathlib.Path(f'fmap{fmap_index}.b') + with open(fmap_dwscheme_file, 'w', encoding='utf-8') as f: + for _ in range(0, fmap_image_num_volumes): + f.write('0,0,1,0\n') + run.command(['mrconvert', + entry, + f'fmap{fmap_index}.mif', + '-json_import', json_path, + '-grad', fmap_dwscheme_file]) + app.cleanup(fmap_dwscheme_file) + + fmap_image_list = [pathlib.Path(f'fmap{index}.mif') for index in range(1, fmap_index+1)] + + # No need to explicitly check whether fmap/ images are defined + # on a common image grid; these we are happy to resample + + # If there's no usable data in fmap/ directory, + # need to check to see if there's any phase-encoding + # contrast within the input DWI(s) + if not fmap_image_list and len(dwi_image_list) < 2: + raise MRtrixError('Inadequate data for pre-processing of session ' + f'"{session_label}": ' + 'No phase-encoding contrast in input DWIs, ' + 'and no fmap/ directory, ' + 'so EPI distortion correction cannot be performed') + + # Get T1-weighted image data + # (could be generated from raw data, or grabbed from a + # user-specified path source) + get_t1w_preproc_images(bids_dir, + session, + shared.t1w_shared, + t1w_preproc_path) + # TODO This will no longer be compulsory: + # if these data are not available, + # then issue a warning to the user, + # and exclude the relevant registration & export + if pathlib.Path('T1w_premasked.mif').is_file(): + t1w_image = pathlib.Path('T1w_premasked.mif') + t1w_is_premasked = True + elif pathlib.Path('T1w.mif').is_file(): + t1w_image = pathlib.Path('T1w.mif') + t1w_is_premasked = False + else: + app.warn(f'Pre-processing of session "{session_label}" ' + 'proceeding in the absence of any T1-weighted image; ' + 'DWI data will 
be pre-processed, ' + 'but no alignment to T1-weighted image will occur, ' + 'and participant-level analysis will not be applicable') + t1w_image = None + t1w_is_premasked = None + + dwifslpreproc_se_epi = '' + dwifslpreproc_se_epi_option = '' + + if len(dwi_image_list) == 1: + run.function(os.rename, dwi_image_list[0], 'dwi.mif', show=False) + dwi_image_list[0] = pathlib.Path('dwi.mif') + + # First two steps of pre-processing are not applicable + # if the input data have undergone gradient non-linearity distortion correction: + # that requires interpolation of image intensities: + # which violates the assumptions of these algorithms + if gdc_already_applied: + app.warn('Skipping MPPCA denoising and Gibbs ringing removal ' + 'due to prior application of gradient non-linearity distortion correction ' + 'to the input DWI data') + else: + + if concat_denoise == 'before' and len(dwi_image_list) > 1: + # TODO We need to determine first whether these images can be trivially concatenated: + # if execution of dwicat would result in resampling, + # and that would then preclude the application of denoising / Gibbs ringing removal, + # then we should override this and do the concatenation after these steps + # How do we tell if the concatenation will not involve resampling? 
+ # Think we should just run the process and then query the contents of stderr + app.console('Concatenating DWI series prior to denoising') + new_dwi_image = pathlib.Path('dwi_cat.mif') + dwicat_stderr = run.command(['dwicat', + list(map(str, dwi_image_list)), + new_dwi_image]).stderr + if 'data will be resampled onto a new average header space' in dwicat_stderr: + app.warn('DWI series could not be concatenated without involving resampling, ' + 'which would invalidate denoising and Gibbs ringing removal; ' + 'will instead denoise separately, and concatenate after') + os.remove(new_dwi_image) + else: + app.cleanup(dwi_image_list) + dwi_image_list = [new_dwi_image] + + # Step 1: Denoise + app.console('Denoising DWI data') + for entry in dwi_image_list: + run.command([shared.dwidenoise_cmd, + entry, + f'{entry.with_suffix("")}_denoised.mif']) + app.cleanup(entry) + dwi_image_list = [pathlib.Path(f'{entry.with_suffix("")}_denoised.mif') \ + for entry in dwi_image_list] + + # If data are complex, take the magnitude + new_dwi_image_list = [] + for entry in dwi_image_list: + if image.Header(entry).datatype().startswith('CFloat'): + mag_entry = pathlib.Path(f'{entry.with_suffix("")}_mag.mif') + run.command(f'mrcalc {entry} -abs {mag_entry}') + app.cleanup(entry) + new_dwi_image_list.append(mag_entry) + else: + new_dwi_image_list.append(entry) + dwi_image_list = new_dwi_image_list + + # Step 2: Gibbs ringing removal + # TODO Can newer implementations use complex data + # for 2D Gibbs ringing removal? 
+ app.console('Performing Gibbs ringing removal for DWI' + f'{" and fmap" if fmap_image_list else ""} data') + for i in dwi_image_list: + run.command(f'mrdegibbs {i} {i.with_suffix("")}_degibbs.mif' + ' -nshifts 50') + app.cleanup(i) + dwi_image_list = [pathlib.Path(f'{i.with_suffix("")}_degibbs.mif') \ + for i in dwi_image_list] + for i in fmap_image_list: + run.command(f'mrdegibbs {i} {i.with_suffix("")}_degibbs.mif' + ' -nshifts 50') + app.cleanup(i) + fmap_image_list = [pathlib.Path(f'{i.with_suffix("")}_degibbs.mif') \ + for i in fmap_image_list] + + # We need to concatenate the DWI and fmap/ data (separately) + # before they can be fed into dwifslpreproc + if len(dwi_image_list) > 1 or fmap_image_list: + app.console(f'{"Concatenating" if len(dwi_image_list) > 1 else "Preparing"}' + f' DWI{" and fmap" if fmap_image_list else ""} data' + ' onto common voxel grid') + if len(dwi_image_list) == 1: + dwifslpreproc_input = dwi_image_list[0] + else: + dwifslpreproc_input = pathlib.Path('dwifslpreproc_in.mif') + run.command(['dwicat', list(map(str, dwi_image_list)), dwifslpreproc_input]) + + # Some decisions regarding pre-processing depend on whether or not + # a twice-refocused sequence has been used: a single-refocused + # sequence may have residual eddy current distortions in b=0 + # volumes + dwifslpreproc_input_header = image.Header(dwifslpreproc_input) + monopolar = 'DiffusionScheme' in dwifslpreproc_input_header.keyval() \ + and dwifslpreproc_input_header \ + .keyval()['DiffusionScheme'] == 'Monopolar' + + if fmap_image_list: + + fmap_transformed_image_list = [] + for item in fmap_image_list: + affine_transform_filepath = pathlib.Path(f'{item.with_suffix("")}2dwi_affine.txt') + rigid_transform_filepath = pathlib.Path(f'{item.with_suffix("")}2dwi_rigid.txt') + fmap_transformed_filepath = pathlib.Path(f'{item.with_suffix("")}_transformed.mif') + run.command(['transformcalc', + item, + dwifslpreproc_input, + 'header', + affine_transform_filepath]) + 
run.command(['transformcalc', + affine_transform_filepath, + 'rigid', + rigid_transform_filepath]) + run.command(['mrtransform', + item, + '-linear', rigid_transform_filepath, + '-reorient_fod', 'no', + fmap_transformed_filepath]) + fmap_transformed_image_list.append(fmap_transformed_filepath) + app.cleanup(affine_transform_filepath) + app.cleanup(rigid_transform_filepath) + app.cleanup(fmap_image_list) + if len(fmap_transformed_image_list) == 1: + dwifslpreproc_se_epi = fmap_transformed_image_list[0] + else: + dwifslpreproc_se_epi = pathlib.Path('dwifslpreproc_seepi.mif') + try: + run.command(['mrcat', + list(map(str, fmap_transformed_image_list)), + dwifslpreproc_se_epi, + '-axis', '3']) + except run.MRtrixCmdError: + app.warn('Unable to rigidly align fmap/ images to DWI voxel grid; ' + 'performing explicit interpolation') + fmap_resampled_image_list = [] + for item in fmap_transformed_image_list: + fmap_resampled_image_path = pathlib.Path(f'{item.with_suffix("")}_resampled.mif') + run.command(['mrtransform', + item, + '-template', dwifslpreproc_input, + '-interp', 'sinc', + fmap_resampled_image_path]) + fmap_resampled_image_list.append(fmap_resampled_image_path) + app.cleanup(fmap_transformed_image_list) + run.command(['mrcat', + list(map(str, fmap_resampled_image_list)), + dwifslpreproc_se_epi, + '-axis', '3']) + app.cleanup(fmap_resampled_image_list) + dwifslpreproc_se_epi_option = ['-se_epi', dwifslpreproc_se_epi, + '-align_seepi'] + + else: # No fmap/ images + + dwifslpreproc_se_epi = None + dwifslpreproc_se_epi_option = [] + + # If no images in fmap/ directory, but DWIs are monopolar, then + # don't want to let dwifslpreproc automatically grab all of the + # b=0 volumes and just use those; instead, grab, for each + # input DWI series, just the b=0 volumes at the start of the + # series + if len(dwi_image_list) > 1 and monopolar: + try: + bzero_image_list = [] + for dwi_image in dwi_image_list: + first_nonzero_volume = \ + min([int(indices[0]) for indices in 
+ image.mrinfo(dwi_image, 'shell_indices') + .split(' ')[1:]]) + if not first_nonzero_volume: + raise MRtrixError('First DWI volume is not b=0; ' + 'cannot utilise b=0 volumes ' + 'prior to DWI volumes only') + bzero_image = pathlib.Path(f'{dwi_image.with_suffix("")}_bzero.mif') + run.command(['mrconvert', + dwi_image, + bzero_image, + '-coord', + '3', + ','.join(str(i) for i in + range(0, first_nonzero_volume))]) + bzero_image_list.append(bzero_image) + dwifslpreproc_se_epi = pathlib.Path('dwifslpreproc_seepi.mif') + run.command(['mrcat', + bzero_image_list, + dwifslpreproc_se_epi, + '-axis', '3']) + app.cleanup(bzero_image_list) + dwifslpreproc_se_epi_option = ['-se_epi', str(dwifslpreproc_se_epi)] + except MRtrixError: + dwifslpreproc_se_epi = None + dwifslpreproc_se_epi_option = None + app.warn('DWIs detected as using monopolar diffusion sensitisation,' + ' but error encountered in extracting pre-DWI b=0 volumes;' + ' topup field estimate may be affected by eddy current distortions' + ' in b=0 volumes') + + # If only one image, this is fed directly to dwifslpreproc as-is + if len(dwi_image_list) > 1: + app.cleanup(dwi_image_list) + + # Step 3: Distortion correction + app.console('Performing various geometric corrections of DWIs') + dwifslpreproc_input_header = image.Header(dwifslpreproc_input) + have_slice_timing = 'SliceTiming' in dwifslpreproc_input_header.keyval() + app.debug(f'Have slice timing: {have_slice_timing}') + mb_factor = int(dwifslpreproc_input_header.keyval() + .get('MultibandAccelerationFactor', '1')) + app.debug(f'Multiband factor: {mb_factor}') + if 'SliceDirection' in dwifslpreproc_input_header.keyval(): + slice_direction_code = \ + dwifslpreproc_input_header.keyval()['SliceDirection'] + if 'i' in slice_direction_code: + num_slices = dwifslpreproc_input_header.size()[0] + elif 'j' in slice_direction_code: + num_slices = dwifslpreproc_input_header.size()[1] + elif 'k' in slice_direction_code: + num_slices = dwifslpreproc_input_header.size()[2] 
+ else: + num_slices = dwifslpreproc_input_header.size()[2] + app.warn('Error reading BIDS field "SliceDirection" ' + f'(value: "{slice_direction_code}"); ' + 'assuming third axis') + else: + num_slices = dwifslpreproc_input_header.size()[2] + app.debug(f'Number of slices: {num_slices}') + mporder = 1 + int(math.ceil(num_slices/(mb_factor*4))) + app.debug(f'MPorder: {mporder}') + + # With Cima-X single-refocus data, cubic first-level model is required; + # for homogeneity of processing, activate regardless of scanner model + # TODO This may no longer be required after a scanner update; + # seek clarification from Siemens RE whether this can be determined from input data + eddy_options = ['--flm=cubic'] + if shared.eddy_repol: + eddy_options.append('--repol') + if shared.eddy_mporder and have_slice_timing: + eddy_options.append('--mporder=' + str(mporder)) + # Disabled: is only necessary for cohorts with very large motion, + # and significantly increases execution time + #if shared.eddy_mbs: + # eddy_options.append('--estimate_move_by_susceptibility') + # + # High b-value monopolar data still has eddy current distortions + # in b=0 images + # This appears to result in processing failure too regularly + # Error messages include the following: + # - terminate called after throwing an instance of + # 'NEWMAT::SingularException' + # - matrix multiplication: problem with matrix inverse; + # suggest to use solve() instead + # EDDY::: ECScanClasses.cpp::: void EDDY::ECScanManager:: + # SeparateFieldOffsetFromMovement(EDDY::ScanType, EDDY::OffsetModel): + # Exception thrown + # - eddy: msg=ECScanManager::set_slice_to_vol_reference: + # ref index out of bounds + #if monopolar: + # eddy_options.append('--b0_flm=linear') + + # Make sure that MRtrix3 identifies the concatenated data as shelled + # (If FSL eddy complains, we can force it to comply; + # but it's essential for many aspects of future processing that MRtrix3 + # consider the comprehensive set of data to be shelled) 
+ try: + run.command(['mrinfo', dwifslpreproc_input, '-shell_bvalues'], show=False) + except run.MRtrixCmdError as exc: + raise MRtrixError('Combined DWI data are not classified as shelled') from exc + + shell_asymmetries = \ + [float(value) for value in + run.command(f'dirstat {dwifslpreproc_input} -output asym', + show=False)[0] + .splitlines()] + app.debug(f'Shell asymmetries: {shell_asymmetries}') + if any(value > 0.1 for value in shell_asymmetries): + app.console('Utilising eddy linear second-level model due to poor ' + 'distribution of diffusion gradient direction polarities') + eddy_options.append('--slm=linear') + + run.function(os.makedirs, 'eddyqc', show=False) + dwifslpreproc_output = pathlib.Path( + 'dwifslpreproc_out.mif' \ + if dwifslpreproc_input == 'dwifslpreproc_in.mif' \ + else (f'{dwifslpreproc_input.with_suffix("")}_preproc.mif')) + + eddy_olnstd_value = 4.0 # The internal eddy default + eddy_olnstd_option = [] + eddy_force_shelled_option = [] + + while not dwifslpreproc_output.is_file(): + + try: + + # If dwifslpreproc fails due to: + # EDDY::: DoVolumeToVolumeRegistration: Unable to find volume + # with no outliers in shell 0 with b-value=549.375 + # , want to progressively increase the outlier rejection threshold + # until this error no longer occurs. 
+ eddy_all_options = eddy_options \ + + eddy_olnstd_option \ + + eddy_force_shelled_option + dwifslpreproc_eddy_options = \ + ['-eddy_options', + ' '.join(eddy_all_options)] \ + if eddy_all_options \ + else [] + + run.command(['dwifslpreproc', + dwifslpreproc_input, + dwifslpreproc_output, + '-rpe_header', + '-eddyqc_text', 'eddyqc/'] + + dwifslpreproc_se_epi_option + + dwifslpreproc_eddy_options + + ([] \ + if app.DO_CLEANUP \ + else ['-scratch', app.SCRATCH_DIR, '-nocleanup'])) + + except run.MRtrixCmdError as e_dwifslpreproc: + if any(item in str(e_dwifslpreproc) for item in [ + 'msg=ECScanManager::set_slice_to_vol_reference: ' \ + 'ref index out of bounds', + 'Unable to find volume with no outliers']): + eddy_olnstd_value += 0.5 + eddy_olnstd_option = [f'--ol_nstd={eddy_olnstd_value}'] + app.warn('FSL eddy failed due to outlier rejection; ' + 're-running with increased threshold') + elif 'Data not shelled' in str(e_dwifslpreproc): + eddy_force_shelled_option = ['--data_is_shelled'] + app.warn('FSL eddy failed due to reporting DWI data as not ' + 'being shelled; despite MRtrix3 classifying as ' + 'shelled; re-running with --data_is_shelled option') + elif not eddy_force_shelled_option: + eddy_force_shelled_option = ['--data_is_shelled'] + app.warn('FSL eddy failed with unrecognised error; ' + 're-running with --data_is_shelled option ' + 'in case it resolves issue') + else: + raise + shutil.rmtree('eddyqc/') + + app.cleanup(dwifslpreproc_input) + app.cleanup(dwifslpreproc_se_epi) + + # TODO Step 4: Gradient non-linearity distortion correction + # This needs to happen before computing mask or registration to the T1w image + # TODO Longer-term, could compose the gradient non-linearity warp field + # with the DWI->T1w transformation + # to resample the DWI data on a voxel grid aligned with the T1w image grid + # with only a single interpolation step + # TODO Would like to change where / how this is applied: + # - If only doing volume motion correction, + # this 
should be applied after Gibbs ringing removal before eddy; + # - If doing slice-to-volume motion correction, + # would like to apply in-plane 2D distortion correction + # after Gibbs ringing removal before eddy, + # and apply the distortion correction along the slice axis after eddy + # To achieve this would require augmentation of probably warpconvert + # to have the ability to project a warp field to identity along specific directions + if gdc_to_be_applied: + assert scanner_name is not None + assert scanner_name in shared.gdc_images + app.console('Applying gradient non-linearity distortion correction') + dwi_gdc_image = pathlib.Path('dwi_gdc.mif') + run.command(['mrtransform', dwifslpreproc_output, dwi_gdc_image, + '-template', dwifslpreproc_output, + '-warp', shared.gdc_images[scanner_name], + '-interp', 'cubic', + '-modulate', 'jac']) + dwi_image = dwi_gdc_image + else: + app.console('Skipping gradient non-linearity distortion correction') + dwi_image = dwifslpreproc_output + + # Step 5: + # Combined bias field correction, + # intensity normalisation, + # and brain mask derivation + if shared.have_dwibiasnormmask: + app.console('Running simultaneous bias field correction, ' + 'intensity normalisation, ' + 'and DWI brain mask derivation ' + 'via dwibiasnormmask command') + dwi_biasnorm_image = pathlib.Path('dwi_biasnorm.mif') + dwi_mask_image = pathlib.Path('dwi_mask.mif') + # Note that: + # 1. The first of these results in the synthstrip command + # being utilised in the initial dwi2mask call + # to derive an initial mask prior to the first iteration + # 2. 
The second of these results in the synthstrip command + # being run with the ODF sum image as input + # during the iterative process + mask_algo_options = ['-config', 'Dwi2maskAlgo', 'synthstrip', + '-mask_algo', 'synthstrip'] \ + if shared.dwi2mask_algo == 'synthstrip' \ + else [] + run.command(['dwibiasnormmask', + dwi_image, + dwi_biasnorm_image, + dwi_mask_image] + + mask_algo_options) + app.cleanup(dwi_image) + dwi_image = dwi_biasnorm_image + + else: # No dwibiasnormmask available + + # TODO Strongly consider removing + + # Step 5.1: Generate an image containing all voxels where the + # DWI contains valid data + dwi_validdata_image = pathlib.Path('dwi_validdata_mask.mif') + run.command(f'mrmath {dwi_image} max -axis 3 - | ' + f'mrthreshold - {dwi_validdata_image} -abs 0.0 -comparison gt') + + # Determine whether we are working with single-shell or multi-shell data + bvalues = [ + int(round(float(value))) + for value in image.mrinfo(dwi_image, 'shell_bvalues') \ + .strip().split()] + multishell = len(bvalues) > 2 + + # Step 5.2: Initial DWI brain mask + dwi_mask_image = pathlib.Path('dwi_mask_init.mif') + app.console('Performing intial DWI brain masking') + run.command(f'dwi2mask {shared.dwi2mask_algo} {dwi_image} {dwi_mask_image}') + + # Step 5.3: Combined RF estimation / CSD / mtnormalise / mask revision + # DWI brain masking may be inaccurate due to residual bias field. + # We want to perform: + # - Response function estimation; + # - Multi-tissue CSD (with a lower lmax for speed); + # - Mtnormalise to remove any bias field; + # - Re-calculation of brain mask; + # in an iterative fashion, as all steps may influence the others. 
+ class Tissue(object): #pylint: disable=useless-object-inheritance + def __init__(self, name, index): + self.name = name + iter_string = f'_iter{index}' + self.rf = pathlib.Path(f'response_{name}{iter_string}.txt') + self.fod_init = pathlib.Path(f'FODinit_{name}{iter_string}.mif') + self.fod_norm = pathlib.Path(f'FODnorm_{name}{iter_string}.mif') + + app.console('Commencing iterative DWI bias field correction ' + 'and brain masking') + iteration = 0 + step = 'initialisation' + dice_coefficient = 0.0 + def msg(): + return f'Iteration {iteration}; ' \ + f'{step} step; ' \ + f'previous Dice coefficient {dice_coefficient}' + progress = app.ProgressBar(msg) + dwi_input_image = dwi_image + for iteration in range(0, DWIBIASNORMMASK_MAX_ITERS): + iter_string = f'_iter{iteration+1}' + + tissues = [Tissue('WM', iteration), + Tissue('GM', iteration), + Tissue('CSF', iteration)] + + step = 'dwi2response' + progress.increment() + run.command(['dwi2response', 'dhollander', dwi_input_image, + '-mask', dwi_mask_image] + + [tissue.rf for tissue in tissues]) + + # Remove GM if we can't deal with it + lmaxes = '4,0,0' + if not multishell: + app.cleanup(tissues[1].rf) + tissues = tissues[::2] + lmaxes = '4,0' + + step = 'dwi2fod' + progress.increment() + run.command(f'dwi2fod msmt_csd {dwi_input_image} ' + f'-lmax {lmaxes} ' + + ' '.join(f'{tissue.rf} {tissue.fod_init}' + for tissue in tissues)) + + step = 'mtnormalise' + progress.increment() + field_path = pathlib.Path(f'field{iter_string}.mif') + factors_path = pathlib.Path(f'factors{iter_string}.txt') + run.command(f'maskfilter {dwi_mask_image} erode - | ' + 'mtnormalise -mask - -balanced ' + f'-check_norm {field_path} ' + f'-check_factors {factors_path} ' + + ' '.join(f'{tissue.fod_init} {tissue.fod_norm}' + for tissue in tissues)) + app.cleanup([tissue.fod_init for tissue in tissues]) + + # Apply both estimated bias field, and appropriate + # scaling factor, to DWIs + step = 'mrcalc_dwi' + progress.increment() + csf_rf = 
matrix.load_matrix(tissues[-1].rf) + csf_rf_bzero_lzero = csf_rf[0][0] + app.cleanup([tissue.rf for tissue in tissues]) + balance_factors = matrix.load_vector(factors_path) + csf_balance_factor = balance_factors[-1] + app.cleanup(factors_path) + scale_multiplier = (1000.0 * math.sqrt(4.0*math.pi)) / \ + (csf_rf_bzero_lzero / csf_balance_factor) + new_dwi_image = pathlib.Path(f'dwi{iter_string}.mif') + run.command(f'mrcalc {dwi_image} {field_path} -div ' + f'{scale_multiplier} -mult {new_dwi_image}') + app.cleanup(field_path) + app.cleanup(dwi_image) + dwi_image = new_dwi_image + + step = 'dwi2mask' + progress.increment() + new_dwi_mask_image = pathlib.Path(f'dwi_mask{iter_string}.mif') + run.command(f'mrconvert {tissues[0].fod_norm} -coord 3 0 - | ' + f'mrmath - {" ".join(tissue.fod_norm for tissue in tissues[1:])} sum - | ' + f'mrthreshold - -abs {TISSUESUM_THRESHOLD} - | ' + 'maskfilter - connect -largest - | ' + 'mrcalc 1 - -sub - -datatype bit | ' + 'maskfilter - connect -largest - | ' + 'mrcalc 1 - -sub - -datatype bit | ' + 'maskfilter - clean - | ' + f'mrcalc - {dwi_validdata_image} -mult {new_dwi_mask_image} -datatype bit') + app.cleanup([tissue.fod_norm for tissue in tissues]) + + # Compare input and output masks + step = 'mrcalc_mask' + progress.increment() + dwi_old_mask_count = image.statistics(dwi_mask_image, + mask=dwi_mask_image).count + dwi_new_mask_count = image.statistics(new_dwi_mask_image, + mask=new_dwi_mask_image).count + app.debug(f'Old mask: {dwi_old_mask_count}') + app.debug(f'New mask: {dwi_new_mask_count}') + dwi_mask_overlap_image = pathlib.Path(f'dwi_mask_overlap{iter_string}.mif') + run.command(['mrcalc', + dwi_mask_image, + new_dwi_mask_image, + '-mult', + dwi_mask_overlap_image]) + app.cleanup(dwi_mask_image) + dwi_mask_image = new_dwi_mask_image + mask_overlap_count = image.statistics(dwi_mask_overlap_image, + mask=dwi_mask_overlap_image).count + app.debug(f'Mask overlap: {mask_overlap_count}') + dice_coefficient = 2.0 * 
mask_overlap_count / \ + (dwi_old_mask_count + dwi_new_mask_count) + app.debug(f'Dice coefficient: {dice_coefficient}') + if dice_coefficient > (1.0 - 1e-3): + app.debug('Exiting iterative loop due to mask convergence') + break + progress.done() + app.cleanup(dwi_validdata_image) + + # Step 6: Crop images to reduce storage space + # (but leave some padding on the sides) + dwi_cropped_image = pathlib.Path('dwi_crop.mif') + dwi_cropped_mask_image = pathlib.Path('mask_crop.mif') + run.command(f'mrgrid {dwi_image} crop {dwi_cropped_image} ' + f'-mask {dwi_mask_image} -uniform -3') + app.cleanup(dwi_image) + dwi_image = dwi_cropped_image + run.command(f'mrgrid {dwi_mask_image} crop {dwi_cropped_mask_image} ' + f'-mask {dwi_mask_image} -uniform -3') + app.cleanup(dwi_mask_image) + dwi_mask_image = dwi_cropped_mask_image + + # Step 7: DWI->T1 registration + if t1w_image: + + # Step 7.1: Generate target images for T1w->DWI registration + app.console('Generating contrast-matched images for ' + 'inter-modal registration between DWIs and T1w') + run.command(f'dwiextract {dwi_image} -bzero - | ' + 'mrcalc - 0.0 -max - | ' + 'mrmath - mean -axis 3 dwi_meanbzero.mif') + run.command(f'mrcalc 1 dwi_meanbzero.mif -div {dwi_mask_image} -mult - | ' + f'mrhistmatch nonlinear - {t1w_image} dwi_pseudoT1w.mif ' + f'-mask_input {dwi_mask_image} ' + '-mask_target T1w_mask.mif') + run.command(f'mrcalc 1 {t1w_image} -div ' + f'{"" if t1w_is_premasked else "T1w_mask.mif -mult "}- | ' + 'mrhistmatch nonlinear - dwi_meanbzero.mif T1w_pseudobzero.mif ' + '-mask_input T1w_mask.mif ' + f'-mask_target {dwi_mask_image}') + + # Step 7.2: Perform DWI->T1w registration + # Note that two registrations are performed: + # Even though we have a symmetric registration, generation of the + # two histogram-matched images means that you will get slightly + # different answers depending on which synthesized image & + # original image you use + app.console('Performing registration between DWIs and T1w') + 
transform_pt1w_t1w = pathlib.Path('rigid_pseudoT1w_to_T1w.txt') + transform_b0_pb0 = pathlib.Path('rigid_bzero_to_pseudobzero.txt') + run.command(f'mrregister dwi_pseudoT1w.mif {t1w_image}' + ' -type rigid' + f' -mask1 {dwi_mask_image}' + ' -mask2 T1w_mask.mif' + f' -rigid {transform_pt1w_t1w}') + run.command('mrregister dwi_meanbzero.mif T1w_pseudobzero.mif' + ' -type rigid' + f' -mask1 {dwi_mask_image}' + ' -mask2 T1w_mask.mif' + f' -rigid {transform_b0_pb0}') + app.cleanup('dwi_meanbzero.mif') + + # Step 7.3: Perform DWI->T1w transformation + # In this scenario, we're going to transform the DWI data to the T1w + # rather than the other way around, since the T1w is more likely to + # be used as a common reference across multiple analysis pipelines, + # and we're transforming DWIs rather than FODs + transform_average = pathlib.Path('rigid_dwi_to_T1w.txt') + run.command(['transformcalc', + transform_pt1w_t1w, + transform_b0_pb0, + 'average', + transform_average]) + app.cleanup(transform_pt1w_t1w) + app.cleanup(transform_b0_pb0) + transformed_dwi_image = pathlib.Path(f'{dwi_image.with_suffix("")}_transform.mif') + transformed_dwi_mask_image = pathlib.Path(f'{dwi_mask_image.with_suffix("")}_transform.mif') + run.command(['mrtransform', + dwi_image, + transformed_dwi_image, + '-linear', transform_average]) + app.cleanup(dwi_image) + dwi_image = transformed_dwi_image + run.command(['mrtransform', + dwi_mask_image, + transformed_dwi_mask_image, + '-linear', transform_average]) + app.cleanup(dwi_mask_image) + app.cleanup(transform_average) + dwi_mask_image = transformed_dwi_mask_image + + # Processing completed; export + app.console(f'Processing completed for session "{session_label}"; ' + 'writing results to output directory') + if output_subdir.exists(): + run.function(shutil.rmtree, output_subdir) + run.function(os.makedirs, output_subdir) + if t1w_image: + run.function(os.makedirs, output_subdir / 'anat') + run.function(os.makedirs, output_subdir / 'dwi') + 
run.command(['mrconvert', + dwi_image, + output_subdir / 'dwi' / f'{session_label}_desc-preproc_dwi.nii.gz', + '-export_grad_fsl', + output_subdir / 'dwi' / f'{session_label}_desc-preproc_dwi.bvec', + output_subdir / 'dwi' / f'{session_label}_desc-preproc_dwi.bval', + '-strides', '+1,+2,+3,+4']) + with open(output_subdir / 'dwi' / f'{session_label}_desc-preproc_dwi.json', + 'w', + encoding='utf-8') as out_dwi_json_file: + json.dump(OUT_DWI_JSON_DATA, out_dwi_json_file) + run.command(['mrconvert', + dwi_mask_image, + output_subdir / 'dwi' / f'{session_label}_desc-brain_mask.nii.gz', + '-datatype', 'uint8', + '-strides', '+1,+2,+3']) + # Even if EddyQC software is not installed, a directory is still + # generated containing some eddy outputs + run.function(shutil.copytree, + 'eddyqc', + output_subdir / 'dwi' / 'eddyqc') + if t1w_image: + run.command(['mrconvert', + t1w_image, + output_subdir / 'anat' / f'{session_label}_desc-preproc_T1w.nii.gz', + '-strides', '+1,+2,+3']) + t1w_json_data = {"SkullStripped": t1w_is_premasked} + with open(output_subdir / 'anat' / f'{session_label}_desc-preproc_T1w.json', + 'w', + encoding='utf-8') as t1w_json_file: + json.dump(t1w_json_data, t1w_json_file) + run.command(['mrconvert', + 'T1w_mask.mif', + output_subdir / 'anat' / f'{session_label}_desc-brain_mask.nii.gz', + '-datatype', 'uint8', + '-strides', '+1,+2,+3']) + + # Manually wipe and zero the scratch directory + # (since we might be processing more than one subject) + os.chdir(cwd) + if app.DO_CLEANUP: + app.console(f'Deleting scratch directory {app.SCRATCH_DIR}') + # Can't use run.function() here; + # it'll try to write to the log file that resides + # in the scratch directory just deleted + app.cleanup(app.SCRATCH_DIR) + elif output_verbosity == 4: + app.console('Copying scratch directory to output location') + run.function(shutil.copytree, + app.SCRATCH_DIR, + output_subdir / 'scratch') + else: + app.console('Contents of scratch directory kept; ' + f'location: 
{app.SCRATCH_DIR}') + app.SCRATCH_DIR = None diff --git a/mrtrix3_connectome/preproc/shared.py b/mrtrix3_connectome/preproc/shared.py new file mode 100644 index 0000000..e555e73 --- /dev/null +++ b/mrtrix3_connectome/preproc/shared.py @@ -0,0 +1,97 @@ +import os +import shutil +from mrtrix3 import MRtrixError +from mrtrix3 import app +from mrtrix3 import fsl +from mrtrix3 import run +from ..anat.shared import Shared as T1wShared + +class Shared(object): #pylint: disable=useless-object-inheritance + def __init__(self, gdc_dir): + self.gdc_dir = gdc_dir + self.gdc_images = {} + if self.gdc_dir is not None: + gdc_dir_images = self.gdc_dir.glob('*.*') + for item in gdc_dir_images: + scanner_name = item.name.split('.')[0] + if scanner_name in self.gdc_images: + raise MRtrixError( + f'Duplicate images for scanner "{scanner_name}" ' + f'in GDC directory {self.gdc_dir}') + self.gdc_images[item.name.split('.')[0]] = item + app.debug(f'{len(self.gdc_images)} gradient non-linearity warp ' + f'field images found in directory {self.gdc_dir}:' + f'{self.gdc_images}') + + fsl_path = os.environ.get('FSLDIR', '') + if not fsl_path: + raise MRtrixError( + 'Environment variable FSLDIR is not set; ' + 'please run appropriate FSL configuration script') + + self.t1w_shared = T1wShared(self.gdc_images) + + dwidenoise2_cmd = shutil.which('dwidenoise2') + if dwidenoise2_cmd: + self.dwidenoise_cmd = [dwidenoise2_cmd] + else: + app.warn('dwidenoise2 command not available; ' + 'original dwidenoise implementation will be used') + self.dwidenoise_cmd = 'dwidenoise' + + def get_eddy_help(binary_name): + try: + return run.command([binary_name, '--help'], show=False).stderr + except run.MRtrixCmdError as eddy_except: + return eddy_except.stderr + + self.eddy_binary = fsl.eddy_binary(True) + if self.eddy_binary: + self.eddy_cuda = True + eddy_help = get_eddy_help(self.eddy_binary) + if 'error while loading shared libraries' in eddy_help: + app.warn('CUDA version of FSL "eddy" present on system, ' + 
'but does not execute successfully; OpenMP version ' + 'will instead be used') + self.eddy_binary = None + self.eddy_cuda = False + eddy_help = '' + if not self.eddy_binary: + self.eddy_binary = fsl.eddy_binary(False) + if not self.eddy_binary: + raise MRtrixError('Could not find FSL program "eddy"') + self.eddy_cuda = False + eddy_help = get_eddy_help(self.eddy_binary) + + app.debug('Eddy binary: ' + str(self.eddy_binary)) + app.debug('Eddy is CUDA version: ' + str(self.eddy_cuda)) + + self.eddy_repol = False + self.eddy_mporder = False + self.eddy_mbs = False + for line in eddy_help.splitlines(): + line = line.lstrip() + if line.startswith('--repol'): + self.eddy_repol = True + elif line.startswith('--mporder') and self.eddy_cuda: + self.eddy_mporder = True + elif line.startswith('--estimate_move_by_susceptibility'): + self.eddy_mbs = True + + self.dwibiascorrect_algo = 'ants' + if not self.t1w_shared.n4_cmd: + self.dwibiascorrect_algo = None + app.warn('Could not find ANTs program "N4BiasFieldCorrection"; ' + 'will proceed without performing initial b=0 - based ' + 'DWI bias field correction') + + self.dwi2mask_algo = 'synthstrip' + if not shutil.which('mri_synthstrip'): + self.dwi2mask_algo = 'legacy' + app.warn('FreeSurfer command mri_synthstrip not present;' + ' legacy dwi2mask algorithm will be used') + + self.have_dwibiasnormmask = bool(shutil.which('dwibiasnormmask')) + if not self.have_dwibiasnormmask: + app.warn('MRtrix3 command dwibiasnormmask not present; ' + 'process will be completed using manual code') diff --git a/mrtrix3_connectome/sessions.py b/mrtrix3_connectome/sessions.py new file mode 100644 index 0000000..c8d0f48 --- /dev/null +++ b/mrtrix3_connectome/sessions.py @@ -0,0 +1,113 @@ +import os +from mrtrix3 import MRtrixError +from mrtrix3 import app + +# Examine the contents of a directory (whether the raw BIDS dataset or a +# derivatives directory), and return a list of sessions. 
# This list may optionally be filtered based on the use of batch-processing
# command-line options, e.g. restricting the participant or session IDs.
def get_sessions(root_dir, **kwargs):
    """Find BIDS-like sessions under root_dir and select those to process.

    Recursively walks root_dir (skipping any "derivatives" directories);
    any directory containing both "anat" and "dwi" sub-directories is a
    candidate session, recorded as the list of path components leading
    to it. Candidates whose components are not all "sub-*" / "ses-*"
    are reported and discarded; the remainder may be filtered by the
    participant_label / session_label keyword arguments.

    Args:
        root_dir: dataset root (raw BIDS or a derivatives directory).
        participant_label: optional list of participant labels
            (without the "sub-" prefix) to retain.
        session_label: optional list of session labels
            (without the "ses-" prefix) to retain.

    Returns:
        Sorted list of sessions, each a list of path components
        relative to root_dir.

    Raises:
        TypeError: if an unsupported keyword argument is supplied.
        MRtrixError: if no sessions survive the selection.
    """
    participant_labels = kwargs.pop('participant_label', None)
    session_labels = kwargs.pop('session_label', None)
    if kwargs:
        raise TypeError(
            f'Unsupported keyword arguments passed to get_sessions(): {kwargs}')

    all_sessions = []
    for dir_name, subdir_list, _ in os.walk(root_dir):
        # Slice assignment prunes the walk in place,
        # preventing descent into derivatives directories
        subdir_list[:] = [entry
                          for entry in subdir_list
                          if entry.lower() != 'derivatives']
        if all(item in subdir_list for item in ('anat', 'dwi')):
            all_sessions.append(os.path.relpath(dir_name, start=root_dir))
    all_sessions = sorted(all_sessions)
    app.debug(str(all_sessions))

    result = []

    # Track which user-nominated labels are actually present,
    # so absent ones can be reported at the end
    sub_found = {label: False for label in participant_labels} \
                if participant_labels \
                else {}
    ses_found = {label: False for label in session_labels} \
                if session_labels \
                else {}

    def find_and_flag(ses, prefix, labels, found):
        # Returns True if every path component carrying this prefix
        # matches one of the nominated labels;
        # marks each matched label as found
        for dirname in ses:
            if dirname.startswith(prefix):
                present = False
                for label in labels:
                    if label == dirname[len(prefix):]:
                        found[label] = True
                        present = True
                        break
                if not present:
                    return False
        return True

    invalid_sessions = []
    for session in all_sessions:
        session = os.path.normpath(session).split(os.sep)
        if all(any(subdir.startswith(prefix) for prefix in ['sub-', 'ses-'])
               for subdir in session):
            process = True
            if participant_labels:
                if not find_and_flag(session,
                                     'sub-',
                                     participant_labels,
                                     sub_found):
                    process = False
            if session_labels:
                if not find_and_flag(session,
                                     'ses-',
                                     session_labels,
                                     ses_found):
                    process = False
            if process:
                result.append(session)
        else:
            invalid_sessions.append(os.sep.join(session))

    if invalid_sessions:
        # Bug fix: added the missing space before "but" so the warning
        # no longer renders as "...sub-directories,but invalid..."
        app.warn(f'Entr{"ies" if len(invalid_sessions) > 1 else "y"}'
                 f' in "{root_dir}"'
                 ' found with valid anat/ and dwi/ sub-directories,'
                 f' but invalid directory name{"s" if len(invalid_sessions) > 1 else ""}:'
                 f' {invalid_sessions}')

    if not result:
        raise MRtrixError('No sessions were selected for processing')

    app.console(f'{len(all_sessions)} total sessions found'
                f' in directory "{root_dir}";'
                f' {"all" if len(result) == len(all_sessions) else str(len(result))}'
                ' will be processed')
    sub_not_found = [key for key, value in sub_found.items() if not value]
    if sub_not_found:
        app.warn(f'{len(sub_not_found)} nominated participant'
                 f' label{"s were" if len(sub_not_found) > 1 else " was"}'
                 ' not found in input dataset:'
                 f' {", ".join(sub_not_found)}')
    ses_not_found = [key for key, value in ses_found.items() if not value]
    if ses_not_found:
        app.warn(f'{len(ses_not_found)} nominated session'
                 f' label{"s were" if len(ses_not_found) > 1 else " was"}'
                 ' not found in input dataset:'
                 f' {", ".join(ses_not_found)}')

    app.debug(str(result))
    return result
COPYRIGHT = '''Copyright (c) 2016-2025 The Florey Institute of Neuroscience
and Mental Health.

This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.

Covered Software is provided under this License on an "as is"
basis, without warranty of any kind, either expressed, implied, or
statutory, including, without limitation, warranties that the
Covered Software is free of defects, merchantable, fit for a
particular purpose or non-infringing.
See the Mozilla Public License v. 2.0 for more details.'''

# Valid values for the positional "analysis_level" argument
ANALYSIS_CHOICES = ['preproc', 'participant', 'group']

# Supported connectome parcellation schemes ("none" disables parcellation)
PARCELLATION_CHOICES = ['aal',
                        'aal2',
                        'brainnetome246fs',
                        'brainnetome246mni',
                        'craddock200',
                        'craddock400',
                        'desikan',
                        'destrieux',
                        'hcpmmp1',
                        'none',
                        'perry512',
                        'yeo7fs',
                        'yeo7mni',
                        'yeo17fs',
                        'yeo17mni']

# Registration packages supported for subject-to-template mapping
REGISTRATION_CHOICES = ['ants', 'fsl']


def usage(cmdline):  #pylint: disable=unused-variable
    """Configure the command-line interface on the supplied parser.

    Sets authorship / synopsis / copyright metadata, the long
    description of the three analysis levels, and all positional and
    optional arguments. When running inside the container
    (IS_CONTAINER), the MRtrix3 standard options are stripped and
    replaced with a BIDS-App-style option set.
    """
    cmdline.set_author('Robert E. Smith (robert.smith@florey.edu.au)')
    cmdline.set_synopsis(
        'Generate structural connectomes based on diffusion-weighted '
        'and T1-weighted image data using state-of-the-art reconstruction '
        'tools, particularly those provided in MRtrix3')

    cmdline.set_copyright(COPYRIGHT)

    # If running within a container, erase existing standard options, and
    # fill with only desired options
    if IS_CONTAINER:
        # pylint: disable=protected-access
        for option in reversed(cmdline._actions):
            cmdline._handle_conflict_resolve(
                None, [(option.option_strings[0], option)])
        # cmdline._action_groups[2] is "Standard options"
        # that was created earlier by the API
        cmdline._action_groups[2].add_argument(
            '-d', '--debug',
            dest='debug',
            action='store_true',
            help='In the event of encountering an issue with the script, '
                 're-run with this flag set to provide more useful '
                 'information to the developer')
        cmdline._action_groups[2].add_argument(
            '-h', '--help',
            dest='help',
            action='store_true',
            help='Display help information for the script')
        cmdline._action_groups[2].add_argument(
            '-n', '--n_cpus',
            type=app.Parser.Int(0),
            metavar='number',
            dest='nthreads',
            help='Use this number of threads in MRtrix3 '
                 'multi-threaded applications '
                 '(0 disables multi-threading)')
        cmdline._action_groups[2].add_argument(
            '-scratch', '--scratch',
            dest='scratch',
            type=app.Parser.DirectoryOut(),
            help='Set location for script scratch directory')
        cmdline._action_groups[2].add_argument(
            '-skip', '--skip-bids-validator',
            dest='skipbidsvalidator',
            action='store_true',
            help='Skip BIDS validation')
        cmdline._action_groups[2].add_argument(
            '-v', '--version',
            action='version',
            version=__version__)
    else:
        cmdline._action_groups[2].add_argument(  # pylint: disable=protected-access
            OPTION_PREFIX + 'skip-bids-validator',
            dest='skipbidsvalidator',
            action='store_true',
            help='Skip BIDS validation')

    cmdline.add_description(
        'While preproc-level analysis only requires data within the '
        'BIDS directory, participant-level analysis requires that the '
        'output directory be pre-populated with the results from '
        'preproc-level processing; similarly, group-level analysis '
        'requires that the output directory be pre-populated with the '
        'results from participant-level analysis.')
    cmdline.add_description(
        'The operations performed by each of the three levels of analysis '
        'are as follows:')
    cmdline.add_description(
        '"preproc": '
        'DWI: denoising (if applicable); '
        'Gibbs ringing removal (if applicable); '
        'motion, eddy current and EPI distortion correction '
        'and outlier detection & replacement; '
        'gradient non-linearity distortion correction '
        '(if available & necessary); '
        'brain masking, bias field correction and intensity normalisation; '
        'rigid-body registration & transformation to T1-weighted image (if available). '
        'T1-weighted image: '
        'gradient non-linearity distortion correction '
        '(if available & necessary); '
        'bias field correction; '
        'brain masking.')
    cmdline.add_description(
        '"participant": '
        'DWI: Response function estimation; FOD estimation. '
        f'T1-weighted image (if {OPTION_PREFIX}parcellation '
        'is not none): '
        'Tissue segmentation; grey matter parcellation. '
        f'Combined (if {OPTION_PREFIX}parcellation is not none, '
        f'or {OPTION_PREFIX}streamlines is provided): '
        'Whole-brain streamlines tractography; SIFT2; '
        'connectome construction.')
    # Bug fix: corrected user-facing typo "proportioinality"
    cmdline.add_description(
        '"group": '
        'Generation of FA-based population template; '
        'warping of template-based white matter mask to subject spaces; '
        'calculation of group mean white matter response function; '
        'scaling of connectomes based on white matter b=0 intensity, '
        'response function used during participant-level analysis, and '
        'SIFT model proportionality coefficient; '
        'generation of group mean connectome.')
    cmdline.add_description(
        'The label(s) provided to the '
        f'{OPTION_PREFIX}participant_label and '
        f'{OPTION_PREFIX}session_label options '
        'correspond(s) to sub- and '
        'ses- from the BIDS spec (so they do _not_ '
        'include "sub-" or "ses-"). Multiple participants / sessions '
        'can be specified with a space-separated list.')
    cmdline.add_description(
        'For both preproc-level and participant-level analyses, if no '
        'specific participants or sessions are nominated by the user '
        '(or the user explicitly specifies multiple participants / '
        'sessions), the script will process each of these in series. '
        'It is additionally possible for the user to invoke multiple '
        'instances of this script in order to process multiple subjects '
        'at once in parallel, ensuring that no single participant / '
        'session is being processed in parallel, and that preproc-level '
        'output data are written fully before commencing participant-level '
        'analysis.')
    cmdline.add_description(
        f'The {OPTION_PREFIX}output_verbosity option principally '
        'affects the participant-level analysis, modulating how many '
        'derivative files are written to the output directory. Permitted '
        'values are from 1 to 4: 1 writes only those files requisite for '
        'group-level analysis; 2 additionally writes files typically '
        'useful for post-hoc analysis (the default); 3 additionally '
        'generates files for enhanced connectome visualisation and copies '
        'the entire whole-brain tractogram; 4 additionally generates a '
        'full copy of the script scratch directory (with all intermediate '
        'files retained) to the output directory (and this applies to '
        'all analysis levels)')
    if not IS_CONTAINER:
        cmdline.add_description(
            'If running participant-level analysis using the script as a '
            'standalone tool rather than inside the provided container, '
            'data pertaining to atlas parcellations can no longer be '
            'guaranteed to be stored at a specific location on the '
            'filesystem. In this case, the user will most likely need to '
            'manually specify the location where the corresponding '
            'parcellation is stored using the -atlas_path option.')
    cmdline.add_description(
        'As the production of individual connectomes is split into two '
        'stages, being preproc-level and participant-level analyses, '
        'there is scope for erroneous usage to lead to sub-optimal results. '
        'In particular, it is possible for registration between DWI and '
        'T1-weighted images to be either absent from the pre-processing '
        'step, or for different T1-weighted images to be utilised '
        'between the two stages of analysis, resulting in misalignment '
        'between DWI and anatomical information. It is up to the user '
        'to ensure that the expectation of the participant-level analyses '
        'of such alignment having already been achieved is not violated.'
    )

    cmdline.add_argument(
        'bids_dir',
        type=app.Parser.DirectoryIn(),
        help='The directory with the input dataset formatted '
             'according to the BIDS standard.')
    cmdline.add_argument(
        'output_dir',
        type=app.Parser.DirectoryIn(),
        help='The existing directory where BIDS Derivatives will be written')
    cmdline.add_argument(
        'analysis_level',
        help='Level of analysis that will be performed; '
             f'options are: {", ".join(ANALYSIS_CHOICES)}.',
        choices=ANALYSIS_CHOICES)

    cmdline.add_argument(
        f'{OPTION_PREFIX}output_verbosity',
        type=app.Parser.Int(1, 4),
        default=2,
        help='The verbosity of script output (number from 1 to 4).')

    batch_options = cmdline.add_argument_group(
        'Options specific to the batch processing of participant data')
    batch_options.add_argument(
        f'{OPTION_PREFIX}participant_label',
        nargs='+',
        help='The label(s) of the participant(s) that should be analyzed.')
    batch_options.add_argument(
        f'{OPTION_PREFIX}session_label',
        nargs='+',
        help='The session(s) within each participant that should be analyzed.')

    preproc_options = \
        cmdline.add_argument_group(
            'Options that are relevant to preproc-level analysis')
    preproc_options.add_argument(
        f'{OPTION_PREFIX}gdc',
        metavar='path',
        type=app.Parser.DirectoryIn(),
        help='Provide a directory containing pre-computed warps '
             'for gradient non-linearity distortion correction')
    preproc_options.add_argument(
        f'{OPTION_PREFIX}concat_denoise',
        choices=('before', 'after'),
        default='before',
        help='Specify whether, '
             'in the presence of multiple DWI series for a session, '
             'one prefers concatenation of those files into a single series '
             'before vs. after denoising '
             '(not guaranteed to be used depending on data)')

    preproc_participant_options = \
        cmdline.add_argument_group(
            'Options that are relevant to both preproc-level and '
            'participant-level analyses')
    preproc_participant_options.add_argument(
        f'{OPTION_PREFIX}t1w_preproc',
        metavar='path',
        type=app.Parser.DirectoryIn(),
        help='Provide a path by which pre-processed T1-weighted image data '
             'may be found for the processed participant(s) / session(s)')

    participant_options = \
        cmdline.add_argument_group(
            'Options that are relevant to participant-level analysis')
    # -atlas_path is only meaningful outside the container, where atlas
    # data are not at a fixed filesystem location
    if not IS_CONTAINER:
        participant_options.add_argument(
            f'{OPTION_PREFIX}atlas_path',
            metavar='path',
            type=app.Parser.DirectoryIn(),
            help='The filesystem path in which to search for atlas '
                 'parcellation files.')
    participant_options.add_argument(
        f'{OPTION_PREFIX}parcellation',
        help='The choice of connectome parcellation scheme '
             '(compulsory for participant-level analysis); '
             f'options are: {", ".join(PARCELLATION_CHOICES)}.',
        choices=PARCELLATION_CHOICES)
    participant_options.add_argument(
        f'{OPTION_PREFIX}streamlines',
        type=app.Parser.Int(0),
        default=0,
        help='The number of streamlines to generate for each subject '
             '(will be determined heuristically if not explicitly set).')
    participant_options.add_argument(
        f'{OPTION_PREFIX}template_reg',
        metavar='software',
        help='The choice of registration software for mapping subject to '
             'template space; '
             f'options are: {", ".join(REGISTRATION_CHOICES)}.',
        choices=REGISTRATION_CHOICES)
++++----------------------- mrtrix3_connectome/preproc/shared.py | 5 - 2 files changed, 24 insertions(+), 180 deletions(-) diff --git a/mrtrix3_connectome/preproc/run.py b/mrtrix3_connectome/preproc/run.py index bad2223..2a31ed6 100644 --- a/mrtrix3_connectome/preproc/run.py +++ b/mrtrix3_connectome/preproc/run.py @@ -724,181 +724,30 @@ def run_preproc(bids_dir, session, shared, # Combined bias field correction, # intensity normalisation, # and brain mask derivation - if shared.have_dwibiasnormmask: - app.console('Running simultaneous bias field correction, ' - 'intensity normalisation, ' - 'and DWI brain mask derivation ' - 'via dwibiasnormmask command') - dwi_biasnorm_image = pathlib.Path('dwi_biasnorm.mif') - dwi_mask_image = pathlib.Path('dwi_mask.mif') - # Note that: - # 1. The first of these results in the synthstrip command - # being utilised in the initial dwi2mask call - # to derive an initial mask prior to the first iteration - # 2. The second of these results in the synthstrip command - # being run with the ODF sum image as input - # during the iterative process - mask_algo_options = ['-config', 'Dwi2maskAlgo', 'synthstrip', - '-mask_algo', 'synthstrip'] \ - if shared.dwi2mask_algo == 'synthstrip' \ - else [] - run.command(['dwibiasnormmask', - dwi_image, - dwi_biasnorm_image, - dwi_mask_image] - + mask_algo_options) - app.cleanup(dwi_image) - dwi_image = dwi_biasnorm_image - - else: # No dwibiasnormmask available - - # TODO Strongly consider removing - - # Step 5.1: Generate an image containing all voxels where the - # DWI contains valid data - dwi_validdata_image = pathlib.Path('dwi_validdata_mask.mif') - run.command(f'mrmath {dwi_image} max -axis 3 - | ' - f'mrthreshold - {dwi_validdata_image} -abs 0.0 -comparison gt') - - # Determine whether we are working with single-shell or multi-shell data - bvalues = [ - int(round(float(value))) - for value in image.mrinfo(dwi_image, 'shell_bvalues') \ - .strip().split()] - multishell = len(bvalues) > 2 - - # 
Step 5.2: Initial DWI brain mask - dwi_mask_image = pathlib.Path('dwi_mask_init.mif') - app.console('Performing intial DWI brain masking') - run.command(f'dwi2mask {shared.dwi2mask_algo} {dwi_image} {dwi_mask_image}') - - # Step 5.3: Combined RF estimation / CSD / mtnormalise / mask revision - # DWI brain masking may be inaccurate due to residual bias field. - # We want to perform: - # - Response function estimation; - # - Multi-tissue CSD (with a lower lmax for speed); - # - Mtnormalise to remove any bias field; - # - Re-calculation of brain mask; - # in an iterative fashion, as all steps may influence the others. - class Tissue(object): #pylint: disable=useless-object-inheritance - def __init__(self, name, index): - self.name = name - iter_string = f'_iter{index}' - self.rf = pathlib.Path(f'response_{name}{iter_string}.txt') - self.fod_init = pathlib.Path(f'FODinit_{name}{iter_string}.mif') - self.fod_norm = pathlib.Path(f'FODnorm_{name}{iter_string}.mif') - - app.console('Commencing iterative DWI bias field correction ' - 'and brain masking') - iteration = 0 - step = 'initialisation' - dice_coefficient = 0.0 - def msg(): - return f'Iteration {iteration}; ' \ - f'{step} step; ' \ - f'previous Dice coefficient {dice_coefficient}' - progress = app.ProgressBar(msg) - dwi_input_image = dwi_image - for iteration in range(0, DWIBIASNORMMASK_MAX_ITERS): - iter_string = f'_iter{iteration+1}' - - tissues = [Tissue('WM', iteration), - Tissue('GM', iteration), - Tissue('CSF', iteration)] - - step = 'dwi2response' - progress.increment() - run.command(['dwi2response', 'dhollander', dwi_input_image, - '-mask', dwi_mask_image] - + [tissue.rf for tissue in tissues]) - - # Remove GM if we can't deal with it - lmaxes = '4,0,0' - if not multishell: - app.cleanup(tissues[1].rf) - tissues = tissues[::2] - lmaxes = '4,0' - - step = 'dwi2fod' - progress.increment() - run.command(f'dwi2fod msmt_csd {dwi_input_image} ' - f'-lmax {lmaxes} ' - + ' '.join(f'{tissue.rf} {tissue.fod_init}' - 
for tissue in tissues)) - - step = 'mtnormalise' - progress.increment() - field_path = pathlib.Path(f'field{iter_string}.mif') - factors_path = pathlib.Path(f'factors{iter_string}.txt') - run.command(f'maskfilter {dwi_mask_image} erode - | ' - 'mtnormalise -mask - -balanced ' - f'-check_norm {field_path} ' - f'-check_factors {factors_path} ' - + ' '.join(f'{tissue.fod_init} {tissue.fod_norm}' - for tissue in tissues)) - app.cleanup([tissue.fod_init for tissue in tissues]) - - # Apply both estimated bias field, and appropiate - # scaling factor, to DWIs - step = 'mrcalc_dwi' - progress.increment() - csf_rf = matrix.load_matrix(tissues[-1].rf) - csf_rf_bzero_lzero = csf_rf[0][0] - app.cleanup([tissue.rf for tissue in tissues]) - balance_factors = matrix.load_vector(factors_path) - csf_balance_factor = balance_factors[-1] - app.cleanup(factors_path) - scale_multiplier = (1000.0 * math.sqrt(4.0*math.pi)) / \ - (csf_rf_bzero_lzero / csf_balance_factor) - new_dwi_image = pathlib.Path(f'dwi{iter_string}.mif') - run.command(f'mrcalc {dwi_image} {field_path} -div ' - f'{scale_multiplier} -mult {new_dwi_image}') - app.cleanup(field_path) - app.cleanup(dwi_image) - dwi_image = new_dwi_image - - step = 'dwi2mask' - progress.increment() - new_dwi_mask_image = pathlib.Path(f'dwi_mask{iter_string}.mif') - run.command(f'mrconvert {tissues[0].fod_norm} -coord 3 0 - | ' - f'mrmath - {" ".join(tissue.fod_norm for tissue in tissues[1:])} sum - | ' - f'mrthreshold - -abs {TISSUESUM_THRESHOLD} - | ' - 'maskfilter - connect -largest - | ' - 'mrcalc 1 - -sub - -datatype bit | ' - 'maskfilter - connect -largest - | ' - 'mrcalc 1 - -sub - -datatype bit | ' - 'maskfilter - clean - | ' - f'mrcalc - {dwi_validdata_image} -mult {new_dwi_mask_image} -datatype bit') - app.cleanup([tissue.fod_norm for tissue in tissues]) - - # Compare input and output masks - step = 'mrcalc_mask' - progress.increment() - dwi_old_mask_count = image.statistics(dwi_mask_image, - mask=dwi_mask_image).count - 
dwi_new_mask_count = image.statistics(new_dwi_mask_image, - mask=new_dwi_mask_image).count - app.debug(f'Old mask: {dwi_old_mask_count}') - app.debug(f'New mask: {dwi_new_mask_count}') - dwi_mask_overlap_image = pathlib.Path(f'dwi_mask_overlap{iter_string}.mif') - run.command(['mrcalc', - dwi_mask_image, - new_dwi_mask_image, - '-mult', - dwi_mask_overlap_image]) - app.cleanup(dwi_mask_image) - dwi_mask_image = new_dwi_mask_image - mask_overlap_count = image.statistics(dwi_mask_overlap_image, - mask=dwi_mask_overlap_image).count - app.debug(f'Mask overlap: {mask_overlap_count}') - dice_coefficient = 2.0 * mask_overlap_count / \ - (dwi_old_mask_count + dwi_new_mask_count) - app.debug(f'Dice coefficient: {dice_coefficient}') - if dice_coefficient > (1.0 - 1e-3): - app.debug('Exiting iterative loop due to mask convergence') - break - progress.done() - app.cleanup(dwi_validdata_image) + app.console('Running simultaneous bias field correction, ' + 'intensity normalisation, ' + 'and DWI brain mask derivation ' + 'via dwibiasnormmask command') + dwi_biasnorm_image = pathlib.Path('dwi_biasnorm.mif') + dwi_mask_image = pathlib.Path('dwi_mask.mif') + # Note that: + # 1. The first of these results in the synthstrip command + # being utilised in the initial dwi2mask call + # to derive an initial mask prior to the first iteration + # 2. 
The second of these results in the synthstrip command + # being run with the ODF sum image as input + # during the iterative process + mask_algo_options = ['-config', 'Dwi2maskAlgo', 'synthstrip', + '-mask_algo', 'synthstrip'] \ + if shared.dwi2mask_algo == 'synthstrip' \ + else [] + run.command(['dwibiasnormmask', + dwi_image, + dwi_biasnorm_image, + dwi_mask_image] + + mask_algo_options) + app.cleanup(dwi_image) + dwi_image = dwi_biasnorm_image # Step 6: Crop images to reduce storage space # (but leave some padding on the sides) diff --git a/mrtrix3_connectome/preproc/shared.py b/mrtrix3_connectome/preproc/shared.py index e555e73..94786e4 100644 --- a/mrtrix3_connectome/preproc/shared.py +++ b/mrtrix3_connectome/preproc/shared.py @@ -90,8 +90,3 @@ def get_eddy_help(binary_name): self.dwi2mask_algo = 'legacy' app.warn('FreeSurfer command mri_synthstrip not present;' ' legacy dwi2mask algorithm will be used') - - self.have_dwibiasnormmask = bool(shutil.which('dwibiasnormmask')) - if not self.have_dwibiasnormmask: - app.warn('MRtrix3 command dwibiasnormmask not present; ' - 'process will be completed using manual code') From 08fc89c0274da8a32bf5ccea2a3c1db3184815ff Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Wed, 29 Oct 2025 11:05:10 +1100 Subject: [PATCH 03/16] Add preprepco-level options -eddy_cubicflm, -eddy_mbs --- mrtrix3_connectome/execute.py | 6 ++++-- mrtrix3_connectome/preproc/run.py | 13 ++++--------- mrtrix3_connectome/preproc/shared.py | 6 +++++- mrtrix3_connectome/usage.py | 14 ++++++++++++++ 4 files changed, 27 insertions(+), 12 deletions(-) diff --git a/mrtrix3_connectome/execute.py b/mrtrix3_connectome/execute.py index 0f46258..faaba78 100644 --- a/mrtrix3_connectome/execute.py +++ b/mrtrix3_connectome/execute.py @@ -76,7 +76,10 @@ def execute(): #pylint: disable=unused-variable if app.ARGS.analysis_level == 'preproc': - preproc_shared = PreprocShared(app.ARGS.gdc) + preproc_shared = PreprocShared(app.ARGS.gdc, + app.ARGS.concat_denoise, + 
app.ARGS.eddy_cubicflm, + app.ARGS.eddy_mbs) for session_to_process in sessions_to_analyze: app.console(f'Commencing execution for session: {"_".join(session_to_process)}') @@ -84,7 +87,6 @@ def execute(): #pylint: disable=unused-variable session_to_process, preproc_shared, app.ARGS.t1w_preproc, - app.ARGS.concat_denoise, app.ARGS.output_verbosity, output_app_path) diff --git a/mrtrix3_connectome/preproc/run.py b/mrtrix3_connectome/preproc/run.py index 2a31ed6..770442f 100644 --- a/mrtrix3_connectome/preproc/run.py +++ b/mrtrix3_connectome/preproc/run.py @@ -575,19 +575,13 @@ def run_preproc(bids_dir, session, shared, mporder = 1 + int(math.ceil(num_slices/(mb_factor*4))) app.debug(f'MPorder: {mporder}') - # With Cima-X single-refocus data, cubic first-level model is required; - # for homogeneity of processing, activate regardless of scanner model - # TODO This may no longer be required after a scanner update; - # seek clarification from Siemens RE whether this can be determined from input data - eddy_options = ['--flm=cubic'] + eddy_options = ['--flm=cubic'] if shared.eddy_cubicflm else [] if shared.eddy_repol: eddy_options.append('--repol') if shared.eddy_mporder and have_slice_timing: eddy_options.append('--mporder=' + str(mporder)) - # Disabled: is only necessary for cohorts with very large motion, - # and significantly increases execution time - #if shared.eddy_mbs: - # eddy_options.append('--estimate_move_by_susceptibility') + if shared.eddy_mbs: + eddy_options.append('--estimate_move_by_susceptibility') # # High b-value monopolar data still has eddy current distortions # in b=0 images @@ -602,6 +596,7 @@ def run_preproc(bids_dir, session, shared, # Exception thrown # - eddy: msg=ECScanManager::set_slice_to_vol_reference: # ref index out of bounds + # TODO Investigate whether this is still the case #if monopolar: # eddy_options.append('--b0_flm=linear') diff --git a/mrtrix3_connectome/preproc/shared.py b/mrtrix3_connectome/preproc/shared.py index 
94786e4..5c43b81 100644 --- a/mrtrix3_connectome/preproc/shared.py +++ b/mrtrix3_connectome/preproc/shared.py @@ -7,8 +7,12 @@ from ..anat.shared import Shared as T1wShared class Shared(object): #pylint: disable=useless-object-inheritance - def __init__(self, gdc_dir): + def __init__(self, gdc_dir, concat_denoise, eddy_cubicflm, eddy_mbs): self.gdc_dir = gdc_dir + self.concat_denoise = concat_denoise + self.eddy_cubicflm = eddy_cubicflm + self.eddy_mbs = eddy_mbs + self.gdc_images = {} if self.gdc_dir is not None: gdc_dir_images = self.gdc_dir.glob('*.*') diff --git a/mrtrix3_connectome/usage.py b/mrtrix3_connectome/usage.py index f921f9c..71c6028 100644 --- a/mrtrix3_connectome/usage.py +++ b/mrtrix3_connectome/usage.py @@ -244,6 +244,20 @@ def usage(cmdline): #pylint: disable=unused-variable 'one prefers concatenation of those files into a single series ' 'before vs. after denoising ' '(not guaranteed to be used depending on data)') + preproc_options.add_argument( + f'{OPTION_PREFIX}eddy_cubicflm', + action='store_true', + help='Specify that FSL eddy needs to be invoked' + ' with a cubic rather than default quadratic first-level eddy current model;' + ' this can be deemed necessary for specific datasets' + ' based on knowledge of the datasets beyond what is encoded in BIDS metadata') + preproc_options.add_argument( + f'{OPTION_PREFIX}eddy_mbs', + action='store_true', + help='Specify that FSL eddy should have the "movement-by-susceptibility" capability activated;' + ' this estimates changes in the susceptibility field that result from subject motion,' + ' which can improve pre-processing in non-conformant cohorts' + ' but comes at considerable computational expense') preproc_participant_options = \ cmdline.add_argument_group( From 208426bb3f3895778d4ebe7b68b5833f2d351ca4 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Tue, 4 Nov 2025 11:00:07 +1100 Subject: [PATCH 04/16] Preproc: Fix refactoring of "concat_denoise" --- mrtrix3_connectome/preproc/run.py | 5 ++--- 
1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/mrtrix3_connectome/preproc/run.py b/mrtrix3_connectome/preproc/run.py index 770442f..64ffdae 100644 --- a/mrtrix3_connectome/preproc/run.py +++ b/mrtrix3_connectome/preproc/run.py @@ -25,8 +25,7 @@ # TODO Further split code across multiple files def run_preproc(bids_dir, session, shared, - t1w_preproc_path, concat_denoise, - output_verbosity, output_app_dir): + t1w_preproc_path, output_verbosity, output_app_dir): session_label = '_'.join(session) output_subdir = pathlib.Path(output_app_dir, 'MRtrix3_connectome-preproc', *session) @@ -360,7 +359,7 @@ def run_preproc(bids_dir, session, shared, 'to the input DWI data') else: - if concat_denoise == 'before' and len(dwi_image_list) > 1: + if shared.concat_denoise == 'before' and len(dwi_image_list) > 1: # TODO We need to determine first whether these images can be trivially concatenated: # if execution of dwicat would result in resampling, # and that would then preclude the application of denoising / Gibbs ringing removal, From 54d22db211c45ba73525c31dc4b68f3c85c11417 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Tue, 11 Nov 2025 22:17:38 +1100 Subject: [PATCH 05/16] Fix erroneous usage of eddy MBS functionality Erroneously set Shared class member variable name already in use, resulting in engagement on that functionality based in its availability rather than user request. 
--- mrtrix3_connectome/preproc/shared.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/mrtrix3_connectome/preproc/shared.py b/mrtrix3_connectome/preproc/shared.py index 5c43b81..92dbd1f 100644 --- a/mrtrix3_connectome/preproc/shared.py +++ b/mrtrix3_connectome/preproc/shared.py @@ -7,11 +7,10 @@ from ..anat.shared import Shared as T1wShared class Shared(object): #pylint: disable=useless-object-inheritance - def __init__(self, gdc_dir, concat_denoise, eddy_cubicflm, eddy_mbs): + def __init__(self, gdc_dir, concat_denoise, eddy_cubicflm, want_eddy_mbs): self.gdc_dir = gdc_dir self.concat_denoise = concat_denoise self.eddy_cubicflm = eddy_cubicflm - self.eddy_mbs = eddy_mbs self.gdc_images = {} if self.gdc_dir is not None: @@ -72,7 +71,7 @@ def get_eddy_help(binary_name): self.eddy_repol = False self.eddy_mporder = False - self.eddy_mbs = False + eddy_has_mbs = False for line in eddy_help.splitlines(): line = line.lstrip() if line.startswith('--repol'): @@ -80,7 +79,15 @@ def get_eddy_help(binary_name): elif line.startswith('--mporder') and self.eddy_cuda: self.eddy_mporder = True elif line.startswith('--estimate_move_by_susceptibility'): + eddy_has_mbs = True + if want_eddy_mbs: + if eddy_has_mbs: self.eddy_mbs = True + else: + app.warn("eddy movement-by-susceptibility requested," + " but not available in installed version of eddy") + else: + self.eddy_mbs = False self.dwibiascorrect_algo = 'ants' if not self.t1w_shared.n4_cmd: From 88c031ffc72c9fc293ebceaca7e12a257e599964 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Wed, 12 Nov 2025 10:02:55 +1100 Subject: [PATCH 06/16] Code cleanup #137 --- mrtrix3_connectome/anat/get.py | 18 ++--- mrtrix3_connectome/participant/run.py | 107 ++++++++++++++------------ mrtrix3_connectome/preproc/run.py | 21 ++--- mrtrix3_connectome/usage.py | 3 +- 4 files changed, 78 insertions(+), 71 deletions(-) diff --git a/mrtrix3_connectome/anat/get.py b/mrtrix3_connectome/anat/get.py index 
fc08fdb..b0aa39a 100644 --- a/mrtrix3_connectome/anat/get.py +++ b/mrtrix3_connectome/anat/get.py @@ -39,7 +39,7 @@ def get_t1w_preproc_images(import_path, if t1w_preproc.is_file(): preproc_image_path = t1w_preproc else: - expected_image_basename = session_label + '*_T1w.nii*' + expected_image_basename = f'{session_label}*_T1w.nii*' for candidate in [ pathlib.Path(''), pathlib.Path('anat'), @@ -135,8 +135,8 @@ def get_t1w_preproc_images(import_path, 'T1w_premasked.mif' if preproc_image_is_masked else 'T1w.mif']) # If we have been provided with a pre-processed T1-weighted image - # (regardless of where it has come from), check to see if there - # is a corresponding mask image + # (regardless of where it has come from), + # check to see if there is a corresponding mask image preproc_mask_path = preproc_image_path.parent / \ preproc_image_path.name \ .replace('_desc-preproc', '_desc-brain') \ @@ -220,7 +220,7 @@ def get_t1w_preproc_images(import_path, # do everything based on the raw T1-weighted image else: - # TODO If we're doing pre-processing of the T1w from scratch, + # If we're doing pre-processing of the T1w from scratch, # check to see if GDC has already been applied to the raw data, # and if not, whether we have the right warp field available; # if we do, then we'll perform GDC before anything else @@ -275,12 +275,10 @@ def get_t1w_preproc_images(import_path, else: # TODO Make this acceptable in preproc-level analysis with adequate warning return - #raise MRtrixError('Cannot complete processing for session ' - # + session_label - # + ': no pre-processed T1-weighted image ' - # + 'available, and software tools for ' - # + 'processing raw T1-weighted image ' - # + 'not installed') + #raise MRtrixError(f'Cannot complete processing for session {session_label}:' + # 'no pre-processed T1-weighted image available,' + # ' and software tools for processing raw T1-weighted image' + # ' not installed') if gdc_to_be_applied: run.command(['mrtransform', raw_image_path, 
'T1w_raw.nii', diff --git a/mrtrix3_connectome/participant/run.py b/mrtrix3_connectome/participant/run.py index acd9712..6a325795 100644 --- a/mrtrix3_connectome/participant/run.py +++ b/mrtrix3_connectome/participant/run.py @@ -577,40 +577,42 @@ def do_import(import_path): # Use ANTs SyN for registration to template # From Klein and Avants, Frontiers in Neuroinformatics 2013: ants_prefix = 'template_to_t1_' - run.command(['antsRegistration', - '--dimensionality', '3', - '--output', ants_prefix, - '--use-histogram-matching', '1', - '--initial-moving-transform', - f'[{t1w_histmatched_path},{shared.template_image_path},1]', - '--transform', 'Rigid[0.1]', - '--metric', - f'MI[{t1w_histmatched_path},{shared.template_image_path},1,32,Regular,0.25]', - '--convergence', '1000x500x250x100', - '--smoothing-sigmas', '3x2x1x0', - '--shrink-factors', '8x4x2x1', - '--transform', 'Affine[0.1]', - '--metric', - f'MI[{t1w_histmatched_path},{shared.template_image_path},1,32,Regular,0.25]', - '--convergence', '1000x500x250x100', - '--smoothing-sigmas', '3x2x1x0', - '--shrink-factors', '8x4x2x1', - '--transform', 'BSplineSyN[0.1,26,0,3]', - '--metric', - f'CC[{t1w_histmatched_path},{shared.template_image_path},1,4]', - '--convergence', '100x70x50x20', - '--smoothing-sigmas', '3x2x1x0', - '--shrink-factors', '6x4x2x1']) + run.command([ + 'antsRegistration', + '--dimensionality', '3', + '--output', ants_prefix, + '--use-histogram-matching', '1', + '--initial-moving-transform', + f'[{t1w_histmatched_path},{shared.template_image_path},1]', + '--transform', 'Rigid[0.1]', + '--metric', + f'MI[{t1w_histmatched_path},{shared.template_image_path},1,32,Regular,0.25]', + '--convergence', '1000x500x250x100', + '--smoothing-sigmas', '3x2x1x0', + '--shrink-factors', '8x4x2x1', + '--transform', 'Affine[0.1]', + '--metric', + f'MI[{t1w_histmatched_path},{shared.template_image_path},1,32,Regular,0.25]', + '--convergence', '1000x500x250x100', + '--smoothing-sigmas', '3x2x1x0', + '--shrink-factors', 
'8x4x2x1', + '--transform', 'BSplineSyN[0.1,26,0,3]', + '--metric', + f'CC[{t1w_histmatched_path},{shared.template_image_path},1,4]', + '--convergence', '100x70x50x20', + '--smoothing-sigmas', '3x2x1x0', + '--shrink-factors', '6x4x2x1']) transformed_atlas_path = 'atlas_transformed.nii' - run.command(['antsApplyTransforms', - '--dimensionality', '3', - '--input', shared.parc_image_path, - '--reference-image', t1w_histmatched_path, - '--output', transformed_atlas_path, - '--n', 'GenericLabel', - '--transform', f'{ants_prefix}1Warp.nii.gz', - '--transform', f'{ants_prefix}0GenericAffine.mat', - '--default-value', '0']) + run.command([ + 'antsApplyTransforms', + '--dimensionality', '3', + '--input', shared.parc_image_path, + '--reference-image', t1w_histmatched_path, + '--output', transformed_atlas_path, + '--n', 'GenericLabel', + '--transform', f'{ants_prefix}1Warp.nii.gz', + '--transform', f'{ants_prefix}0GenericAffine.mat', + '--default-value', '0']) app.cleanup(glob.glob(f'{ants_prefix}*')) elif shared.template_registration_software == 'fsl': @@ -655,14 +657,15 @@ def do_import(import_path): run.command(f'maskfilter {shared.template_mask_path} dilate {fnirt_ref_mask_path}' ' -npass 3') - run.command([shared.fnirt_cmd, - f'--config={shared.fnirt_config_basename}', - f'--ref={fnirt_ref_path}', - f'--in={fnirt_in_path}', - '--aff=T1w_to_template.mat', - f'--refmask={fnirt_ref_mask_path}', - f'--inmask={fnirt_in_mask_path}', - '--cout=T1w_to_template_warpcoef.nii']) + run.command([ + shared.fnirt_cmd, + f'--config={shared.fnirt_config_basename}', + f'--ref={fnirt_ref_path}', + f'--in={fnirt_in_path}', + '--aff=T1w_to_template.mat', + f'--refmask={fnirt_ref_mask_path}', + f'--inmask={fnirt_in_mask_path}', + '--cout=T1w_to_template_warpcoef.nii']) app.cleanup(fnirt_in_mask_path) if not t1w_is_premasked: app.cleanup(fnirt_ref_mask_path) @@ -672,19 +675,21 @@ def do_import(import_path): # Use result of registration to transform atlas # parcellation to subject space - 
run.command([shared.invwarp_cmd, - f'--ref={t1w_histmatched_path}', - f'--warp={fnirt_warp_subject2template_path}', - '--out=template_to_T1w_warpcoef.nii']) + run.command([ + shared.invwarp_cmd, + f'--ref={t1w_histmatched_path}', + f'--warp={fnirt_warp_subject2template_path}', + '--out=template_to_T1w_warpcoef.nii']) app.cleanup(fnirt_warp_subject2template_path) fnirt_warp_template2subject_path = \ fsl.find_image('template_to_T1w_warpcoef') - run.command([shared.applywarp_cmd, - f'--ref={t1w_histmatched_path}', - f'--in={shared.parc_image_path}', - f'--warp={fnirt_warp_template2subject_path}', - '--out=atlas_transformed.nii', - '--interp=nn']) + run.command([ + shared.applywarp_cmd, + f'--ref={t1w_histmatched_path}', + f'--in={shared.parc_image_path}', + f'--warp={fnirt_warp_template2subject_path}', + '--out=atlas_transformed.nii', + '--interp=nn']) app.cleanup(fnirt_warp_template2subject_path) transformed_atlas_path = fsl.find_image('atlas_transformed') diff --git a/mrtrix3_connectome/preproc/run.py b/mrtrix3_connectome/preproc/run.py index 64ffdae..418c9ab 100644 --- a/mrtrix3_connectome/preproc/run.py +++ b/mrtrix3_connectome/preproc/run.py @@ -7,7 +7,6 @@ from mrtrix3 import MRtrixError from mrtrix3 import app from mrtrix3 import image -from mrtrix3 import matrix from mrtrix3 import run from ..anat.get import get_t1w_preproc_images @@ -221,18 +220,21 @@ def run_preproc(bids_dir, session, shared, elif json_data['ManufacturersModelName'] != scanner_name: raise MRtrixError('Inconsistent "ManufacturersModelName" value in input DWI data') elif scanner_name is not None: - raise MRtrixError('Inconsistent appearance of "ManufacturersModelName" in input DWI data') + raise MRtrixError('Inconsistent appearance of "ManufacturersModelName"' + ' in input DWI data') else: gdc_to_be_applied = False if gdc_already_applied: - app.warn('Gradient non-linearity distortion correction already applied to input DWI data; ' - 'some pre-processing steps will need to be omitted 
accordingly') + app.warn('Gradient non-linearity distortion correction already applied to input DWI data;' + ' some pre-processing steps will need to be omitted accordingly') gdc_to_be_applied = False if gdc_to_be_applied: - app.console(f'Scanner model "{scanner_name}" matches file "{shared.gdc_images[scanner_name]}",' - ' and no prior application of gradient non-linearity distortion correction indicated in image metadata;' + app.console(f'Scanner model "{scanner_name}"' + f' matches file "{shared.gdc_images[scanner_name]}",' + ' and no prior application of gradient non-linearity distortion correction' + ' indicated in image metadata;' ' correction to be applied after dwifslpreproc') dwi_image_list = [pathlib.Path(f'dwi{index}.mif') for index in range(1, dwi_index+1)] @@ -360,7 +362,7 @@ def run_preproc(bids_dir, session, shared, else: if shared.concat_denoise == 'before' and len(dwi_image_list) > 1: - # TODO We need to determine first whether these images can be trivially concatenated: + # We need to determine first whether these images can be trivially concatenated: # if execution of dwicat would result in resampling, # and that would then preclude the application of denoising / Gibbs ringing removal, # then we should override this and do the concatenation after these steps @@ -480,7 +482,8 @@ def run_preproc(bids_dir, session, shared, 'performing explicit interpolation') fmap_resampled_image_list = [] for item in fmap_transformed_image_list: - fmap_resampled_image_path = pathlib.Path(f'{item.with_suffix("")}_resampled.mif') + fmap_resampled_image_path = \ + pathlib.Path(f'{item.with_suffix("")}_resampled.mif') run.command(['mrtransform', item, '-template', dwifslpreproc_input, @@ -580,7 +583,7 @@ def run_preproc(bids_dir, session, shared, if shared.eddy_mporder and have_slice_timing: eddy_options.append('--mporder=' + str(mporder)) if shared.eddy_mbs: - eddy_options.append('--estimate_move_by_susceptibility') + 
eddy_options.append('--estimate_move_by_susceptibility') # # High b-value monopolar data still has eddy current distortions # in b=0 images diff --git a/mrtrix3_connectome/usage.py b/mrtrix3_connectome/usage.py index 71c6028..627f188 100644 --- a/mrtrix3_connectome/usage.py +++ b/mrtrix3_connectome/usage.py @@ -254,7 +254,8 @@ def usage(cmdline): #pylint: disable=unused-variable preproc_options.add_argument( f'{OPTION_PREFIX}eddy_mbs', action='store_true', - help='Specify that FSL eddy should have the "movement-by-susceptibility" capability activated;' + help='Specify that FSL eddy should have' + ' the "movement-by-susceptibility" capability activated;' ' this estimates changes in the susceptibility field that result from subject motion,' ' which can improve pre-processing in non-conformant cohorts' ' but comes at considerable computational expense') From 16c93e7de4d070ba3467d96f67735b27549ad2d8 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Wed, 12 Nov 2025 10:03:46 +1100 Subject: [PATCH 07/16] Use SynthStrip for T1w brain extraction Closes #138. 
--- mrtrix3_connectome/anat/get.py | 70 +++++++++++++++++++------------ mrtrix3_connectome/anat/shared.py | 5 +-- 2 files changed, 46 insertions(+), 29 deletions(-) diff --git a/mrtrix3_connectome/anat/get.py b/mrtrix3_connectome/anat/get.py index b0aa39a..af22e6a 100644 --- a/mrtrix3_connectome/anat/get.py +++ b/mrtrix3_connectome/anat/get.py @@ -183,6 +183,9 @@ def get_t1w_preproc_images(import_path, # it's just the mask that is absent if preproc_image_path: + if t1w_shared.synthstrip_cmd: + app.console(f'Using SynthStrip for brain extraction for session {session_label}, ' + 'operating on existing pre-processed T1-weighted image') if t1w_shared.robex_cmd: app.console(f'Using ROBEX for brain extraction for session {session_label}, ' 'operating on existing pre-processed T1-weighted image') @@ -198,9 +201,18 @@ def get_t1w_preproc_images(import_path, run.command(['mrconvert', preproc_image_path, 'T1w.nii', - '-strides', '+1,+2,+3' if t1w_shared.robex_cmd else '-1,+2,+3']) + '-strides', + '+1,+2,+3' \ + if t1w_shared.synthstrip_cmd or t1w_shared.robex_cmd \ + else '-1,+2,+3']) - if t1w_shared.robex_cmd: + if t1w_shared.synthstrip_cmd: + run.command(f'{t1w_shared.synthstrip_cmd} -i T1w.nii -m T1w_mask.nii') + run.command(['mrconvert', + 'T1w_mask.nii', + 'T1w_mask.mif', + '-datatype', 'bit']) + elif t1w_shared.robex_cmd: run.command(f'{t1w_shared.robex_cmd} T1w.nii T1w_brain.nii T1w_mask.nii') run.command(['mrconvert', 'T1w_mask.nii', @@ -258,20 +270,18 @@ def get_t1w_preproc_images(import_path, 'will not apply this correction') gdc_to_be_applied = False - if t1w_shared.robex_cmd and t1w_shared.n4_cmd: + if (t1w_shared.synthstrip_cmd or t1w_shared.robex_cmd) and t1w_shared.n4_cmd: app.console('No pre-processed T1-weighted image ' f'found for session {session_label}; ' - 'will use ROBEX and N4 for ' - 'iterative brain extraction and bias field ' + f'will use {"SynthStrip" if t1w_shared.synthstrip_cmd else "ROBEX"} ' + 'and N4 for iterative brain extraction and bias 
field ' 'correction from raw T1-weighted image input') elif t1w_shared.fsl_anat_cmd: app.console('No pre-processed T1-weighted image ' f'found for session {session_label}; ' 'will use fsl_anat for brain extraction and ' - 'bias field correction from raw T1-weighted ' - 'image input' - f'{"" if t1w_shared.robex_cmd else " (ROBEX not installed)"}' - f'{"" if t1w_shared.n4_cmd else " (N4 not installed)"}') + 'bias field correction from raw T1-weighted image input' + '(other softwares not installed)"') else: # TODO Make this acceptable in preproc-level analysis with adequate warning return @@ -291,39 +301,50 @@ def get_t1w_preproc_images(import_path, run.command(['mrconvert', raw_image_path, 'T1w_raw.nii', '-strides', '+1,+2,+3']) - if t1w_shared.robex_cmd and t1w_shared.n4_cmd: + if (t1w_shared.synthstrip_cmd or t1w_shared.robex_cmd) and t1w_shared.n4_cmd: # Do a semi-iterative approach here: # Get an initial brain mask, use that mask to estimate a # bias field, then re-compute the brain mask # TODO Consider making this fully iterative, just like the # approach in preproc with dwi2mask and mtnormalise - run.command([t1w_shared.robex_cmd, - 'T1w_raw.nii', - 'T1w_initial_brain.nii', - 'T1w_initial_mask.nii']) - app.cleanup('T1w_initial_brain.nii') + if t1w_shared.synthstrip_cmd: + run.command([t1w_shared.synthstrip_cmd, + '-i', 'T1w_raw.nii', + '-m', 'T1w_initial_mask.nii']) + else: + run.command([t1w_shared.robex_cmd, + 'T1w_raw.nii', + 'T1w_initial_brain.nii', + 'T1w_initial_mask.nii']) + app.cleanup('T1w_initial_brain.nii') run.command([t1w_shared.n4_cmd, '-i', 'T1w_raw.nii', '-w', 'T1w_initial_mask.nii', '-o', 'T1w_biascorr.nii']) app.cleanup('T1w_initial_mask.nii') - run.command([t1w_shared.robex_cmd, - 'T1w_biascorr.nii', - 'T1w_biascorr_brain.nii', - 'T1w_biascorr_brain_mask.nii']) - app.cleanup('T1w_biascorr_brain.nii') + if t1w_shared.synthstrip_cmd: + run.command([t1w_shared.synthstrip_cmd, + '-i', 'T1w_biascorr.nii', + '-m', 'T1w_biascorr_mask.nii']) + else: 
+ run.command([t1w_shared.robex_cmd, + 'T1w_biascorr.nii', + 'T1w_biascorr_brain.nii', + 'T1w_biascorr_mask.nii']) + app.cleanup('T1w_biascorr_brain.nii') run.command(['mrconvert', 'T1w_biascorr.nii', 'T1w.mif']) app.cleanup('T1w_biascorr.nii') run.command(['mrconvert', - 'T1w_biascorr_brain_mask.nii', + 'T1w_biascorr_mask.nii', 'T1w_mask.mif', '-datatype', 'bit']) - app.cleanup('T1w_biascorr_brain_mask.nii') + app.cleanup('T1w_biascorr_mask.nii') - elif t1w_shared.fsl_anat_cmd: + else: + assert t1w_shared.fsl_anat_cmd run.command(f'{t1w_shared.fsl_anat_cmd} -i T1w_raw.nii --noseg --nosubcortseg') run.command(['mrconvert', @@ -336,6 +357,3 @@ def get_t1w_preproc_images(import_path, 'T1w_mask.mif', '-datatype', 'bit']) app.cleanup('T1w_raw.anat') - - else: - assert False diff --git a/mrtrix3_connectome/anat/shared.py b/mrtrix3_connectome/anat/shared.py index 93de3e7..c7b78c3 100644 --- a/mrtrix3_connectome/anat/shared.py +++ b/mrtrix3_connectome/anat/shared.py @@ -6,8 +6,6 @@ class Shared(object): #pylint: disable=useless-object-inheritance def __init__(self, gdc_images): self.gdc_images = gdc_images - # TODO If synthstrip is available, - # prioritise using that for brain extraction try: self.fsl_anat_cmd = shutil.which(fsl.exe_name('fsl_anat')) except MRtrixError: @@ -15,8 +13,9 @@ def __init__(self, gdc_images): robex_cmd = shutil.which('ROBEX') self.robex_cmd = robex_cmd if robex_cmd else shutil.which('runROBEX.sh') self.n4_cmd = shutil.which('N4BiasFieldCorrection') + self.synthstrip_cmd = shutil.which('mri_synthstrip') - if not self.fsl_anat_cmd and not self.robex_cmd: + if not self.fsl_anat_cmd and not self.robex_cmd and not self.synthstrip_cmd: app.warn('No commands for T1w image processing found; ' 'command can only proceed if either ' 'existing pre-processed T1w image data can be found, ' From bf8c8258c258e04c9a7bb9d7277c074ce0377f52 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Mon, 17 Nov 2025 16:20:02 +1100 Subject: [PATCH 08/16] Fix use of FSL for 
derivation of brain mask only Where the user provides as input a derivatives directory that only provides a pre-processed T1-weighted image and not a corresponding mask, FSL's fsl_anat command may produce images for which the field of view in specifically the inferior portion of the image has been reduced. This would otherwise lead to downstream problems where the mask extracted from this directory and the input pre-processed T1-weighted image are expected to reside on the same voxel grid, initially by the mrhistmatch command. This fix resamples the FSL-derived brain mask back onto the full voxel grid of the input pre-processed T1-weighted image. --- mrtrix3_connectome/anat/get.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/mrtrix3_connectome/anat/get.py b/mrtrix3_connectome/anat/get.py index b0aa39a..a1b614f 100644 --- a/mrtrix3_connectome/anat/get.py +++ b/mrtrix3_connectome/anat/get.py @@ -209,9 +209,13 @@ def get_t1w_preproc_images(import_path, elif t1w_shared.fsl_anat_cmd: run.command(f'{t1w_shared.fsl_anat_cmd} -i T1w.nii' ' --noseg --nosubcortseg --nobias') - run.command(['mrconvert', + run.command(['mrgrid', fsl.find_image(pathlib.PurePath('T1w.anat', 'T1_brain_mask')), + 'regrid', 'T1w_mask.mif', + '-interp', 'nearest', + '-template', 'T1w.nii', + '-fill', '0', '-datatype', 'bit']) else: assert False From d363b0d3b1b4f2325e489dfaa570abcd6785dc90 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Tue, 18 Nov 2025 10:55:22 +1100 Subject: [PATCH 09/16] Preproc: Fix import of complex DWI data --- mrtrix3_connectome/preproc/run.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/mrtrix3_connectome/preproc/run.py b/mrtrix3_connectome/preproc/run.py index 418c9ab..60476d7 100644 --- a/mrtrix3_connectome/preproc/run.py +++ b/mrtrix3_connectome/preproc/run.py @@ -84,7 +84,7 @@ def run_preproc(bids_dir, session, shared, assert complex_part in ['mag', 'phase'] if complex_part == 'mag': # Find corresponding phase 
image - in_phase_image = entry.replace('_part-mag_', '_part-phase_') + in_phase_image = entry.parent / entry.name.replace('_part-mag_', '_part-phase_') if in_phase_image not in in_dwi_image_list: raise MRtrixError( f'Image {entry} does not have corresponding phase image') @@ -118,10 +118,10 @@ def run_preproc(bids_dir, session, shared, else: # Make sure we also have the corresponding magnitude image - if entry.replace('_part-phase_', - '_part-mag_') not in in_dwi_image_list: + if not any(item == entry.parent / entry.name.replace('_part-phase_', '_part-mag_') \ + for item in in_dwi_image_list): raise MRtrixError( - f'Image {entry} does not have corresponding mag image') + f'Phase image {entry} does not have corresponding magnitude image') # Do nothing for the second image in the pair continue else: From 37e5a1e88d11177d5617545a00d5f7d8c12228a1 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Tue, 18 Nov 2025 10:59:20 +1100 Subject: [PATCH 10/16] Preproc: Run Gibbs ringing removal on complex data --- mrtrix3_connectome/preproc/run.py | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/mrtrix3_connectome/preproc/run.py b/mrtrix3_connectome/preproc/run.py index 60476d7..c9f8d9e 100644 --- a/mrtrix3_connectome/preproc/run.py +++ b/mrtrix3_connectome/preproc/run.py @@ -392,21 +392,8 @@ def run_preproc(bids_dir, session, shared, dwi_image_list = [pathlib.Path(f'{entry.with_suffix("")}_denoised.mif') \ for entry in dwi_image_list] - # If data are complex, take the magnitude - new_dwi_image_list = [] - for entry in dwi_image_list: - if image.Header(entry).datatype().startswith('CFloat'): - mag_entry = pathlib.Path(f'{entry.with_suffix("")}_mag.mif') - run.command(f'mrcalc {entry} -abs {mag_entry}') - app.cleanup(entry) - new_dwi_image_list.append(mag_entry) - else: - new_dwi_image_list.append(entry) - dwi_image_list = new_dwi_image_list - # Step 2: Gibbs ringing removal - # TODO Can newer implementations use complex data - # for 
2D Gibbs ringing removal? + # Note: Will run on complex data if available app.console('Performing Gibbs ringing removal for DWI' f'{" and fmap" if fmap_image_list else ""} data') for i in dwi_image_list: @@ -422,6 +409,18 @@ def run_preproc(bids_dir, session, shared, fmap_image_list = [pathlib.Path(f'{i.with_suffix("")}_degibbs.mif') \ for i in fmap_image_list] + # If DWI data are complex, take the magnitude + new_dwi_image_list = [] + for entry in dwi_image_list: + if image.Header(entry).datatype().startswith('CFloat'): + mag_entry = pathlib.Path(f'{entry.with_suffix("")}_mag.mif') + run.command(f'mrcalc {entry} -abs {mag_entry}') + app.cleanup(entry) + new_dwi_image_list.append(mag_entry) + else: + new_dwi_image_list.append(entry) + dwi_image_list = new_dwi_image_list + # We need to concatenate the DWI and fmap/ data (separately) # before they can be fed into dwifslpreproc if len(dwi_image_list) > 1 or fmap_image_list: From 7c5d95a4f30258f638561d51e410f2a9f414b2f5 Mon Sep 17 00:00:00 2001 From: Aaron Capon Date: Thu, 20 Nov 2025 10:37:09 +1100 Subject: [PATCH 11/16] Fix T1w import with -t1w_preproc but no mask --- mrtrix3_connectome/anat/get.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/mrtrix3_connectome/anat/get.py b/mrtrix3_connectome/anat/get.py index 803e177..3842acf 100644 --- a/mrtrix3_connectome/anat/get.py +++ b/mrtrix3_connectome/anat/get.py @@ -51,9 +51,7 @@ def get_t1w_preproc_images(import_path, if len(glob_result) == 1: preproc_image_path = glob_result[0] break - glob_refined_result = \ - [item for item in glob_result \ - if not '_space-' in item] + glob_refined_result = list(filter(lambda x: '_space-' not in x.name, glob_result)) if len(glob_refined_result) == 1: preproc_image_path = glob_refined_result[0] break @@ -62,7 +60,7 @@ def get_t1w_preproc_images(import_path, 'pre-processed T1-weighted image ' 'due to multiple candidates ' f'in location "{t1w_preproc / candidate}": ' - f'{";".join(glob_result)}') + 
f'{";".join(map(str, glob_result))}') if preproc_image_path is None: raise MRtrixError( @@ -189,7 +187,7 @@ def get_t1w_preproc_images(import_path, if t1w_shared.robex_cmd: app.console(f'Using ROBEX for brain extraction for session {session_label}, ' 'operating on existing pre-processed T1-weighted image') - elif t1w_shared.fsl_anat_path: + elif t1w_shared.fsl_anat_cmd: app.console(f'Using fsl_anat for brain extraction for session {session_label} ' '(due to ROBEX not being installed), ' 'operating on existing pre-processed T1-weighted image') @@ -222,7 +220,7 @@ def get_t1w_preproc_images(import_path, run.command(f'{t1w_shared.fsl_anat_cmd} -i T1w.nii' ' --noseg --nosubcortseg --nobias') run.command(['mrgrid', - fsl.find_image(pathlib.PurePath('T1w.anat', 'T1_brain_mask')), + fsl.find_image(pathlib.PurePath('T1w.anat', 'T1_biascorr_brain_mask')), 'regrid', 'T1w_mask.mif', '-interp', 'nearest', From d20cbcd4ff2686a24ea8f6084c5dc5607785ebb9 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Tue, 25 Nov 2025 11:10:06 +1100 Subject: [PATCH 12/16] Update dwidenoise2 Requisite update that prevents the manifestation of NaNs in the output denoised series, which would result in complete image corruption by mrdegibbs. --- Dockerfile | 4 ++-- mrtrix3_connectome/preproc/run.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 3bf47c9..80d8c4f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -153,10 +153,10 @@ RUN apt-get install -y \ # Committish is tip of MRtrix3 #3029 as at 2025-10-21 RUN git clone https://github.com/MRtrix3/mrtrix3.git . && \ git checkout 26965d57b374a733ac0c583d3b92bad17923128a -# Main tip as at 2025-10-21 +# Main tip as at 2025-11-25 RUN git clone https://github.com/Lestropie/dwidenoise2.git dwidenoise2 && \ cd dwidenoise2 && \ - git checkout 37c9b70cf7e1c67ac846311ddd0df925424797f5 && \ + git checkout a2da5b32876256499a383a52494ca6564009ca85 && \ cd ../ && \ cp -r dwidenoise2/cpp . 
# Since external project compilation may not yet be working on 3.1.0, diff --git a/mrtrix3_connectome/preproc/run.py b/mrtrix3_connectome/preproc/run.py index c9f8d9e..9ba9fa5 100644 --- a/mrtrix3_connectome/preproc/run.py +++ b/mrtrix3_connectome/preproc/run.py @@ -93,7 +93,7 @@ def run_preproc(bids_dir, session, shared, if abs(2.0*math.pi - (phase_stats.max - phase_stats.min)) \ > 0.01: app.console(f'Phase image {in_phase_image} is not stored in radian units ' - f'(values from {phase_stats.min} to {phase_stats.max}); ' + f'(values from {phase_stats.min} to {phase_stats.max}); ' 'data will be rescaled automatically') # Are the values stored as integers? If so, assume that # taking the maximum phase value observed in the image From 09a1f2ceff17586b7f2f00e74eac3224cfe97ea5 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Wed, 3 Dec 2025 10:33:54 +1100 Subject: [PATCH 13/16] Permit image provided via -t1w_preproc to not have "_desc-preproc" --- mrtrix3_connectome/anat/get.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/mrtrix3_connectome/anat/get.py b/mrtrix3_connectome/anat/get.py index 3842acf..48df431 100644 --- a/mrtrix3_connectome/anat/get.py +++ b/mrtrix3_connectome/anat/get.py @@ -88,9 +88,11 @@ def get_t1w_preproc_images(import_path, if preproc_image_path: if '_desc-preproc' not in preproc_image_path.name: - raise MRtrixError( + app.warn( f'Selected T1-weighted image "{preproc_image_path}" ' - 'not flagged as pre-processed') + 'not explicitly flagged as pre-processed in file name; ' + 'but proceeding on presumption that it is ' + '(ie. 
no GDC or bias field correction will be applied)') # Check to see if there's a JSON file along with the T1-weighted # image; if they is, parse it to find out whether or not the From 231de4aaa1ac36c6eb2f3bee9cb3deeadf817038 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Tue, 6 Jan 2026 16:33:49 +1100 Subject: [PATCH 14/16] Preproc: Fix calculation of MPorder Previous code would result in implausible MPorder value (and immediate eddy failure) if the multiband acceleration factor was at least 4 as evidenced by the contents of metadata field "SliceTiming", but the metadata did not contain field "MultibandAccelerationFactor", as the requested order would exceed the number of slice groups. This change explicitly computes the latter if it is absent from the input metadata. --- mrtrix3_connectome/preproc/run.py | 65 ++++++++++++++++++++----------- 1 file changed, 42 insertions(+), 23 deletions(-) diff --git a/mrtrix3_connectome/preproc/run.py b/mrtrix3_connectome/preproc/run.py index 9ba9fa5..12eafeb 100644 --- a/mrtrix3_connectome/preproc/run.py +++ b/mrtrix3_connectome/preproc/run.py @@ -553,34 +553,53 @@ def run_preproc(bids_dir, session, shared, dwifslpreproc_input_header = image.Header(dwifslpreproc_input) have_slice_timing = 'SliceTiming' in dwifslpreproc_input_header.keyval() app.debug(f'Have slice timing: {have_slice_timing}') - mb_factor = int(dwifslpreproc_input_header.keyval() - .get('MultibandAccelerationFactor', '1')) - app.debug(f'Multiband factor: {mb_factor}') - if 'SliceDirection' in dwifslpreproc_input_header.keyval(): - slice_direction_code = \ - dwifslpreproc_input_header.keyval()['SliceDirection'] - if 'i' in slice_direction_code: - num_slices = dwifslpreproc_input_header.size()[0] - elif 'j' in slice_direction_code: - num_slices = dwifslpreproc_input_header.size()[1] - elif 'k' in slice_direction_code: - num_slices = dwifslpreproc_input_header.size()[2] - else: + mporder = None + if have_slice_timing: + # If "MultibandAccelerationFactor" is 
defined, + # use that and the number of slices to compute an appropriate MPorder; + # otherwise, use the number of slice groups + # (as inferred by the contents of SliceTiming) + # to perform the same computation + try: + slice_direction_code = \ + dwifslpreproc_input_header.keyval()['SliceDirection'] + if 'i' in slice_direction_code: + num_slices = dwifslpreproc_input_header.size()[0] + elif 'j' in slice_direction_code: + num_slices = dwifslpreproc_input_header.size()[1] + elif 'k' in slice_direction_code: + num_slices = dwifslpreproc_input_header.size()[2] + else: + num_slices = dwifslpreproc_input_header.size()[2] + app.warn('Error in contents of BIDS field "SliceDirection" ' + f'(value: "{slice_direction_code}"); ' + 'assuming third axis') + except KeyError: num_slices = dwifslpreproc_input_header.size()[2] - app.warn('Error reading BIDS field "SliceDirection" ' - f'(value: "{slice_direction_code}"); ' - 'assuming third axis') - else: - num_slices = dwifslpreproc_input_header.size()[2] - app.debug(f'Number of slices: {num_slices}') - mporder = 1 + int(math.ceil(num_slices/(mb_factor*4))) - app.debug(f'MPorder: {mporder}') + app.debug(f'Number of slices: {num_slices}') + try: + mb_factor = int(dwifslpreproc_input_header.keyval()['MultibandAccelerationFactor']) + app.debug(f'Multi-band acceleration factor (from header metadata): {mb_factor}') + mporder = 1 + int(math.ceil(num_slices/(mb_factor*4.0))) + except KeyError: + num_slice_groups = len(set(dwifslpreproc_input_header.keyval()["SliceTiming"])) + app.debug(f'Number of slice groups (from SliceTiming): {num_slice_groups}') + mb_factor = num_slices / num_slice_groups + if mb_factor * num_slice_groups == num_slices: + app.debug(f'Inferred multiband acceleration factor: {mb_factor}') + else: + app.warn('Potential issues in SliceTiming:' + f' found {num_slice_groups} unique slice timings' + f' in content of SliceTiming,' + f' which is incompatible with {num_slices} slices') + mporder = 1 + 
int(math.ceil(num_slice_groups/4.0)) + app.debug(f'MPorder: {mporder}') eddy_options = ['--flm=cubic'] if shared.eddy_cubicflm else [] if shared.eddy_repol: eddy_options.append('--repol') - if shared.eddy_mporder and have_slice_timing: - eddy_options.append('--mporder=' + str(mporder)) + if shared.eddy_mporder and mporder is not None: + eddy_options.append(f'--mporder={mporder}') if shared.eddy_mbs: eddy_options.append('--estimate_move_by_susceptibility') # From 54cff609a1c8a11d283dbe0fb23c125328aaddc8 Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Wed, 4 Mar 2026 11:05:25 +1100 Subject: [PATCH 15/16] Preproc: Fixes to inter-modal registration - Use eroded masks for histogram matching and registration to mitigate hypointensities at the peripheries of the masks leading to hyperintensities in the contrast-inverted images. - Perform contrast inversion using a linear rather than reciprocal transform, taking into account the prospect of values that are negative / zero / small in magnitude (which could be introduced by eg. gradient non-linearity distortion correction of the T1w) that have a deleterious effect on the reciprocal transform. Closes #142. Closes #141. 
--- mrtrix3_connectome/preproc/run.py | 90 +++++++++++++++++++------------ 1 file changed, 56 insertions(+), 34 deletions(-) diff --git a/mrtrix3_connectome/preproc/run.py b/mrtrix3_connectome/preproc/run.py index 12eafeb..1ff95b3 100644 --- a/mrtrix3_connectome/preproc/run.py +++ b/mrtrix3_connectome/preproc/run.py @@ -387,9 +387,9 @@ def run_preproc(bids_dir, session, shared, for entry in dwi_image_list: run.command([shared.dwidenoise_cmd, entry, - f'{entry.with_suffix("")}_denoised.mif']) + f'{entry.stem}_denoised.mif']) app.cleanup(entry) - dwi_image_list = [pathlib.Path(f'{entry.with_suffix("")}_denoised.mif') \ + dwi_image_list = [pathlib.Path(f'{entry.stem}_denoised.mif') \ for entry in dwi_image_list] # Step 2: Gibbs ringing removal @@ -397,23 +397,23 @@ def run_preproc(bids_dir, session, shared, app.console('Performing Gibbs ringing removal for DWI' f'{" and fmap" if fmap_image_list else ""} data') for i in dwi_image_list: - run.command(f'mrdegibbs {i} {i.with_suffix("")}_degibbs.mif' + run.command(f'mrdegibbs {i} {i.stem}_degibbs.mif' ' -nshifts 50') app.cleanup(i) - dwi_image_list = [pathlib.Path(f'{i.with_suffix("")}_degibbs.mif') \ + dwi_image_list = [pathlib.Path(f'{i.stem}_degibbs.mif') \ for i in dwi_image_list] for i in fmap_image_list: - run.command(f'mrdegibbs {i} {i.with_suffix("")}_degibbs.mif' + run.command(f'mrdegibbs {i} {i.stem}_degibbs.mif' ' -nshifts 50') app.cleanup(i) - fmap_image_list = [pathlib.Path(f'{i.with_suffix("")}_degibbs.mif') \ + fmap_image_list = [pathlib.Path(f'{i.stem}_degibbs.mif') \ for i in fmap_image_list] # If DWI data are complex, take the magnitude new_dwi_image_list = [] for entry in dwi_image_list: if image.Header(entry).datatype().startswith('CFloat'): - mag_entry = pathlib.Path(f'{entry.with_suffix("")}_mag.mif') + mag_entry = pathlib.Path(f'{entry.stem}_mag.mif') run.command(f'mrcalc {entry} -abs {mag_entry}') app.cleanup(entry) new_dwi_image_list.append(mag_entry) @@ -446,9 +446,9 @@ def run_preproc(bids_dir, 
session, shared, fmap_transformed_image_list = [] for item in fmap_image_list: - affine_transform_filepath = pathlib.Path(f'{item.with_suffix("")}2dwi_affine.txt') - rigid_transform_filepath = pathlib.Path(f'{item.with_suffix("")}2dwi_rigid.txt') - fmap_transformed_filepath = pathlib.Path(f'{item.with_suffix("")}_transformed.mif') + affine_transform_filepath = pathlib.Path(f'{item.stem}2dwi_affine.txt') + rigid_transform_filepath = pathlib.Path(f'{item.stem}2dwi_rigid.txt') + fmap_transformed_filepath = pathlib.Path(f'{item.stem}_transformed.mif') run.command(['transformcalc', item, dwifslpreproc_input, @@ -482,7 +482,7 @@ def run_preproc(bids_dir, session, shared, fmap_resampled_image_list = [] for item in fmap_transformed_image_list: fmap_resampled_image_path = \ - pathlib.Path(f'{item.with_suffix("")}_resampled.mif') + pathlib.Path(f'{item.stem}_resampled.mif') run.command(['mrtransform', item, '-template', dwifslpreproc_input, @@ -520,7 +520,7 @@ def run_preproc(bids_dir, session, shared, raise MRtrixError('First DWI volume is not b=0; ' 'cannot utilise b=0 volumes ' 'prior to DWI volumes only') - bzero_image = pathlib.Path(f'{dwi_image.with_suffix("")}_bzero.mif') + bzero_image = pathlib.Path(f'{dwi_image.stem}_bzero.mif') run.command(['mrconvert', dwi_image, bzero_image, @@ -644,7 +644,7 @@ def run_preproc(bids_dir, session, shared, dwifslpreproc_output = pathlib.Path( 'dwifslpreproc_out.mif' \ if dwifslpreproc_input == 'dwifslpreproc_in.mif' \ - else (f'{dwifslpreproc_input.with_suffix("")}_preproc.mif')) + else (f'{dwifslpreproc_input.stem}_preproc.mif')) eddy_olnstd_value = 4.0 # The internal eddy default eddy_olnstd_option = [] @@ -724,7 +724,7 @@ def run_preproc(bids_dir, session, shared, assert scanner_name is not None assert scanner_name in shared.gdc_images app.console('Applying gradient non-linearity distortion correction') - dwi_gdc_image = pathlib.Path('dwi_gdc.mif') + dwi_gdc_image = pathlib.Path(f'{dwi_image.stem}_gdc.mif') 
run.command(['mrtransform', dwifslpreproc_output, dwi_gdc_image, '-template', dwifslpreproc_output, '-warp', shared.gdc_images[scanner_name], @@ -743,7 +743,7 @@ def run_preproc(bids_dir, session, shared, 'intensity normalisation, ' 'and DWI brain mask derivation ' 'via dwibiasnormmask command') - dwi_biasnorm_image = pathlib.Path('dwi_biasnorm.mif') + dwi_biasnorm_image = pathlib.Path(f'{dwi_image.stem}_biasnorm.mif') dwi_mask_image = pathlib.Path('dwi_mask.mif') # Note that: # 1. The first of these results in the synthstrip command @@ -766,8 +766,8 @@ def run_preproc(bids_dir, session, shared, # Step 6: Crop images to reduce storage space # (but leave some padding on the sides) - dwi_cropped_image = pathlib.Path('dwi_crop.mif') - dwi_cropped_mask_image = pathlib.Path('mask_crop.mif') + dwi_cropped_image = pathlib.Path(f'{dwi_image.stem}_crop.mif') + dwi_cropped_mask_image = pathlib.Path('dwi_mask_crop.mif') run.command(f'mrgrid {dwi_image} crop {dwi_cropped_image} ' f'-mask {dwi_mask_image} -uniform -3') app.cleanup(dwi_image) @@ -779,24 +779,44 @@ def run_preproc(bids_dir, session, shared, # Step 7: DWI->T1 registration if t1w_image: - - # Step 7.1: Generate target images for T1w->DWI registration app.console('Generating contrast-matched images for ' 'inter-modal registration between DWIs and T1w') + + # Step 7.1: Use eroded masks for both DWI and T1w images: + # in both cases, voxels at the periphery of the brain + # are typically hypointense, + # leading to a hyper-intense ring around the periphery of the mask + # once the contrast inversion is applied + dwi_mask_eroded_image = f'{dwi_mask_image.stem}_eroded.mif' + run.command(f'maskfilter {dwi_mask_image} erode {dwi_mask_eroded_image}') + run.command('maskfilter T1w_mask.mif erode T1w_mask_eroded.mif') + + # Step 7.2: Generate target images for T1w->DWI registration run.command(f'dwiextract {dwi_image} -bzero - | ' 'mrcalc - 0.0 -max - | ' 'mrmath - mean -axis 3 dwi_meanbzero.mif') - run.command(f'mrcalc 1 
dwi_meanbzero.mif -div {dwi_mask_image} -mult - | ' + # Reworked contrast inversion + # Rather than a reciprocal as in the INVERSE publication, + # which can behave badly in the presence of negatives / zeroes / small positive values + # (which can be introduced even in the T1w due to GDC) + # instead do a linear transform so that the minimum value maps to 1.0 + # and the maximum value maps to 0.0; + # any non-linear distribution of intensities within that range + # is best dealt with by the histogram matching algorithm + dwi_meanbzero_stats = image.statistics('dwi_meanbzero.mif', mask=dwi_mask_eroded_image) + t1w_stats = image.statistics(t1w_image, mask='T1w_mask_eroded.mif') + run.command(f'mrcalc {dwi_meanbzero_stats.max} dwi_meanbzero.mif -sub ' + f'{dwi_meanbzero_stats.max - dwi_meanbzero_stats.min} -div - | ' f'mrhistmatch nonlinear - {t1w_image} dwi_pseudoT1w.mif ' - f'-mask_input {dwi_mask_image} ' - '-mask_target T1w_mask.mif') - run.command(f'mrcalc 1 {t1w_image} -div ' - f'{"" if t1w_is_premasked else "T1w_mask.mif -mult "}- | ' + f'-mask_input {dwi_mask_eroded_image} ' + '-mask_target T1w_mask_eroded.mif') + run.command(f'mrcalc {t1w_stats.max} {t1w_image} -sub ' + f'{t1w_stats.max - t1w_stats.min} -div - | ' 'mrhistmatch nonlinear - dwi_meanbzero.mif T1w_pseudobzero.mif ' - '-mask_input T1w_mask.mif ' - f'-mask_target {dwi_mask_image}') + '-mask_input T1w_mask_eroded.mif ' + f'-mask_target {dwi_mask_eroded_image}') - # Step 7.2: Perform DWI->T1w registration + # Step 7.3: Perform DWI->T1w *registration* # Note that two registrations are performed: # Even though we have a symmetric registration, generation of the # two histogram-matched images means that you will get slightly @@ -807,17 +827,19 @@ def run_preproc(bids_dir, session, shared, transform_b0_pb0 = pathlib.Path('rigid_bzero_to_pseudobzero.txt') run.command(f'mrregister dwi_pseudoT1w.mif {t1w_image}' ' -type rigid' - f' -mask1 {dwi_mask_image}' - ' -mask2 T1w_mask.mif' + f' -mask1 
{dwi_mask_eroded_image}' + ' -mask2 T1w_mask_eroded.mif' f' -rigid {transform_pt1w_t1w}') run.command('mrregister dwi_meanbzero.mif T1w_pseudobzero.mif' ' -type rigid' - f' -mask1 {dwi_mask_image}' - ' -mask2 T1w_mask.mif' + f' -mask1 {dwi_mask_eroded_image}' + ' -mask2 T1w_mask_eroded.mif' f' -rigid {transform_b0_pb0}') app.cleanup('dwi_meanbzero.mif') + app.cleanup('T1w_mask_eroded.mif') + app.cleanup(dwi_mask_eroded_image) - # Step 7.3: Perform DWI->T1w transformation + # Step 7.4: Perform DWI->T1w *transformation* # In this scenario, we're going to transform the DWI data to the T1w # rather than the other way around, since the T1w is more likely to # be used as a common reference across multiple analysis pipelines, @@ -830,8 +852,8 @@ def run_preproc(bids_dir, session, shared, transform_average]) app.cleanup(transform_pt1w_t1w) app.cleanup(transform_b0_pb0) - transformed_dwi_image = pathlib.Path(f'{dwi_image.with_suffix("")}_transform.mif') - transformed_dwi_mask_image = pathlib.Path(f'{dwi_mask_image.with_suffix("")}_transform.mif') + transformed_dwi_image = pathlib.Path(f'{dwi_image.stem}_transformed.mif') + transformed_dwi_mask_image = pathlib.Path(f'{dwi_mask_image.stem}_transformed.mif') run.command(['mrtransform', dwi_image, transformed_dwi_image, From 511797599780364263733e85524dda53208d051d Mon Sep 17 00:00:00 2001 From: Robert Smith Date: Thu, 5 Mar 2026 16:15:59 +1100 Subject: [PATCH 16/16] Fix variable name error Error in modifications in 54cff609a1c8a11d283dbe0fb23c125328aaddc8. 
--- mrtrix3_connectome/preproc/run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mrtrix3_connectome/preproc/run.py b/mrtrix3_connectome/preproc/run.py index 1ff95b3..c13a55a 100644 --- a/mrtrix3_connectome/preproc/run.py +++ b/mrtrix3_connectome/preproc/run.py @@ -724,7 +724,7 @@ def run_preproc(bids_dir, session, shared, assert scanner_name is not None assert scanner_name in shared.gdc_images app.console('Applying gradient non-linearity distortion correction') - dwi_gdc_image = pathlib.Path(f'{dwi_image.stem}_gdc.mif') + dwi_gdc_image = pathlib.Path(f'{dwifslpreproc_output.stem}_gdc.mif') run.command(['mrtransform', dwifslpreproc_output, dwi_gdc_image, '-template', dwifslpreproc_output, '-warp', shared.gdc_images[scanner_name],