From e0662bb029b999d4de01e8e8148508f83922c01a Mon Sep 17 00:00:00 2001 From: Matt Cieslak Date: Thu, 5 Sep 2024 10:36:27 -0400 Subject: [PATCH 1/3] initial commit [skip ci] --- qsiprep/cli/parser.py | 8 ++++++++ qsiprep/config.py | 3 +++ 2 files changed, 11 insertions(+) diff --git a/qsiprep/cli/parser.py b/qsiprep/cli/parser.py index e736f82e..f198a510 100644 --- a/qsiprep/cli/parser.py +++ b/qsiprep/cli/parser.py @@ -558,6 +558,14 @@ def _bids_filter(value, parser): help="EXPERIMENTAL/TEMPORARY: Use SyN correction in addition to " "fieldmap correction, if available", ) + g_syn.add_argument( + "--syn-method", + "--syn_method", + choices=["synb0", "legacy"], + action="store", + default="synb0", + help="Mehtod for SYN unwarping. Options are synb0 (default) or legacy", + ) g_other = parser.add_argument_group("Other options") g_other.add_argument("--version", action="version", version=verstr) diff --git a/qsiprep/config.py b/qsiprep/config.py index aecadccd..69545bc1 100644 --- a/qsiprep/config.py +++ b/qsiprep/config.py @@ -602,6 +602,9 @@ class workflow(_Config): use_syn_sdc = None """Run *fieldmap-less* susceptibility-derived distortions estimation in the absence of any alternatives.""" + syn_method = None + """Which method to use for synthetic distortion correction. Must be + synb0 or legacy.""" class loggers: From eeeba1a1d3c220b95f7a60c40ad7ba0f71aa4f71 Mon Sep 17 00:00:00 2001 From: Matt Cieslak Date: Sat, 7 Sep 2024 11:38:35 -0400 Subject: [PATCH 2/3] anat workflow seems to work --- Dockerfile | 2 +- qsiprep/interfaces/synb0.py | 24 ++++++++ qsiprep/workflows/anatomical/volume.py | 79 +++++++++++++++++++++++++- 3 files changed, 103 insertions(+), 2 deletions(-) create mode 100644 qsiprep/interfaces/synb0.py diff --git a/Dockerfile b/Dockerfile index 9f7d980b..10dc1f87 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,7 +6,7 @@ RUN apt-get update && \ COPY . 
def get_synb0_atlas(masked=True, res="low"):
    """Return the path to a SynB0-DISCO MNI atlas image.

    The atlas files follow the naming convention shipped with v3.1 of
    Synb0-DISCO and are located via the ``SYNB0_ATLASES`` environment
    variable.

    Parameters
    ----------
    masked : bool
        If True (default), return the brain-masked version of the atlas.
    res : {"low", "high"}
        ``"high"`` selects the full-resolution atlas; ``"low"`` (default)
        selects the 2.5mm atlas used as SynB0-DISCO's model input grid.

    Returns
    -------
    str
        Path to the requested atlas NIfTI file inside ``SYNB0_ATLASES``.

    Raises
    ------
    ValueError
        If ``res`` is not one of ``"low"`` or ``"high"``.
    RuntimeError
        If the ``SYNB0_ATLASES`` environment variable is not defined.
    """
    # Fail loudly on an unknown resolution instead of silently falling
    # through to the low-resolution atlas.
    if res not in ("low", "high"):
        raise ValueError(f"Unknown res '{res}': expected 'low' or 'high'.")

    atlas_dir = os.getenv("SYNB0_ATLASES")
    if not atlas_dir:
        raise RuntimeError("Unable to locate SynB0 atlases. Define a SYNB0_ATLASES variable.")

    res_str = "" if res == "high" else "_2_5"
    mask_str = "_mask" if masked else ""

    atlas_file = f"mni_icbm152_t1_tal_nlin_asym_09c{mask_str}{res_str}.nii.gz"
    return op.join(atlas_dir, atlas_file)
def init_synb0_anat_wf(name="synb0_anat_wf") -> Workflow:
    """Create a T1w image that has been prepared/scaled for SynB0-DISCO.

    The steps replicate the ``normalize_T1.sh`` script from v3.1 of
    Synb0-DISCO: conversion to mgz, FreeSurfer N3 bias correction and
    intensity normalization, conversion back to NIfTI, and resampling
    onto the model's 2.5mm atlas grid.

    Parameters
    ----------
    name : str
        Workflow name (default ``"synb0_anat_wf"``).

    Inputs
    ------
    t1w_brain_acpc
        Skull-stripped, AC-PC-aligned T1-weighted structural image.

    Outputs
    -------
    t1w_brain_acpc_nu
        Full resolution, bias-corrected and intensity-normalized brain image.
    t1w_brain_acpc_nu_2_5
        The ``t1w_brain_acpc_nu`` image resampled into SynB0-DISCO's
        2.5mm input grid.
    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(niu.IdentityInterface(fields=["t1w_brain_acpc"]), name="inputnode")
    outputnode = pe.Node(
        niu.IdentityInterface(fields=["t1w_brain_acpc_nu", "t1w_brain_acpc_nu_2_5"]),
        name="outputnode",
    )

    # mri_convert $JOB_PATH/T1.nii.gz $JOB_PATH/T1.mgz
    convert_to_mgz = pe.Node(
        freesurfer.MRIConvert(out_type="mgz"),
        name="convert_to_mgz",
    )

    # mri_nu_correct.mni --i $JOB_PATH/T1.mgz --o $JOB_PATH/T1_N3.mgz --n 2
    nu_mni = pe.Node(
        freesurfer.MNIBiasCorrection(iterations=2),
        name="nu_mni",
    )

    # mri_normalize -g 1 -mprage $JOB_PATH/T1_N3.mgz $JOB_PATH/T1_norm.mgz
    # NOTE: -mprage has no dedicated trait on the Normalize interface, so it
    # is passed through ``args``.
    mri_normalize = pe.Node(
        freesurfer.Normalize(gradient=1, args="-mprage", out_file="T1_norm.mgz"),
        name="mri_normalize",
    )

    # mri_convert $JOB_PATH/T1_norm.mgz $T1_NORM_PATH
    convert_to_nii = pe.Node(
        freesurfer.MRIConvert(out_type="niigz"),
        name="convert_to_nii",
    )

    # From prepare_input.sh:
    # "antsApplyTransforms -d 3 -i $T1_NORM_PATH -r $T1_ATLAS_2_5_PATH \
    #    -n BSpline -t "$ANTS_OUT"0GenericAffine.mat -o $T1_NORM_LIN_ATLAS_2_5_PATH"
    resample_to_model_grid = pe.Node(
        ants.ApplyTransforms(
            reference_image=get_synb0_atlas(masked=True, res="low"),
            dimension=3,
            interpolation="BSpline",
            # FIX: ``transforms`` is a multi-object input; pass a list, as the
            # other identity resamplings in this module do
            # (init_dl_prep_wf, init_synthstrip_wf).
            transforms=["identity"],
        ),
        name="resample_to_model_grid",
    )
    workflow.connect([
        (inputnode, convert_to_mgz, [('t1w_brain_acpc', 'in_file')]),
        (convert_to_mgz, nu_mni, [('out_file', 'in_file')]),
        (nu_mni, mri_normalize, [('out_file', 'in_file')]),
        (mri_normalize, convert_to_nii, [('out_file', 'in_file')]),
        (convert_to_nii, outputnode, [('out_file', 't1w_brain_acpc_nu')]),
        (convert_to_nii, resample_to_model_grid, [('out_file', 'input_image')]),
        (resample_to_model_grid, outputnode, [('output_image', 't1w_brain_acpc_nu_2_5')])
    ])  # fmt:skip
    return workflow
set ft=python sts=4 ts=4 sw=4 et: + +""" +Creating a synthetic b=0 for distortion correction +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + + +""" + +from nipype.interfaces import afni, ants, mrtrix3 +from nipype.interfaces import utility as niu +from nipype.interfaces.ants import BrainExtraction, N4BiasFieldCorrection +from nipype.interfaces.base import traits +from nipype.pipeline import engine as pe +from niworkflows.interfaces.images import TemplateDimensions +from niworkflows.interfaces.reportlets.masks import ROIsPlot +from pkg_resources import resource_filename as pkgr + +from ... import config +from ...engine import Workflow +from ...interfaces import Conform, DerivativesDataSink +from ...interfaces.anatomical import DesaturateSkull, GetTemplate, VoxelSizeChooser +from ...interfaces.freesurfer import ( + FixHeaderSynthStrip, + MockSynthSeg, + MockSynthStrip, + PrepareSynthStripGrid, + SynthSeg, +) +from ...interfaces.itk import AffineToRigid, DisassembleTransform +from ...interfaces.niworkflows import RobustMNINormalizationRPT +from ...utils.misc import fix_multi_source_name + +ANTS_VERSION = BrainExtraction().version or "" +FS_VERSION = "7.3.1" + + +# pylint: disable=R0914 +def init_synb0_creation_wf(): + r""" + This workflow creates a synthetic b=0 image aligned to the preprocessed T1 + + Inputs + ------ + synb0_t1w + T1w from anatomical workflow that has been created for synb0 + distorted_b0 + distorted b=0 image that will be sent to TOPUP + + Outputs + ------- + undistorted_b0 + + + + """ + + workflow = Workflow(name="anat_preproc_wf") + inputnode = pe.Node( + niu.IdentityInterface( + fields=["t1w", "distorted_b0"], + ), + name="inputnode", + ) + outputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "t1_preproc", + "t2_preproc", + "t1_brain", + "t1_mask", + "t1_seg", + "t1_aseg", + "t1_aparc", + "t1_2_mni", + "t1_2_mni_forward_transform", + "t1_2_mni_reverse_transform", + "t2w_unfatsat", + "segmentation_qc", + "template_transforms", + 
"acpc_transform", + "acpc_inv_transform", + "dwi_sampling_grid", + ] + ), + name="outputnode", + ) + + dwi_only = config.workflow.anat_modality == "none" + + # XXX: This is a temporary solution until QSIPrep supports flexible output spaces. + get_template = pe.Node( + GetTemplate( + template_name=config.workflow.anatomical_template, + anatomical_contrast=config.workflow.anat_modality, + ), + name="get_template_image", + ) + mask_template = pe.Node( + afni.Calc(expr="a*b", outputtype="NIFTI_GZ"), + name="mask_template", + ) + reorient_brain_to_lps = pe.Node( + afni.Resample(orientation="RAI", outputtype="NIFTI_GZ"), + name="reorient_brain_to_lps", + ) + reorient_mask_to_lps = pe.Node( + afni.Resample(orientation="RAI", outputtype="NIFTI_GZ"), + name="reorient_mask_to_lps", + ) + + # Create the output reference grid_image + reference_grid_wf = init_output_grid_wf() + workflow.connect([ + (get_template, mask_template, [ + ('template_file', 'in_file_a'), + ('mask_file', 'in_file_b'), + ]), + (get_template, reorient_mask_to_lps, [('mask_file', 'in_file')]), + (mask_template, reorient_brain_to_lps, [('out_file', 'in_file')]), + (reorient_brain_to_lps, reference_grid_wf, [('out_file', 'inputnode.template_image')]), + (reference_grid_wf, outputnode, [('outputnode.grid_image', 'dwi_sampling_grid')]), + ]) # fmt:skip + + if dwi_only: + config.loggers.workflow.info( + "No anatomical scans will be processed! Visual reports will show template masks." 
+ ) + workflow.connect([ + (reorient_brain_to_lps, outputnode, [("out_file", "t1_brain")]), + (reorient_mask_to_lps, outputnode, [ + ('out_file', 't1_mask'), + ('out_file', 't1_seg'), + ]), + ]) # fmt:skip + + reorient_template_to_lps = pe.Node( + afni.Resample(orientation="RAI", outputtype="NIFTI_GZ"), + name="reorient_template_to_lps", + ) + workflow.connect([ + (get_template, reorient_template_to_lps, [('template_file', 'in_file')]), + (reorient_template_to_lps, outputnode, [('out_file', 't1_preproc')]), + ]) # fmt:skip + + workflow.add_nodes([inputnode]) + return workflow + + contrast = config.workflow.anat_modality[:-1] + desc = """Anatomical data preprocessing + +: """ + desc += ( + f"""\ +A total of {num_anat_images} {contrast}-weighted ({contrast}w) images were found within the input +BIDS dataset. +All of them were corrected for intensity non-uniformity (INU) +using `N4BiasFieldCorrection` [@n4, ANTs {ANTS_VERSION}]. +""" + if num_anat_images > 1 + else f"""\ +The {contrast}-weighted ({contrast}w) image was corrected for intensity non-uniformity (INU) +using `N4BiasFieldCorrection` [@n4, ANTs {ANTS_VERSION}], +and used as an anatomical reference throughout the workflow. +""" + ) + + # Ensure there is 1 and only 1 anatomical reference + anat_reference_wf = init_anat_template_wf(num_images=num_anat_images) + + # Do some padding to prevent memory issues in the synth workflows + pad_anat_reference_wf = init_dl_prep_wf() + + # Skull strip the anatomical reference + synthstrip_anat_wf = init_synthstrip_wf( + unfatsat=config.workflow.anat_modality == "T2w", + name="synthstrip_anat_wf", + ) + + # Segment the anatomical reference + synthseg_anat_wf = init_synthseg_wf() + + # Synthstrip is used a lot elsewhere, so make boilerplate for the anatomy-specific + # version here. 
TODO: get version number automatically + workflow.__postdesc__ = """\ +Brain extraction was performed on the {contrast} image using +SynthStrip [@synthstrip] and automated segmentation was +performed using SynthSeg [@synthseg1, @synthseg2] from +FreeSurfer version {fs_version}. """.format( + fs_version=FS_VERSION, contrast=config.workflow.anat_modality + ) + + # Perform registrations + anat_normalization_wf = init_anat_normalization_wf(has_rois=has_rois) + + # Resampling + rigid_acpc_resample_brain = pe.Node( + ants.ApplyTransforms(input_image_type=0, interpolation="LanczosWindowedSinc"), + name="rigid_acpc_resample_brain", + ) + rigid_acpc_resample_head = pe.Node( + ants.ApplyTransforms(input_image_type=0, interpolation="LanczosWindowedSinc"), + name="rigid_acpc_resample_head", + ) + rigid_acpc_resample_unfatsat = pe.Node( + ants.ApplyTransforms(input_image_type=0, interpolation="LanczosWindowedSinc"), + name="rigid_acpc_resample_unfatsat", + ) + rigid_acpc_resample_aseg = pe.Node( + ants.ApplyTransforms(input_image_type=0, interpolation="MultiLabel"), + name="rigid_acpc_resample_aseg", + ) + rigid_acpc_resample_mask = pe.Node( + ants.ApplyTransforms(input_image_type=0, interpolation="MultiLabel"), + name="rigid_acpc_resample_mask", + ) + + acpc_aseg_to_dseg = pe.Node( + mrtrix3.LabelConvert( + in_lut=pkgr("qsiprep", "data/FreeSurferColorLUT.txt"), + in_config=pkgr("qsiprep", "data/FreeSurfer2dseg.txt"), + out_file="acpc_dseg.nii.gz", + ), + name="acpc_aseg_to_dseg", + ) + + # What to do about T2w's? 
+ if config.workflow.anat_modality == "T2w": + workflow.connect([ + (synthstrip_anat_wf, rigid_acpc_resample_unfatsat, [ + ('outputnode.unfatsat', 'input_image')]), + (anat_normalization_wf, rigid_acpc_resample_unfatsat, [ + ('outputnode.to_template_rigid_transform', 'transforms')]), + (reorient_brain_to_lps, rigid_acpc_resample_unfatsat, [ + ('out_file', 'reference_image'), + ]), + (rigid_acpc_resample_unfatsat, outputnode, [('output_image', 't2w_unfatsat')]), + (rigid_acpc_resample_head, outputnode, [('output_image', 't2_preproc')])]) # fmt:skip + else: + if num_additional_t2ws > 0: + t2w_preproc_wf = init_t2w_preproc_wf( + num_t2ws=num_additional_t2ws, + name="t2w_preproc_wf", + ) + workflow.connect([ + (rigid_acpc_resample_brain, t2w_preproc_wf, [ + ('output_image', 'inputnode.t1_brain')]), + (inputnode, t2w_preproc_wf, [('t2w', 'inputnode.t2w_images')]), + (t2w_preproc_wf, outputnode, [ + ('outputnode.t2_preproc', 't2_preproc'), + ('outputnode.t2w_unfatsat', 't2w_unfatsat')]) + ]) # fmt:skip + + seg2msks = pe.Node(niu.Function(function=_seg2msks), name="seg2msks") + seg_rpt = pe.Node(ROIsPlot(colors=["r", "magenta", "b", "g"]), name="seg_rpt") + anat_reports_wf = init_anat_reports_wf() + + workflow.connect([ + (inputnode, anat_reference_wf, [ + (config.workflow.anat_modality.lower(), 'inputnode.images')]), + + # Make a single anatomical reference. Pad it. 
+ (anat_reference_wf, pad_anat_reference_wf, [ + ('outputnode.template', 'inputnode.image')]), + (anat_reference_wf, outputnode, [ + ('outputnode.template_transforms', 'anat_template_transforms')]), + + # SynthStrip + (pad_anat_reference_wf, synthstrip_anat_wf, [ + ('outputnode.padded_image', 'inputnode.padded_image')]), + (anat_reference_wf, synthstrip_anat_wf, [ + ('outputnode.template', 'inputnode.original_image')]), + + # SynthSeg + (pad_anat_reference_wf, synthseg_anat_wf, [ + ('outputnode.padded_image', 'inputnode.padded_image')]), + (anat_reference_wf, synthseg_anat_wf, [ + ('outputnode.template', 'inputnode.original_image')]), + (synthseg_anat_wf, outputnode, [ + ('outputnode.qc_file', 'segmentation_qc')]), + + # Make AC-PC transform, do nonlinear registration if requested + (inputnode, anat_normalization_wf, [ + ("roi", "inputnode.roi") + ]), + (synthstrip_anat_wf, anat_normalization_wf, [ + ('outputnode.brain_mask', 'inputnode.brain_mask')]), + (anat_reference_wf, anat_normalization_wf, [ + ('outputnode.bias_corrected', 'inputnode.anatomical_reference')]), + (reorient_brain_to_lps, anat_normalization_wf, [ + ('out_file', 'inputnode.template_image'), + ]), + (reorient_mask_to_lps, anat_normalization_wf, [('out_file', 'inputnode.template_mask')]), + (anat_normalization_wf, outputnode, [ + ('outputnode.to_template_rigid_transform', 'acpc_transform'), + ('outputnode.from_template_rigid_transform', 'acpc_inv_transform'), + ('outputnode.to_template_nonlinear_transform', 't1_2_mni_forward_transform'), + ('outputnode.from_template_nonlinear_transform', 't1_2_mni_reverse_transform')]), + + # Resampling + (synthstrip_anat_wf, rigid_acpc_resample_brain, [ + ('outputnode.brain_image', 'input_image')]), + (synthstrip_anat_wf, rigid_acpc_resample_mask, [ + ('outputnode.brain_mask', 'input_image')]), + + (anat_reference_wf, rigid_acpc_resample_head, [ + ('outputnode.bias_corrected', 'input_image')]), + (synthseg_anat_wf, rigid_acpc_resample_aseg, [ + 
('outputnode.aparc_image', 'input_image')]), + + (reorient_brain_to_lps, rigid_acpc_resample_brain, [('out_file', 'reference_image')]), + (reorient_brain_to_lps, rigid_acpc_resample_mask, [('out_file', 'reference_image')]), + (reorient_brain_to_lps, rigid_acpc_resample_head, [('out_file', 'reference_image')]), + (reorient_brain_to_lps, rigid_acpc_resample_aseg, [('out_file', 'reference_image')]), + (anat_normalization_wf, rigid_acpc_resample_brain, [ + ('outputnode.to_template_rigid_transform', 'transforms')]), + (anat_normalization_wf, rigid_acpc_resample_mask, [ + ('outputnode.to_template_rigid_transform', 'transforms')]), + (anat_normalization_wf, rigid_acpc_resample_head, [ + ('outputnode.to_template_rigid_transform', 'transforms')]), + (anat_normalization_wf, rigid_acpc_resample_aseg, [ + ('outputnode.to_template_rigid_transform', 'transforms')]), + (rigid_acpc_resample_brain, outputnode, [('output_image', 't1_brain')]), + (rigid_acpc_resample_mask, outputnode, [('output_image', 't1_mask')]), + (rigid_acpc_resample_head, outputnode, [('output_image', 't1_preproc')]), + (rigid_acpc_resample_aseg, outputnode, [('output_image', 't1_aseg')]), + (rigid_acpc_resample_aseg, acpc_aseg_to_dseg, [('output_image', 'in_file')]), + (acpc_aseg_to_dseg, outputnode, [('out_file', 't1_seg')]), + + # Reports + (outputnode, seg2msks, [('t1_seg', 'in_file')]), + (seg2msks, seg_rpt, [('out', 'in_rois')]), + (outputnode, seg_rpt, [ + ('t1_mask', 'in_mask'), + ('t1_preproc', 'in_file')]), + (inputnode, anat_reports_wf, [ + ((config.workflow.anat_modality.lower(), fix_multi_source_name, + False, config.workflow.anat_modality), + 'inputnode.source_file')]), + (anat_reference_wf, anat_reports_wf, [ + ('outputnode.out_report', 'inputnode.t1_conform_report')]), + (seg_rpt, anat_reports_wf, [('out_report', 'inputnode.seg_report')]), + (anat_normalization_wf, anat_reports_wf, [ + ('outputnode.out_report', 'inputnode.t1_2_mni_report')]) + ]) # fmt:skip + + anat_derivatives_wf = 
def init_t2w_preproc_wf(num_t2ws, name="t2w_preproc_wf"):
    """Align additional T2w images to the T1w-ACPC reference.

    If T1w is the anatomical contrast, the T2w images can still be processed
    for workflows that can use them (e.g. DRBUDDI). A single T2w reference is
    built from the input T2w images, skull-stripped, affine-registered to the
    skull-stripped T1w, and resampled into the T1w-ACPC grid.

    Parameters
    ----------
    num_t2ws : int
        Number of T2w images that will be merged into the T2w reference.
    name : str
        Workflow name (default "t2w_preproc_wf").

    Inputs
    ------
    t2w_images
        List of T2w structural images.
    t1_brain
        Skull-stripped T1w in AC-PC space: registration target and output grid.

    Outputs
    -------
    t2_preproc
        Bias-corrected T2w reference resampled into the T1w-ACPC grid.
    t2w_unfatsat
        Skull-downweighted ("unfatsat") T2w resampled into the T1w-ACPC grid.
    """
    workflow = Workflow(name=name)
    inputnode = pe.Node(
        niu.IdentityInterface(fields=["t2w_images", "t1_brain"]),
        name="inputnode",
    )
    outputnode = pe.Node(
        niu.IdentityInterface(fields=["t2_preproc", "t2w_unfatsat"]),
        name="outputnode",
    )

    # Ensure there is 1 and only 1 T2w reference
    anat_reference_wf = init_anat_template_wf(num_images=num_t2ws)
    # ^ this also provides some boilerplate.
    workflow.__postdesc__ = """\
The additional T2w reference image was registered to the T1w-ACPC reference
image using an affine transformation in antsRegistration.
"""
    # Skull strip the anatomical reference
    synthstrip_anat_wf = init_synthstrip_wf(
        do_padding=True, unfatsat=True, name="synthstrip_anat_wf"
    )

    # Perform registrations
    settings = pkgr("qsiprep", "data/affine.json")
    t2_brain_to_t1_brain = pe.Node(
        ants.Registration(from_file=settings),
        name="t2_brain_to_t1_brain",
        n_procs=config.nipype.omp_nthreads,
    )

    # Resampling
    rigid_resample_t2w = pe.Node(
        ants.ApplyTransforms(input_image_type=0, interpolation="LanczosWindowedSinc"),
        name="rigid_resample_t2w",
    )
    rigid_resample_unfatsat = pe.Node(
        ants.ApplyTransforms(input_image_type=0, interpolation="LanczosWindowedSinc"),
        name="rigid_resample_unfatsat",
    )

    workflow.connect([
        (inputnode, anat_reference_wf, [
            ('t2w_images', 'inputnode.images')]),

        # Make a single anatomical reference. Pad it.
        (anat_reference_wf, synthstrip_anat_wf, [
            ('outputnode.template', 'inputnode.original_image')]),
        # (anat_reference_wf, outputnode, [
        #     ('outputnode.template_transforms', 'anat_template_transforms')]),

        # Register the skull-stripped T2w to the skull-stripped T1w
        # (fixed image is t1_brain; moving image is the T2w brain)
        (synthstrip_anat_wf, t2_brain_to_t1_brain, [
            ('outputnode.brain_image', 'moving_image')]),
        (inputnode, t2_brain_to_t1_brain, [
            ('t1_brain', 'fixed_image')]),

        # Resampling: both the bias-corrected T2w and the unfatsat T2w are
        # moved onto the T1w-ACPC grid with the affine estimated above.
        (synthstrip_anat_wf, rigid_resample_unfatsat, [
            ('outputnode.unfatsat', 'input_image')]),
        (anat_reference_wf, rigid_resample_t2w, [
            ('outputnode.bias_corrected', 'input_image')]),
        (inputnode, rigid_resample_unfatsat, [
            ('t1_brain', 'reference_image')]),
        (inputnode, rigid_resample_t2w, [
            ('t1_brain', 'reference_image')]),
        (t2_brain_to_t1_brain, rigid_resample_unfatsat, [
            ('forward_transforms', 'transforms')]),
        (t2_brain_to_t1_brain, rigid_resample_t2w, [
            ('forward_transforms', 'transforms')]),
        (rigid_resample_unfatsat, outputnode, [('output_image', 't2w_unfatsat')]),
        (rigid_resample_t2w, outputnode, [('output_image', 't2_preproc')])
    ])  # fmt:skip

    return workflow
Reorient anatomical image(s) to LPS and resample to common voxel space + template_dimensions = pe.Node(TemplateDimensions(), name="template_dimensions") + anat_conform = pe.MapNode( + Conform(deoblique_header=True), iterfield="in_file", name="anat_conform" + ) + + workflow.connect([ + (inputnode, template_dimensions, [('images', 't1w_list')]), + (template_dimensions, anat_conform, [ + ('t1w_valid_list', 'in_file'), + ('target_zooms', 'target_zooms'), + ('target_shape', 'target_shape')]), + (template_dimensions, outputnode, [('out_report', 'out_report'), + ('t1w_valid_list', 'valid_list')]), + ]) # fmt:skip + + # To match what was done in antsBrainExtraction.sh + # -c "[ 50x50x50x50,0.0000001 ]" + # -s 4 + # -b [ 200 ] + n4_interface = N4BiasFieldCorrection( + dimension=3, + copy_header=True, + n_iterations=[50, 50, 50, 50], + convergence_threshold=0.0000001, + shrink_factor=4, + bspline_fitting_distance=200.0, + num_threads=omp_nthreads, + ) + + if num_images == 1: + + def _get_first(in_list): + if isinstance(in_list, (list, tuple)): + return in_list[0] + return in_list + + n4_correct = pe.Node(n4_interface, name="n4_correct", n_procs=omp_nthreads) + + outputnode.inputs.template_transforms = [pkgr("qsiprep", "data/itkIdentityTransform.txt")] + + workflow.connect([ + (anat_conform, outputnode, [ + (('out_file', _get_first), 'template')]), + (anat_conform, n4_correct, [ + (('out_file', _get_first), 'input_image')]), + (n4_correct, outputnode, [('output_image', 'bias_corrected')]) + ]) # fmt:skip + + return workflow + + # 1. Template (only if several images) + # 1a. Correct for bias field: the bias field is an additive factor + # in log-transformed intensity units. Therefore, it is not a linear + # combination of fields and N4 fails with merged images. + # 1b. 
Align and merge if several T1w images are provided + n4_correct = pe.MapNode( + n4_interface, iterfield="input_image", name="n4_correct", n_procs=omp_nthreads + ) + + # Make an unbiased template, same as used for b=0 registration + anat_merge_wf = init_b0_hmc_wf( + align_to="first" if not config.workflow.longitudinal else "iterative", + transform="Rigid", + name="anat_merge_wf", + boilerplate=False, + prioritize_omp=True, + ) + + workflow.connect([ + (anat_conform, n4_correct, [('out_file', 'input_image')]), + (n4_correct, anat_merge_wf, [('output_image', 'inputnode.b0_images')]), + (anat_merge_wf, outputnode, [ + ('outputnode.final_template', 'template'), + ('outputnode.final_template', 'bias_corrected'), + ('outputnode.forward_transforms', 'template_transforms')])]) # fmt:skip + + return workflow + + +def init_anat_normalization_wf(has_rois=False) -> Workflow: + r""" + This workflow performs registration from the original anatomical reference to the + template anatomical reference. + + + .. 
workflow:: + :graph2use: orig + :simple_form: yes + + from qsiprep.workflows.anatomical import init_anat_normalization_wf + wf = init_anat_registration_wf(has_rois=False) + + Parameters + ---------- + has_rois : bool + Whether Registration should account for regions to exclude + + Inputs + ------ + in_file + T1-weighted structural image to skull-strip + roi + A mask to exclude regions during standardization (as list) + + Outputs + ------- + to_template_nonlinear_transform + Bias-corrected ``in_file``, before skull-stripping + to_template_rigid_transform + Skull-stripped ``in_file`` + out_mask + Binary mask of the skull-stripped ``in_file`` + out_report + Reportlet visualizing quality of skull-stripping + """ + + workflow = Workflow(name="anat_normalization_wf") + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "template_image", + "template_mask", + "anatomical_reference", + "brain_mask", + "roi", + ] + ), + name="inputnode", + ) + outputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "to_template_nonlinear_transform", + "to_template_rigid_transform", + "from_template_nonlinear_transform", + "from_template_rigid_transform", + "out_report", + ] + ), + name="outputnode", + ) + omp_nthreads = config.nipype.omp_nthreads + + # get a good ACPC transform + desc = f"""\ +The anatomical reference image was reoriented into AC-PC alignment via +a 6-DOF transform extracted from a full Affine registration to the +{config.workflow.anatomical_template} template. 
""" + + acpc_settings = pkgr( + "qsiprep", + ( + "data/intramodal_ACPC.json" + if not config.execution.sloppy + else "data/intramodal_ACPC_sloppy.json" + ), + ) + acpc_reg = pe.Node( + RobustMNINormalizationRPT( + float=True, + generate_report=False, + settings=[acpc_settings], + ), + name="acpc_reg", + n_procs=omp_nthreads, + ) + disassemble_transform = pe.Node( + DisassembleTransform(), + name="disassemble_transform", + ) + extract_rigid_transform = pe.Node(AffineToRigid(), name="extract_rigid_transform") + + workflow.connect([ + (inputnode, acpc_reg, [ + ('template_image', 'reference_image'), + ('template_mask', 'reference_mask'), + ('anatomical_reference', 'moving_image'), + ('roi', 'lesion_mask'), + ('brain_mask', 'moving_mask')]), + (acpc_reg, disassemble_transform, [ + ('composite_transform', 'in_file')]), + (disassemble_transform, extract_rigid_transform, [ + (('out_transforms', _get_affine_component), 'affine_transform')]), + (extract_rigid_transform, outputnode, [ + ('rigid_transform', 'to_template_rigid_transform'), + ('rigid_transform_inverse', 'from_template_rigid_transform')]), + ]) # fmt:skip + + # If not doing a normalization to the template, we're done + if config.execution.skip_anat_based_spatial_normalization: + workflow.__desc__ = desc + return workflow + + desc += """\ +A full nonlinear registration to the template from AC-PC space was +estimated via symmetric nonlinear registration (SyN) using antsRegistration. 
""" + rigid_acpc_resample_anat = pe.Node( + ants.ApplyTransforms(input_image_type=0, interpolation="LanczosWindowedSinc"), + name="rigid_acpc_resample_anat", + ) + config.loggers.workflow.info("Running nonlinear normalization to template") + rigid_acpc_resample_mask = pe.Node( + ants.ApplyTransforms(input_image_type=0, interpolation="MultiLabel"), + name="rigid_acpc_resample_mask", + ) + + if config.execution.sloppy: + config.loggers.workflow.info("Using QuickSyN") + # Requires a warp file: make an inaccurate one + settings = pkgr("qsiprep", "data/quick_syn.json") + anat_norm_interface = RobustMNINormalizationRPT( + float=True, generate_report=True, settings=[settings] + ) + else: + anat_norm_interface = RobustMNINormalizationRPT( + float=True, generate_report=True, flavor="precise" + ) + + anat_nlin_normalization = pe.Node( + anat_norm_interface, name="anat_nlin_normalization", n_procs=omp_nthreads + ) + anat_nlin_normalization.inputs.template = config.workflow.anatomical_template + anat_nlin_normalization.inputs.orientation = "LPS" + + workflow.connect([ + (inputnode, anat_nlin_normalization, [ + ('template_image', 'reference_image'), + ('template_mask', 'reference_mask')]), + (inputnode, rigid_acpc_resample_mask, [ + ('template_image', 'reference_image'), + ('brain_mask', 'input_image')]), + (inputnode, rigid_acpc_resample_anat, [ + ('template_image', 'reference_image'), + ('anatomical_reference', 'input_image')]), + (extract_rigid_transform, rigid_acpc_resample_anat, [ + ('rigid_transform', 'transforms')]), + (extract_rigid_transform, rigid_acpc_resample_mask, [ + ('rigid_transform', 'transforms')]), + (rigid_acpc_resample_anat, anat_nlin_normalization, [ + ('output_image', 'moving_image')]), + (rigid_acpc_resample_mask, anat_nlin_normalization, [ + ('output_image', 'moving_mask')]), + (anat_nlin_normalization, outputnode, [ + ('composite_transform', 'to_template_nonlinear_transform'), + ('inverse_composite_transform', 'from_template_nonlinear_transform'), + 
('out_report', 'out_report')]) + ]) # fmt:skip + + if has_rois: + desc += "ROI masks of abnormal tissue were incorporated into the registration. " + rigid_acpc_resample_roi = pe.Node( + ants.ApplyTransforms(input_image_type=0, interpolation="MultiLabel"), + name="rigid_acpc_resample_roi", + ) + workflow.connect([ + (rigid_acpc_resample_roi, anat_nlin_normalization, [ + ('output_image', 'lesion_mask')]), + (extract_rigid_transform, rigid_acpc_resample_roi, [ + ('rigid_transform', 'transforms')]), + (inputnode, rigid_acpc_resample_roi, [ + ('template_image', 'reference_image'), + ('roi', 'input_image')]), + ]) # fmt:skip + + workflow.__desc__ = desc + return workflow + + +def init_dl_prep_wf(name="dl_prep_wf") -> Workflow: + """Prepare images for use in the FreeSurfer deep learning functions""" + workflow = Workflow(name=name) + inputnode = pe.Node(niu.IdentityInterface(fields=["image"]), name="inputnode") + outputnode = pe.Node( + niu.IdentityInterface(fields=["padded_image"]), + name="outputnode", + ) + skulled_1mm_resample = pe.Node( + afni.Resample(outputtype="NIFTI_GZ", voxel_size=(1.0, 1.0, 1.0)), + name="skulled_1mm_resample", + ) + skulled_autobox = pe.Node( + afni.Autobox(outputtype="NIFTI_GZ", padding=3), + name="skulled_autobox", + ) + prepare_synthstrip_reference = pe.Node( + PrepareSynthStripGrid(), + name="prepare_synthstrip_reference", + ) + resample_skulled_to_reference = pe.Node( + ants.ApplyTransforms( + dimension=3, + interpolation="BSpline", + transforms=["identity"], + ), + name="resample_skulled_to_reference", + ) + + workflow.connect([ + (inputnode, skulled_1mm_resample, [('image', 'in_file')]), + (skulled_1mm_resample, skulled_autobox, [('out_file', 'in_file')]), + (skulled_autobox, prepare_synthstrip_reference, [('out_file', 'input_image')]), + (prepare_synthstrip_reference, resample_skulled_to_reference, [ + ('prepared_image', 'reference_image')]), + (inputnode, resample_skulled_to_reference, [('image', 'input_image')]), + 
(resample_skulled_to_reference, outputnode, [('output_image', 'padded_image')]) + ]) # fmt:skip + return workflow + + +def init_synthstrip_wf(do_padding=False, unfatsat=False, name="synthstrip_wf") -> Workflow: + workflow = Workflow(name=name) + inputnode = pe.Node( + niu.IdentityInterface(fields=["padded_image", "original_image"]), + name="inputnode", + ) + outputnode = pe.Node( + niu.IdentityInterface(fields=["brain_image", "brain_mask", "unfatsat"]), + name="outputnode", + ) + + if not config.execution.sloppy: + synthstrip = pe.Node( + FixHeaderSynthStrip(), # Threads are always fixed to 1 in the run + name="synthstrip", + n_procs=config.nipype.omp_nthreads, + ) + else: + synthstrip = pe.Node( + MockSynthStrip(), + name="mocksynthstrip", + ) + + mask_to_original_grid = pe.Node( + ants.ApplyTransforms( + dimension=3, transforms=["identity"], interpolation="NearestNeighbor" + ), + name="mask_to_original_grid", + ) + mask_brain = pe.Node( + ants.MultiplyImages(dimension=3, output_product_image="masked_brain.nii"), + name="mask_brain", + ) + + # For T2w images, create an artificially skull-downweighted image + if unfatsat: + desaturate_skull = pe.Node(DesaturateSkull(), name="desaturate_skull") + workflow.connect([ + (mask_brain, desaturate_skull, [('output_product_image', 'brain_mask_image')]), + (inputnode, desaturate_skull, [('original_image', 'skulled_t2w_image')]), + (desaturate_skull, outputnode, [('desaturated_t2w', 'unfatsat')]) + ]) # fmt:skip + + # If the input image isn't already padded, do it here + if do_padding: + padding_wf = init_dl_prep_wf(name="pad_before_" + name) + workflow.connect([ + (inputnode, padding_wf, [('original_image', 'inputnode.image')]), + (padding_wf, synthstrip, [('outputnode.padded_image', 'input_image')]) + ]) # fmt:skip + else: + workflow.connect([(inputnode, synthstrip, [('padded_image', 'input_image')])]) # fmt:skip + + workflow.connect([ + (synthstrip, mask_to_original_grid, [('out_brain_mask', 'input_image')]), + (inputnode, 
mask_to_original_grid, [('original_image', 'reference_image')]), + (mask_to_original_grid, outputnode, [('output_image', 'brain_mask')]), + (inputnode, mask_brain, [('original_image', 'first_input')]), + (mask_to_original_grid, mask_brain, [("output_image", "second_input")]), + (mask_brain, outputnode, [('output_product_image', 'brain_image')]) + ]) # fmt:skip + + return workflow + + +def init_synthseg_wf() -> Workflow: + workflow = Workflow(name="synthseg_wf") + inputnode = pe.Node( + niu.IdentityInterface(fields=["padded_image", "original_image"]), + name="inputnode", + ) + outputnode = pe.Node( + niu.IdentityInterface(fields=["aparc_image", "posterior_image", "qc_file"]), + name="outputnode", + ) + + if not config.execution.sloppy: + synthseg = pe.Node( + SynthSeg(fast=config.execution.sloppy, num_threads=1), # Hard code to 1 + n_procs=config.nipype.omp_nthreads, + name="synthseg", + ) + else: + synthseg = pe.Node( + MockSynthSeg(fast=config.execution.sloppy, num_threads=1), # Hard code to 1 + n_procs=config.nipype.omp_nthreads, + name="mocksynthseg", + ) + + workflow.connect([ + (inputnode, synthseg, [('padded_image', 'input_image')]), + (synthseg, outputnode, [ + ('out_seg', 'aparc_image'), + ('out_post', 'posterior_image'), + ('out_qc', 'qc_file') + ]) + ]) # fmt:skip + return workflow + + +def init_output_grid_wf() -> Workflow: + """Generate a non-oblique, uniform voxel-size grid around a brain.""" + workflow = Workflow(name="output_grid_wf") + inputnode = pe.Node( + niu.IdentityInterface(fields=["template_image", "input_image"]), + name="inputnode", + ) + outputnode = pe.Node(niu.IdentityInterface(fields=["grid_image"]), name="outputnode") + # Create the output reference grid_image + if config.workflow.output_resolution is None: + voxel_size = traits.Undefined + else: + voxel_size = config.workflow.output_resolution + padding = 4 if config.workflow.infant else 8 + + autobox_template = pe.Node( + afni.Autobox(outputtype="NIFTI_GZ", padding=padding), 
name="autobox_template" + ) + deoblique_autobox = pe.Node( + afni.Warp(outputtype="NIFTI_GZ", deoblique=True), name="deoblique_autobox" + ) + voxel_size_chooser = pe.Node( + VoxelSizeChooser(voxel_size=voxel_size), name="voxel_size_chooser" + ) + resample_to_voxel_size = pe.Node( + afni.Resample(outputtype="NIFTI_GZ"), name="resample_to_voxel_size" + ) + + workflow.connect([ + (inputnode, autobox_template, [('template_image', 'in_file')]), + (autobox_template, deoblique_autobox, [('out_file', 'in_file')]), + (deoblique_autobox, resample_to_voxel_size, [('out_file', 'in_file')]), + (resample_to_voxel_size, outputnode, [('out_file', 'grid_image')]), + (inputnode, voxel_size_chooser, [('input_image', 'input_image')]), + (voxel_size_chooser, resample_to_voxel_size, [(('voxel_size', _tupleize), 'voxel_size')]) + ]) # fmt:skip + + return workflow + + +def _tupleize(value): + # Nipype did not like having a Tuple output trait + return (value, value, value) + + +def init_anat_reports_wf() -> Workflow: + """ + Set up a battery of datasinks to store reports in the right location + """ + workflow = Workflow(name="anat_reports_wf") + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "source_file", + "t1_conform_report", + "seg_report", + "t1_2_mni_report", + "recon_report", + ] + ), + name="inputnode", + ) + + ds_report_t1_conform = pe.Node( + DerivativesDataSink( + base_directory=config.execution.output_dir, + datatype="figures", + suffix="conform", + ), + name="ds_report_t1_conform", + run_without_submitting=True, + ) + + ds_report_t1_2_mni = pe.Node( + DerivativesDataSink( + base_directory=config.execution.output_dir, + datatype="figures", + suffix="t1w2mni", + ), + name="ds_report_t1_2_mni", + run_without_submitting=True, + ) + + ds_report_t1_seg_mask = pe.Node( + DerivativesDataSink( + base_directory=config.execution.output_dir, + datatype="figures", + desc="seg", + suffix="mask", + ), + name="ds_report_t1_seg_mask", + run_without_submitting=True, + ) + + 
workflow.connect([ + (inputnode, ds_report_t1_conform, [('source_file', 'source_file'), + ('t1_conform_report', 'in_file')]), + (inputnode, ds_report_t1_seg_mask, [('source_file', 'source_file'), + ('seg_report', 'in_file')]), + ]) # fmt:skip + + if not config.execution.skip_anat_based_spatial_normalization: + workflow.connect([ + (inputnode, ds_report_t1_2_mni, [('source_file', 'source_file'), + ('t1_2_mni_report', 'in_file')]) + ]) # fmt:skip + + return workflow + + +def init_anat_derivatives_wf() -> Workflow: + """ + Set up a battery of datasinks to store derivatives in the right location + """ + workflow = Workflow(name="anat_derivatives_wf") + + inputnode = pe.Node( + niu.IdentityInterface( + fields=[ + "source_files", + "t1_template_transforms", + "t1_acpc_transform", + "t1_acpc_inv_transform", + "t1_preproc", + "t1_mask", + "t1_seg", + "t1_2_mni_forward_transform", + "t1_2_mni_reverse_transform", + "t1_2_mni", + "t1_aseg", + ] + ), + name="inputnode", + ) + + t1_name = pe.Node( + niu.Function( + function=fix_multi_source_name, + input_names=["in_files", "dwi_only", "anatomical_contrast"], + ), + name="t1_name", + ) + t1_name.inputs.anatomical_contrast = config.workflow.anat_modality + t1_name.inputs.dwi_only = False + + ds_t1_preproc = pe.Node( + DerivativesDataSink( + compress=True, + base_directory=config.execution.output_dir, + desc="preproc", + keep_dtype=True, + ), + name="ds_t1_preproc", + run_without_submitting=True, + ) + + ds_t1_mask = pe.Node( + DerivativesDataSink( + compress=True, + base_directory=config.execution.output_dir, + desc="brain", + suffix="mask", + ), + name="ds_t1_mask", + run_without_submitting=True, + ) + + ds_t1_seg = pe.Node( + DerivativesDataSink( + compress=True, + base_directory=config.execution.output_dir, + suffix="dseg", + ), + name="ds_t1_seg", + run_without_submitting=True, + ) + + ds_t1_aseg = pe.Node( + DerivativesDataSink( + compress=True, + base_directory=config.execution.output_dir, + desc="aseg", + suffix="dseg", + 
), + name="ds_t1_aseg", + run_without_submitting=True, + ) + + # Transforms + ds_t1_template_transforms = pe.MapNode( + DerivativesDataSink( + base_directory=config.execution.output_dir, + to="T1w", + mode="image", + suffix="xfm", + **{"from": "orig"}, + ), + iterfield=["source_file", "in_file"], + name="ds_t1_template_transforms", + run_without_submitting=True, + ) + + ds_t1_mni_inv_warp = pe.Node( + DerivativesDataSink( + base_directory=config.execution.output_dir, + to="T1w", + mode="image", + suffix="xfm", + **{"from": config.workflow.anatomical_template}, + ), + name="ds_t1_mni_inv_warp", + run_without_submitting=True, + ) + + ds_t1_template_acpc_transform = pe.Node( + DerivativesDataSink( + base_directory=config.execution.output_dir, + to="T1wACPC", + mode="image", + suffix="xfm", + **{"from": "T1wNative"}, + ), + name="ds_t1_template_acpc_transforms", + run_without_submitting=True, + ) + + ds_t1_template_acpc_inv_transform = pe.Node( + DerivativesDataSink( + base_directory=config.execution.output_dir, + to="T1wNative", + mode="image", + suffix="xfm", + **{"from": "T1wACPC"}, + ), + name="ds_t1_template_acpc_inv_transforms", + run_without_submitting=True, + ) + + ds_t1_mni_warp = pe.Node( + DerivativesDataSink( + base_directory=config.execution.output_dir, + to=config.workflow.anatomical_template, + mode="image", + suffix="xfm", + **{"from": "T1w"}, + ), + name="ds_t1_mni_warp", + run_without_submitting=True, + ) + + workflow.connect([ + (inputnode, t1_name, [('source_files', 'in_files')]), + (inputnode, ds_t1_template_transforms, [('source_files', 'source_file'), + ('t1_template_transforms', 'in_file')]), + (inputnode, ds_t1_template_acpc_transform, [('t1_acpc_transform', 'in_file')]), + (inputnode, ds_t1_template_acpc_inv_transform, [('t1_acpc_inv_transform', 'in_file')]), + (inputnode, ds_t1_preproc, [('t1_preproc', 'in_file')]), + (inputnode, ds_t1_mask, [('t1_mask', 'in_file')]), + (inputnode, ds_t1_seg, [('t1_seg', 'in_file')]), + (inputnode, 
ds_t1_aseg, [('t1_aseg', 'in_file')]),
+        (t1_name, ds_t1_preproc, [('out', 'source_file')]),
+        (t1_name, ds_t1_template_acpc_transform, [('out', 'source_file')]),
+        (t1_name, ds_t1_template_acpc_inv_transform, [('out', 'source_file')]),
+        (t1_name, ds_t1_aseg, [('out', 'source_file')]),
+        (t1_name, ds_t1_mask, [('out', 'source_file')]),
+        (t1_name, ds_t1_seg, [('out', 'source_file')]),
+    ])  # fmt:skip
+
+    if not config.execution.skip_anat_based_spatial_normalization:
+        workflow.connect([
+            (inputnode, ds_t1_mni_warp, [('t1_2_mni_forward_transform', 'in_file')]),
+            (inputnode, ds_t1_mni_inv_warp, [('t1_2_mni_reverse_transform', 'in_file')]),
+            (t1_name, ds_t1_mni_warp, [('out', 'source_file')]),
+            (t1_name, ds_t1_mni_inv_warp, [('out', 'source_file')]),
+        ])  # fmt:skip
+
+    return workflow
+
+
+def _seg2msks(in_file, newpath=None):
+    """Convert label values 1-3 in ``in_file`` into separate binary mask files."""
+    import nibabel as nb
+    import numpy as np
+    from nipype.utils.filemanip import fname_presuffix
+
+    nii = nb.load(in_file)
+    labels = nii.get_fdata()
+
+    out_files = []
+    for i in range(1, 4):
+        ldata = np.zeros_like(labels)
+        ldata[labels == i] = 1
+        out_files.append(fname_presuffix(in_file, suffix="_label%03d" % i, newpath=newpath))
+        nii.__class__(ldata, nii.affine, nii.header).to_filename(out_files[-1])
+
+    return out_files
+
+
+def _get_affine_component(transform_list):
+    # The disassembled transform will have the affine transform as the first element
+    if isinstance(transform_list, str):
+        return transform_list
+    return transform_list[0]