diff --git a/setup.cfg b/setup.cfg
index 21a5d11f6e..4534b49ad7 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -21,6 +21,7 @@ python_requires = >=3.8
 install_requires =
     indexed_gzip >= 0.8.8
     lockfile
+    looseversion
     matplotlib >= 2.2.0
     nibabel >= 4.0.1
     nipype >= 1.7.0
diff --git a/smriprep/conftest.py b/smriprep/conftest.py
index 138e5b9db7..9864fda688 100644
--- a/smriprep/conftest.py
+++ b/smriprep/conftest.py
@@ -1,3 +1,22 @@
 import os
+from pathlib import Path
+from tempfile import TemporaryDirectory
+import pytest
 
 os.environ['NO_ET'] = '1'
+
+
+@pytest.fixture(autouse=True, scope="session")
+def default_cwd():
+    cwd = os.getcwd()
+    with TemporaryDirectory(prefix="smriprepTest") as tmpdir:
+        try:
+            os.chdir(tmpdir)
+            yield Path(tmpdir)
+        finally:
+            os.chdir(cwd)
+
+
+@pytest.fixture(autouse=True, scope="session")
+def populate_default_cwd(default_cwd):
+    Path.write_bytes(default_cwd / 'lh.white', b'')
diff --git a/smriprep/interfaces/cifti.py b/smriprep/interfaces/cifti.py
new file mode 100644
index 0000000000..b31fd461fa
--- /dev/null
+++ b/smriprep/interfaces/cifti.py
@@ -0,0 +1,196 @@
+import json
+from pathlib import Path
+import typing as ty
+
+import nibabel as nb
+import numpy as np
+
+from nibabel import cifti2 as ci
+from nipype.interfaces.base import TraitedSpec, traits, File, SimpleInterface
+from templateflow import api as tf
+
+
+class _GenerateDScalarInputSpec(TraitedSpec):
+    surface_target = traits.Enum(
+        "fsLR",
+        usedefault=True,
+        desc="CIFTI surface target space",
+    )
+    grayordinates = traits.Enum(
+        "91k", "170k", usedefault=True, desc="Final CIFTI grayordinates"
+    )
+    scalar_surfs = traits.List(
+        File(exists=True),
+        mandatory=True,
+        desc="list of scalar surface GIFTI files (length 2 with order [L,R])",
+    )
+    scalar_name = traits.Str(mandatory=True, desc="Name of scalar")
+
+
+class _GenerateDScalarOutputSpec(TraitedSpec):
+    out_file = File(desc="generated CIFTI file")
+    out_metadata = File(desc="CIFTI metadata JSON")
+
+
+class GenerateDScalar(SimpleInterface):
+    """
+    Generate an HCP-style CIFTI-2 image from scalar surface files.
+    """
+    input_spec = _GenerateDScalarInputSpec
+    output_spec = _GenerateDScalarOutputSpec
+
+    def _run_interface(self, runtime):
+
+        surface_labels, metadata = _prepare_cifti(self.inputs.grayordinates)
+        self._results["out_file"] = _create_cifti_image(
+            self.inputs.scalar_surfs,
+            surface_labels,
+            self.inputs.scalar_name,
+            metadata,
+        )
+        metadata_file = Path("dscalar.json").absolute()
+        metadata_file.write_text(json.dumps(metadata, indent=2))
+        self._results["out_metadata"] = str(metadata_file)
+        return runtime
+
+
+def _prepare_cifti(grayordinates: str) -> ty.Tuple[list, dict]:
+    """
+    Fetch the templates required for CIFTI-2 generation, based on input surface density.
+
+    Parameters
+    ----------
+    grayordinates :
+        Total CIFTI grayordinates (91k, 170k)
+
+    Returns
+    -------
+    surface_labels
+        Surface label files for vertex inclusion/exclusion.
+    metadata
+        Dictionary with BIDS metadata.
+
+    Examples
+    --------
+    >>> surface_labels, metadata = _prepare_cifti('91k')
+    >>> surface_labels  # doctest: +ELLIPSIS
+    ['.../tpl-fsLR_hemi-L_den-32k_desc-nomedialwall_dparc.label.gii', \
+     '.../tpl-fsLR_hemi-R_den-32k_desc-nomedialwall_dparc.label.gii']
+    >>> metadata  # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
+    {'Density': '91,282 grayordinates corresponding to all of the grey matter sampled at a \
+2mm average vertex spacing...', 'SpatialReference': {'CIFTI_STRUCTURE_CORTEX_LEFT': ...}}
+
+    """
+
+    grayord_key = {
+        "91k": {
+            "surface-den": "32k",
+            "tf-res": "02",
+            "grayords": "91,282",
+            "res-mm": "2mm"
+        },
+        "170k": {
+            "surface-den": "59k",
+            "tf-res": "06",
+            "grayords": "170,494",
+            "res-mm": "1.6mm"
+        }
+    }
+    if grayordinates not in grayord_key:
+        raise NotImplementedError(f"Grayordinates {grayordinates} is not supported.")
+
+    total_grayords = grayord_key[grayordinates]['grayords']
+    res_mm = grayord_key[grayordinates]['res-mm']
+    surface_density = grayord_key[grayordinates]['surface-den']
+    # Fetch templates
+    surface_labels = [
+        str(
+            tf.get(
+                "fsLR",
+                density=surface_density,
+                hemi=hemi,
+                desc="nomedialwall",
+                suffix="dparc",
+                raise_empty=True,
+            )
+        )
+        for hemi in ("L", "R")
+    ]
+
+    tf_url = "https://templateflow.s3.amazonaws.com"
+    surfaces_url = (  # midthickness is the default, but varying levels of inflation are all valid
+        f"{tf_url}/tpl-fsLR/tpl-fsLR_den-{surface_density}_hemi-%s_midthickness.surf.gii"
+    )
+    metadata = {
+        "Density": (
+            f"{total_grayords} grayordinates corresponding to all of the grey matter sampled at a "
+            f"{res_mm} average vertex spacing on the surface"
+        ),
+        "SpatialReference": {
+            "CIFTI_STRUCTURE_CORTEX_LEFT": surfaces_url % "L",
+            "CIFTI_STRUCTURE_CORTEX_RIGHT": surfaces_url % "R",
+        }
+    }
+    return surface_labels, metadata
+
+
+def _create_cifti_image(
+    scalar_surfs: ty.Tuple[str, str],
+    surface_labels: ty.Tuple[str, str],
+    scalar_name: str,
+    metadata: ty.Optional[dict] = None,
+):
+    """
+    Generate CIFTI image in target space.
+
+    Parameters
+    ----------
+    scalar_surfs
+        Surface scalar files (L,R)
+    surface_labels
+        Surface label files used to remove medial wall (L,R)
+    metadata
+        Metadata to include in CIFTI header
+    scalar_name
+        Name to apply to scalar map
+
+    Returns
+    -------
+    out :
+        Path of the generated CIFTI dscalar file
+    """
+    brainmodels = []
+    arrays = []
+
+    for idx, hemi in enumerate(('left', 'right')):
+        labels = nb.load(surface_labels[idx])
+        mask = np.bool_(labels.darrays[0].data)
+
+        struct = f'cortex_{hemi}'
+        brainmodels.append(
+            ci.BrainModelAxis(struct, vertex=np.nonzero(mask)[0], nvertices={struct: len(mask)})
+        )
+
+        morph_scalar = nb.load(scalar_surfs[idx])
+        arrays.append(morph_scalar.darrays[0].data[mask].astype("float32"))
+
+    # provide some metadata to CIFTI matrix
+    if not metadata:
+        metadata = {
+            "surface": "fsLR",
+        }
+
+    # generate and save CIFTI image
+    hdr = ci.Cifti2Header.from_axes(
+        (ci.ScalarAxis([scalar_name]), brainmodels[0] + brainmodels[1])
+    )
+    hdr.matrix.metadata = ci.Cifti2MetaData(metadata)
+
+    img = ci.Cifti2Image(dataobj=np.atleast_2d(np.concatenate(arrays)), header=hdr)
+    img.nifti_header.set_intent("NIFTI_INTENT_CONNECTIVITY_DENSE_SCALARS")
+
+    stem = Path(scalar_surfs[0]).name.split(".")[0]
+    cifti_stem = "_".join(ent for ent in stem.split("_") if not ent.startswith("hemi-"))
+    out_file = Path.cwd() / f"{cifti_stem}.dscalar.nii"
+    img.to_filename(out_file)
+    return out_file
diff --git a/smriprep/interfaces/freesurfer.py b/smriprep/interfaces/freesurfer.py
index 82ffd14eb9..f2a77d7e8f 100644
--- a/smriprep/interfaces/freesurfer.py
+++ b/smriprep/interfaces/freesurfer.py
@@ -22,10 +22,12 @@
 #
 """Nipype's recon-all replacement."""
 import os
+from looseversion import LooseVersion
 from nipype import logging
 from nipype.utils.filemanip import check_depends
 from nipype.interfaces.base import traits, InputMultiObject, isdefined, File
 from nipype.interfaces import freesurfer as fs
+from niworkflows.interfaces import freesurfer as nwfs
 
 iflogger = logging.getLogger("nipype.interface")
 
@@ -174,6 +176,11 @@ def cmdline(self):
                 no_run = False
                 continue
 
+            # FreeSurfer changed the meaning and order of -apas2aseg without
+            # updating the recon table on the wiki. Hack it until fixed in nipype.
+            if step == 'apas2aseg' and fs.Info.looseversion() >= LooseVersion("7.3.0"):
+                infiles = []
+
             subj_dir = os.path.join(subjects_dir, self.inputs.subject_id)
             if check_depends(
                 [os.path.join(subj_dir, f) for f in outfiles],
@@ -283,3 +290,34 @@ def _gen_filename(self, name):
             return self.inputs.in_file
 
         return super()._gen_filename(name)
+
+
+class MakeMidthickness(nwfs.MakeMidthickness):
+    """Patched MakeMidthickness interface
+
+    Ensures output filenames are specified with hemisphere labels, when appropriate.
+    This may not cover all use-cases in MRIsExpand, but we're just making midthickness
+    files.
+
+    >>> from smriprep.interfaces.freesurfer import MakeMidthickness
+    >>> mris_expand = MakeMidthickness(thickness=True, distance=0.5)
+    >>> mris_expand.inputs.in_file = 'lh.white'
+    >>> mris_expand.cmdline
+    'mris_expand -thickness lh.white 0.5 lh.expanded'
+    >>> mris_expand.inputs.out_name = 'graymid'
+    >>> mris_expand.cmdline
+    'mris_expand -thickness lh.white 0.5 lh.graymid'
+
+    Explicit hemisphere labels should still be respected:
+
+    >>> mris_expand.inputs.out_name = 'rh.graymid'
+    >>> mris_expand.cmdline
+    'mris_expand -thickness lh.white 0.5 rh.graymid'
+    """
+
+    def _format_arg(self, name, trait_spec, value):
+        # FreeSurfer at some point changed whether it would add the hemi label onto the
+        # surface. Therefore we'll do it ourselves.
+        if name == "out_name":
+            value = self._associated_file(self.inputs.in_file, value)
+        return super()._format_arg(name, trait_spec, value)
diff --git a/smriprep/workflows/anatomical.py b/smriprep/workflows/anatomical.py
index ccce7ea5a2..6e56a22e8e 100644
--- a/smriprep/workflows/anatomical.py
+++ b/smriprep/workflows/anatomical.py
@@ -51,7 +51,7 @@
 from ..utils.misc import apply_lut as _apply_bids_lut, fs_isRunning as _fs_isRunning
 from .norm import init_anat_norm_wf
 from .outputs import init_anat_reports_wf, init_anat_derivatives_wf
-from .surfaces import init_anat_ribbon_wf, init_surface_recon_wf
+from .surfaces import init_anat_ribbon_wf, init_surface_recon_wf, init_morph_grayords_wf
 
 LOGGER = logging.getLogger("nipype.workflow")
 
@@ -69,6 +69,7 @@ def init_anat_preproc_wf(
     skull_strip_mode,
     skull_strip_template,
     spaces,
+    cifti_output=False,
     debug=False,
     existing_derivatives=None,
     name="anat_preproc_wf",
@@ -464,6 +465,7 @@ def _check_img(img):
         t2w=t2w,
         output_dir=output_dir,
         spaces=spaces,
+        cifti_output=cifti_output,
     )
 
     # fmt:off
@@ -639,6 +641,22 @@ def _check_img(img):
         ])
         # fmt:on
 
+    if cifti_output:
+        morph_grayords_wf = init_morph_grayords_wf(grayord_density=cifti_output)
+        anat_derivatives_wf.get_node('inputnode').inputs.cifti_density = cifti_output
+        # fmt:off
+        workflow.connect([
+            (surface_recon_wf, morph_grayords_wf, [
+                ('outputnode.subject_id', 'inputnode.subject_id'),
+                ('outputnode.subjects_dir', 'inputnode.subjects_dir'),
+            ]),
+            (morph_grayords_wf, anat_derivatives_wf, [
+                ("outputnode.cifti_morph", "inputnode.cifti_morph"),
+                ("outputnode.cifti_metadata", "inputnode.cifti_metadata"),
+            ]),
+        ])
+        # fmt:on
+
     return workflow
 
 
diff --git a/smriprep/workflows/base.py b/smriprep/workflows/base.py
index c396772818..6e6f921be5 100644
--- a/smriprep/workflows/base.py
+++ b/smriprep/workflows/base.py
@@ -414,6 +414,7 @@ def init_single_subject_wf(
         skull_strip_mode=skull_strip_mode,
         skull_strip_template=skull_strip_template,
         spaces=spaces,
+        cifti_output=False,  # Enabling this needs a CLI flag
     )
 
     # fmt:off
diff --git a/smriprep/workflows/outputs.py b/smriprep/workflows/outputs.py
index 3df1ffa259..80d907d648 100644
--- a/smriprep/workflows/outputs.py
+++ b/smriprep/workflows/outputs.py
@@ -204,6 +204,7 @@ def init_anat_derivatives_wf(
     t2w,
     output_dir,
     spaces,
+    cifti_output,
     name="anat_derivatives_wf",
     tpm_labels=BIDS_TISSUE_ORDER,
 ):
@@ -224,6 +225,8 @@ def init_anat_derivatives_wf(
         Workflow name (default: anat_derivatives_wf)
     tpm_labels : :obj:`tuple`
         Tissue probability maps in order
+    cifti_output : :obj:`bool`
+        Whether the ``--cifti-output`` flag was set.
 
     Inputs
     ------
@@ -273,6 +276,12 @@
         FreeSurfer's aseg segmentation, in native T1w space
     t1w_fs_aparc
         FreeSurfer's aparc+aseg segmentation, in native T1w space
+    cifti_morph
+        Morphometric CIFTI-2 dscalar files
+    cifti_density
+        Grayordinate density
+    cifti_metadata
+        JSON files containing metadata dictionaries
 
     """
     from niworkflows.interfaces.utility import KeySelect
@@ -299,6 +308,9 @@
             "anat_ribbon",
             "t1w_fs_aseg",
             "t1w_fs_aparc",
+            'cifti_metadata',
+            'cifti_density',
+            'cifti_morph',
         ]
     ),
     name="inputnode",
@@ -696,6 +708,27 @@
         ])
         # fmt:on
+
+    if cifti_output:
+        ds_cifti_morph = pe.MapNode(
+            DerivativesDataSink(
+                base_directory=output_dir,
+                suffix=['curv', 'sulc', 'thickness'],
+                compress=False,
+                space='fsLR',
+            ),
+            name='ds_cifti_morph',
+            run_without_submitting=True,
+            iterfield=["in_file", "meta_dict", "suffix"],
+        )
+        # fmt:off
+        workflow.connect([
+            (inputnode, ds_cifti_morph, [('cifti_morph', 'in_file'),
+                                         ('source_files', 'source_file'),
+                                         ('cifti_density', 'density'),
+                                         (('cifti_metadata', _read_jsons), 'meta_dict')])
+        ])
+        # fmt:on
 
     return workflow
 
 
@@ -796,3 +829,10 @@ def _combine_cohort(in_template):
             return template
         return f"{template}+{in_template.split('cohort-')[-1].split(':')[0]}"
     return [_combine_cohort(v) for v in in_template]
+
+
+def _read_jsons(in_file):
+    from json import loads
+    from pathlib import Path
+
+    return [loads(Path(f).read_text()) for f in in_file]
diff --git a/smriprep/workflows/surfaces.py b/smriprep/workflows/surfaces.py
index 959f1160fa..7079cb4d31 100644
--- a/smriprep/workflows/surfaces.py
+++ b/smriprep/workflows/surfaces.py
@@ -27,6 +27,7 @@
 structural images.
 
 """
+import typing as ty
 from nipype.pipeline import engine as pe
 from nipype.interfaces.base import Undefined
 from nipype.interfaces import (
@@ -34,15 +35,15 @@
     io as nio,
     utility as niu,
     freesurfer as fs,
+    workbench as wb,
 )
 
-from ..interfaces.freesurfer import ReconAll
+from ..interfaces.freesurfer import ReconAll, MakeMidthickness
 
 from niworkflows.engine.workflows import LiterateWorkflow as Workflow
 from niworkflows.interfaces.freesurfer import (
     FSDetectInputs,
     FSInjectBrainExtracted,
-    MakeMidthickness,
     PatchedLTAConvert as LTAConvert,
     PatchedRobustRegister as RobustRegister,
     RefineBrainMask,
@@ -379,6 +380,16 @@ def init_autorecon_resume_wf(*, omp_nthreads, name="autorecon_resume_wf"):
         niu.IdentityInterface(fields=["subjects_dir", "subject_id"]), name="outputnode"
     )
 
+    # FreeSurfer 7.3 removed gcareg from autorecon2-volonly
+    # Adding it directly in would force it to run every time
+    gcareg = pe.Node(
+        ReconAll(directive=Undefined, steps=["gcareg"], openmp=omp_nthreads),
+        n_procs=omp_nthreads,
+        mem_gb=5,
+        name="gcareg",
+    )
+    gcareg.interface._always_run = True
+
    autorecon2_vol = pe.Node(
         ReconAll(directive="autorecon2-volonly", openmp=omp_nthreads),
         n_procs=omp_nthreads,
@@ -455,8 +466,10 @@ def _dedup(in_list):
     workflow.connect([
         (inputnode, cortribbon, [('use_T2', 'use_T2'),
                                  ('use_FLAIR', 'use_FLAIR')]),
-        (inputnode, autorecon2_vol, [('subjects_dir', 'subjects_dir'),
-                                     ('subject_id', 'subject_id')]),
+        (inputnode, gcareg, [('subjects_dir', 'subjects_dir'),
+                             ('subject_id', 'subject_id')]),
+        (gcareg, autorecon2_vol, [('subjects_dir', 'subjects_dir'),
+                                  ('subject_id', 'subject_id')]),
         (autorecon2_vol, autorecon_surfs, [('subjects_dir', 'subjects_dir'),
                                            ('subject_id', 'subject_id')]),
         (autorecon_surfs, cortribbon, [(('subjects_dir', _dedup), 'subjects_dir'),
@@ -823,6 +836,193 @@ def init_anat_ribbon_wf(name="anat_ribbon_wf"):
     return workflow
 
 
+def init_morph_grayords_wf(
+    grayord_density: ty.Literal['91k', '170k'],
+    name: str = "bold_grayords_wf",
+):
+    """
+    Sample FreeSurfer morphometry onto fsLR grayordinates.
+
+    Outputs are in CIFTI2 format.
+
+    Workflow Graph
+        .. workflow::
+            :graph2use: colored
+            :simple_form: yes
+
+            from smriprep.workflows.surfaces import init_morph_grayords_wf
+            wf = init_morph_grayords_wf(grayord_density="91k")
+
+    Parameters
+    ----------
+    grayord_density : :obj:`str`
+        Either `91k` or `170k`, representing the total number of vertices or *grayordinates*.
+    name : :obj:`str`
+        Unique name for the subworkflow (default: ``"bold_grayords_wf"``)
+
+    Inputs
+    ------
+    subject_id : :obj:`str`
+        FreeSurfer subject ID
+    subjects_dir : :obj:`str`
+        FreeSurfer SUBJECTS_DIR
+
+    Outputs
+    -------
+    cifti_morph : :obj:`list` of :obj:`str`
+        Paths of CIFTI dscalar files
+    cifti_metadata : :obj:`list` of :obj:`str`
+        Paths to JSON files containing metadata corresponding to ``cifti_morph``
+
+    """
+    import templateflow.api as tf
+    from niworkflows.engine.workflows import LiterateWorkflow as Workflow
+    from smriprep.interfaces.cifti import GenerateDScalar
+
+    workflow = Workflow(name=name)
+    workflow.__desc__ = f"""\
+*Grayordinate* "dscalar" files [@hcppipelines] containing {grayord_density} samples were
+also generated using the highest-resolution ``fsaverage`` as an intermediate standardized
+surface space.
+"""
+
+    fslr_density = "32k" if grayord_density == "91k" else "59k"
+
+    inputnode = pe.Node(
+        niu.IdentityInterface(fields=["subject_id", "subjects_dir"]),
+        name="inputnode",
+    )
+
+    outputnode = pe.Node(
+        niu.IdentityInterface(fields=["cifti_morph", "cifti_metadata"]),
+        name="outputnode",
+    )
+
+    get_surfaces = pe.Node(nio.FreeSurferSource(), name="get_surfaces")
+
+    surfmorph_list = pe.Node(
+        niu.Merge(3, ravel_inputs=True),
+        name="surfmorph_list",
+        run_without_submitting=True,
+    )
+
+    surf2surf = pe.MapNode(
+        fs.SurfaceTransform(target_subject="fsaverage", target_type="gii"),
+        iterfield=["source_file", "hemi"],
+        name="surf2surf",
+        mem_gb=0.01,
+    )
+    surf2surf.inputs.hemi = ["lh", "rh"] * 3
+
+    # Setup Workbench command. LR ordering for hemi can be assumed, as it is imposed
+    # by the hemi iterfield of the surf2surf MapNode above.
+    resample = pe.MapNode(
+        wb.MetricResample(method="ADAP_BARY_AREA", area_metrics=True),
+        name="resample",
+        iterfield=[
+            "in_file",
+            "out_file",
+            "new_sphere",
+            "new_area",
+            "current_sphere",
+            "current_area",
+        ],
+    )
+    resample.inputs.current_sphere = [
+        str(
+            tf.get(
+                "fsaverage",
+                hemi=hemi,
+                density="164k",
+                desc="std",
+                suffix="sphere",
+                extension=".surf.gii",
+            )
+        )
+        for hemi in "LR"
+    ] * 3
+    resample.inputs.current_area = [
+        str(
+            tf.get(
+                "fsaverage",
+                hemi=hemi,
+                density="164k",
+                desc="vaavg",
+                suffix="midthickness",
+                extension=".shape.gii",
+            )
+        )
+        for hemi in "LR"
+    ] * 3
+    resample.inputs.new_sphere = [
+        str(
+            tf.get(
+                "fsLR",
+                space="fsaverage",
+                hemi=hemi,
+                density=fslr_density,
+                suffix="sphere",
+                extension=".surf.gii",
+            )
+        )
+        for hemi in "LR"
+    ] * 3
+    resample.inputs.new_area = [
+        str(
+            tf.get(
+                "fsLR",
+                hemi=hemi,
+                density=fslr_density,
+                desc="vaavg",
+                suffix="midthickness",
+                extension=".shape.gii",
+            )
+        )
+        for hemi in "LR"
+    ] * 3
+    resample.inputs.out_file = [
+        f"space-fsLR_hemi-{h}_den-{grayord_density}_{morph}.shape.gii"
+        # Order: curv-L, curv-R, sulc-L, sulc-R, thickness-L, thickness-R
+        for morph in ('curv', 'sulc', 'thickness')
+        for h in "LR"
+    ]
+
+    gen_cifti = pe.MapNode(
+        GenerateDScalar(
+            grayordinates=grayord_density,
+        ),
+        iterfield=['scalar_name', 'scalar_surfs'],
+        name="gen_cifti",
+    )
+    gen_cifti.inputs.scalar_name = ['curv', 'sulc', 'thickness']
+
+    # fmt: off
+    workflow.connect([
+        (inputnode, get_surfaces, [
+            ('subject_id', 'subject_id'),
+            ('subjects_dir', 'subjects_dir'),
+        ]),
+        (inputnode, surf2surf, [
+            ('subject_id', 'source_subject'),
+            ('subjects_dir', 'subjects_dir'),
+        ]),
+        (get_surfaces, surfmorph_list, [
+            (('curv', _sorted_by_basename), 'in1'),
+            (('sulc', _sorted_by_basename), 'in2'),
+            (('thickness', _sorted_by_basename), 'in3'),
+        ]),
+        (surfmorph_list, surf2surf, [('out', 'source_file')]),
+        (surf2surf, resample, [('out_file', 'in_file')]),
+        (resample, gen_cifti, [
+            (("out_file", _collate), "scalar_surfs")]),
+        (gen_cifti, outputnode, [("out_file", "cifti_morph"),
+                                 ("out_metadata", "cifti_metadata")]),
+    ])
+    # fmt: on
+
+    return workflow
+
+
 def _check_cw256(in_files, default_flags):
     import numpy as np
     from nibabel.funcs import concat_images
@@ -841,3 +1041,7 @@ def _sorted_by_basename(inlist):
     from os.path import basename
 
     return sorted(inlist, key=lambda x: str(basename(x)))
+
+
+def _collate(files):
+    return [files[i:i + 2] for i in range(0, len(files), 2)]
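
The looseversion pin added to setup.cfg exists so the apas2aseg workaround in smriprep/interfaces/freesurfer.py can compare fs.Info.looseversion() against a threshold. A minimal sketch of that comparison, not part of the patch, with hard-coded strings standing in for a detected FreeSurfer installation:

from looseversion import LooseVersion

# Hard-coded strings stand in for fs.Info.looseversion(); only the comparison is shown
for fs_version in ("7.2.0", "7.3.0", "7.3.2"):
    needs_hack = LooseVersion(fs_version) >= LooseVersion("7.3.0")
    print(f"{fs_version}: blank the -apas2aseg input check -> {needs_hack}")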
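
_create_cifti_image pairs a ScalarAxis (one row per scalar map) with concatenated left and right BrainModelAxis objects restricted to non-medial-wall vertices. A sketch of the same axis construction, illustrative only, using synthetic ten-vertex hemispheres in place of the 32k/59k fsLR meshes:

import numpy as np
from nibabel import cifti2 as ci

# Toy "no medial wall" masks: 10 vertices per hemisphere, 8 kept on each side
mask_l = np.array([1, 1, 1, 1, 0, 1, 1, 1, 1, 0], dtype=bool)
mask_r = np.array([0, 1, 1, 1, 1, 1, 1, 1, 1, 0], dtype=bool)

bm_l = ci.BrainModelAxis(
    "cortex_left", vertex=np.nonzero(mask_l)[0], nvertices={"cortex_left": mask_l.size}
)
bm_r = ci.BrainModelAxis(
    "cortex_right", vertex=np.nonzero(mask_r)[0], nvertices={"cortex_right": mask_r.size}
)

# One scalar map ("thickness") over the concatenated grayordinates
data = np.atleast_2d(np.random.rand(int(mask_l.sum() + mask_r.sum())).astype("float32"))
header = ci.Cifti2Header.from_axes((ci.ScalarAxis(["thickness"]), bm_l + bm_r))
img = ci.Cifti2Image(dataobj=data, header=header)
img.nifti_header.set_intent("NIFTI_INTENT_CONNECTIVITY_DENSE_SCALARS")
print(img.shape)  # (1, 16): one map over 16 grayordinates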
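
Assuming the patched smriprep is importable, the GenerateDScalar interface could also be exercised outside the workflow. The filenames below are placeholders touched only so the File(exists=True) traits validate; .run() is left commented because it would need real GIFTI data and TemplateFlow access:

from pathlib import Path
from smriprep.interfaces.cifti import GenerateDScalar

surfs = []
for hemi in "LR":
    path = Path(f"space-fsLR_hemi-{hemi}_den-91k_thickness.shape.gii")
    path.touch()  # empty placeholder; a real run needs actual .shape.gii files
    surfs.append(str(path))

dscalar = GenerateDScalar(grayordinates="91k", scalar_name="thickness", scalar_surfs=surfs)
# dscalar.run()  # would write <stem>.dscalar.nii and dscalar.json to the current directory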
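
In init_morph_grayords_wf, the resample MapNode emits six GIFTIs in the fixed order curv-L, curv-R, sulc-L, sulc-R, thickness-L, thickness-R, and _collate regroups that flat list into [L, R] pairs so each gen_cifti iteration receives one hemisphere pair per scalar name. A small sketch with placeholder filenames:

def _collate(files):
    return [files[i:i + 2] for i in range(0, len(files), 2)]

# Placeholder names standing in for the resampled metric files
resampled = [
    f"space-fsLR_hemi-{h}_den-91k_{morph}.shape.gii"
    for morph in ("curv", "sulc", "thickness")
    for h in "LR"
]
print(_collate(resampled))
# -> three [L, R] pairs, matching scalar_name = ['curv', 'sulc', 'thickness']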
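
ds_cifti_morph receives one metadata dictionary per dscalar through the (('cifti_metadata', _read_jsons), 'meta_dict') connection in outputs.py. A throwaway sketch of that helper reading temporary JSON sidecars:

import json
from pathlib import Path
from tempfile import TemporaryDirectory


def _read_jsons(in_file):
    from json import loads
    from pathlib import Path

    return [loads(Path(f).read_text()) for f in in_file]


with TemporaryDirectory() as tmpdir:
    paths = []
    for name in ("curv", "sulc", "thickness"):
        path = Path(tmpdir) / f"{name}.json"
        path.write_text(json.dumps({"Density": "91,282 grayordinates"}))
        paths.append(str(path))
    print(_read_jsons(paths))  # list of dicts, one per metadata file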