diff --git a/.circleci/bcp_anat_outputs.txt b/.circleci/bcp_anat_outputs.txt
index b6819c90..b9d6e049 100644
--- a/.circleci/bcp_anat_outputs.txt
+++ b/.circleci/bcp_anat_outputs.txt
@@ -8,7 +8,6 @@ logs/CITATION.html
 logs/CITATION.md
 logs/CITATION.tex
 sub-01
-sub-01.html
 sub-01/ses-1mo
 sub-01/ses-1mo/anat
 sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_desc-aparcaseg_dseg.nii.gz
@@ -49,3 +48,4 @@ sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_label-CSF_pr
 sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_label-GM_probseg.nii.gz
 sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-MNIInfant_cohort-1_label-WM_probseg.nii.gz
 sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_space-T1w_desc-preproc_T2w.nii.gz
+sub-01_ses-1mo.html
diff --git a/.circleci/bcp_full_outputs.txt b/.circleci/bcp_full_outputs.txt
index f95a3d04..c5843cdc 100644
--- a/.circleci/bcp_full_outputs.txt
+++ b/.circleci/bcp_full_outputs.txt
@@ -8,7 +8,6 @@ logs/CITATION.html
 logs/CITATION.md
 logs/CITATION.tex
 sub-01
-sub-01.html
 sub-01/ses-1mo
 sub-01/ses-1mo/anat
 sub-01/ses-1mo/anat/sub-01_ses-1mo_run-001_desc-aparcaseg_dseg.nii.gz
@@ -72,3 +71,4 @@ sub-01/ses-1mo/func/sub-01_ses-1mo_task-rest_acq-PA_run-001_space-MNIInfant_coho
 sub-01/ses-1mo/func/sub-01_ses-1mo_task-rest_acq-PA_run-001_space-MNIInfant_cohort-1_desc-brain_mask.nii.gz
 sub-01/ses-1mo/func/sub-01_ses-1mo_task-rest_acq-PA_run-001_space-MNIInfant_cohort-1_desc-preproc_bold.json
 sub-01/ses-1mo/func/sub-01_ses-1mo_task-rest_acq-PA_run-001_space-MNIInfant_cohort-1_desc-preproc_bold.nii.gz
+sub-01_ses-1mo.html
diff --git a/docs/usage.md b/docs/usage.md
index 0862e64b..ea404e65 100644
--- a/docs/usage.md
+++ b/docs/usage.md
@@ -8,21 +8,19 @@ The input dataset is required to be in valid
 {abbr}`BIDS (The Brain Imaging Data Structure)` format,
 and it must include at least one T1-weighted and
 one T2-weighted structural image and
-(unless disabled with a flag) a BOLD series.
+a BOLD series (unless using the `--anat-only` flag).
+
 We highly recommend that you validate your dataset with the free, online
 [BIDS Validator](http://bids-standard.github.io/bids-validator/).

-The exact command to run *NiBabies* depends on the [Installation](./installation.md) method.
-The common parts of the command follow the
-[BIDS-Apps](https://github.com/BIDS-Apps) definition.
-Example:
-
-```Shell
-$ nibabies data/bids_root/ out/ participant -w work/ --participant-id 01 --age-months 12
-```
+### Participant Ages
+*NiBabies* will attempt to automatically extract participant ages (in months) from the BIDS layout.
+Specifically, these two files will be checked:
+- [Sessions file](https://bids-specification.readthedocs.io/en/stable/03-modality-agnostic-files.html#sessions-file): `<bids_root>/sub-<label>/sub-<label>_sessions.tsv`
+- [Participants file](https://bids-specification.readthedocs.io/en/stable/03-modality-agnostic-files.html#participants-file): `<bids_root>/participants.tsv`

-Further information about BIDS and BIDS-Apps can be found at the
-[NiPreps portal](https://www.nipreps.org/apps/framework/).
+Either file should include an `age` (or, to be more explicit, an `age_months`) column, and it is
+recommended to include an accompanying JSON sidecar that describes these fields and explicitly states that the values are in months.

 ## The FreeSurfer license

@@ -33,6 +31,21 @@ To obtain a FreeSurfer license, simply register for free at https://surfer.nmr.m
 FreeSurfer will search for a license key file first using the `$FS_LICENSE` environment variable and then in the default path to the license key file (`$FREESURFER_HOME`/license.txt).
 If `$FS_LICENSE` is set, the [`nibabies-wrapper`](#using-the-nibabies-wrapper) will automatically handle setting the license within the container.
 Otherwise, you will need to use the `--fs-license-file` flag to ensure the license is available.
+
+## Example command
+
+The exact command to run *NiBabies* depends on the [Installation](./installation.md) method.
+The common parts of the command follow the
+[BIDS-Apps](https://github.com/BIDS-Apps) definition.
+Example:
+
+```Shell
+$ nibabies data/bids_root/ out/ participant -w work/ --participant-id 01
+```
+
+Further information about BIDS and BIDS-Apps can be found at the
+[NiPreps portal](https://www.nipreps.org/apps/framework/).
+
 ## Command-Line Arguments
 ```{argparse}
 :ref: nibabies.cli.parser._build_parser
@@ -50,21 +63,9 @@ At minimum, the following *positional* arguments are required.
 However, as infant brains can vastly differ depending on age,
 providing the following arguments is highly recommended:

-- **`--age-months`** - participant age in months
-
-:::{admonition} Warning
-:class: warning
-
-This is required if FreeSurfer is not disabled (`--fs-no-reconall`)
-:::
-
 - **`--participant-id`** - participant ID

-:::{admonition} Tip
-:class: tip
-
-This is recommended when using `--age-months` if age varies across participants.
-:::
+- **`--session-id`** - session ID

 - **`--segmentation-atlases-dir`** - directory containing pre-labeled segmentations
   to use for Joint Label Fusion.
@@ -85,11 +86,11 @@ For installation instructions, please see [](installation.md#installing-the-niba
 ### Sample Docker usage

 ```
-$ nibabies-wrapper docker /path/to/data /path/to/output participant --age-months 12 --fs-license-file /usr/freesurfer/license.txt
+$ nibabies-wrapper docker /path/to/data /path/to/output participant --fs-license-file /usr/freesurfer/license.txt
 RUNNING: docker run --rm -e DOCKER_VERSION_8395080871=20.10.6 -it -v /path/to/data:/data:ro \
 -v /path/to/output:/out -v /usr/freesurfer/license.txt:/opt/freesurfer/license.txt:ro \
-nipreps/nibabies:21.0.0 /data /out participant --age-months 12
+nipreps/nibabies:23.0.0 /data /out participant
 ...
@@ -103,11 +104,11 @@ This can be overridden by using the `-i` flag to specify a particular Docker ima
 ### Sample Singularity usage

 ```
-$ nibabies-wrapper singularity /path/to/data /path/to/output participant --age-months 12 -i nibabies-21.0.0.sif --fs-license-file /usr/freesurfer/license.txt
+$ nibabies-wrapper singularity /path/to/data /path/to/output participant -i nibabies-23.0.0.sif --fs-license-file /usr/freesurfer/license.txt
 RUNNING: singularity run --cleanenv -B /path/to/data:/data:ro \
 -B /path/to/output:/out -B /usr/freesurfer/license.txt:/opt/freesurfer/license.txt:ro \
-nibabies-21.0.0.sif /data /out participant --age-months 12
+nibabies-23.0.0.sif /data /out participant
 ...
 ```
diff --git a/nibabies/cli/parser.py b/nibabies/cli/parser.py
index ded5aa99..b9b850c1 100644
--- a/nibabies/cli/parser.py
+++ b/nibabies/cli/parser.py
@@ -696,20 +696,6 @@ def parse_args(args=None, namespace=None):
     config.execution.log_level = int(max(25 - 5 * opts.verbose_count, logging.DEBUG))
     config.from_dict(vars(opts))

-    # Initialize --output-spaces if not defined
-    if config.execution.output_spaces is None:
-        from niworkflows.utils.spaces import Reference, SpatialReferences
-
-        from ..utils.misc import cohort_by_months
-
-        if config.workflow.age_months is None:
-            parser.error("--age-months must be provided if --output-spaces is not set.")
-
-        cohort = cohort_by_months("MNIInfant", config.workflow.age_months)
-        config.execution.output_spaces = SpatialReferences(
-            [Reference("MNIInfant", {"res": "native", "cohort": cohort})]
-        )
-
     # Retrieve logging level
     build_log = config.loggers.cli

@@ -831,8 +817,41 @@ def parse_args(args=None, namespace=None):
     config.execution.participant_label = sorted(participant_label)
     config.workflow.skull_strip_template = config.workflow.skull_strip_template[0]
+    config.execution.unique_labels = compute_subworkflows()

     # finally, write config to file
     config_file = config.execution.work_dir / config.execution.run_uuid / "config.toml"
     config_file.parent.mkdir(exist_ok=True, parents=True)
     config.to_filename(config_file)
+
+
+def compute_subworkflows() -> list:
+    """
+    Query all available participants and sessions, and construct the combinations of the
+    subworkflows needed.
+    """
+    from niworkflows.utils.bids import collect_participants
+
+    from nibabies import config
+
+    # consists of (subject_id, session_id) tuples
+    subworkflows = []
+
+    subject_list = collect_participants(
+        config.execution.layout,
+        participant_label=config.execution.participant_label,
+        strict=True,
+    )
+
+    for subject in subject_list:
+        # Due to the rapidly changing morphometry of the population,
+        # ensure each subject session is processed individually
+        sessions = (
+            config.execution.session_id
+            or config.execution.layout.get_sessions(scope='raw', subject=subject)
+            or [None]
+        )
+        # grab participant age per session
+        for session in sessions:
+            subworkflows.append((subject, session))
+    return subworkflows
diff --git a/nibabies/cli/run.py b/nibabies/cli/run.py
index f173c681..cf45c616 100644
--- a/nibabies/cli/run.py
+++ b/nibabies/cli/run.py
@@ -141,8 +141,7 @@ def main():

     # Generate reports phase
     generate_reports(
-        config.execution.participant_label,
-        config.execution.session_id,
+        config.execution.unique_labels,
         config.execution.nibabies_dir,
         config.execution.run_uuid,
         config=pkgrf("nibabies", "data/reports-spec.yml"),
diff --git a/nibabies/cli/workflow.py b/nibabies/cli/workflow.py
index 3b924e31..aa5cb643 100644
--- a/nibabies/cli/workflow.py
+++ b/nibabies/cli/workflow.py
@@ -12,7 +12,7 @@

 def build_workflow(config_file):
     """Create the Nipype Workflow that supports the whole execution graph."""
-    from niworkflows.utils.bids import check_pipeline_version, collect_participants
+    from niworkflows.utils.bids import check_pipeline_version
     from niworkflows.utils.misc import check_valid_fs_license

     from .. import config
@@ -42,24 +42,17 @@
         desc_content = dset_desc_path.read_bytes()
         config.execution.bids_description_hash = sha256(desc_content).hexdigest()

-    # First check that bids_dir looks like a BIDS folder
-    subject_list = collect_participants(
-        config.execution.layout, participant_label=config.execution.participant_label
-    )
-    subjects_sessions = {
-        subject: config.execution.session_id
-        or config.execution.layout.get_sessions(scope='raw', subject=subject)
-        or [None]
-        for subject in subject_list
-    }
-
     # Called with reports only
     if config.execution.reports_only:
         from pkg_resources import resource_filename as pkgrf

-        build_logger.log(25, "Running --reports-only on participants %s", ", ".join(subject_list))
+        build_logger.log(
+            25,
+            "Running --reports-only on participants %s",
+            ", ".join(config.execution.unique_labels),
+        )
         retval["return_code"] = generate_reports(
-            subject_list,
+            config.execution.unique_labels,
             nibabies_dir,
             config.execution.run_uuid,
             config=pkgrf("nibabies", "data/reports-spec.yml"),
@@ -71,9 +64,9 @@
     init_msg = f"""
     Running nibabies version {config.environment.version}:
       * BIDS dataset path: {config.execution.bids_dir}.
-      * Participant list: {subject_list}.
+      * Participant list: {config.execution.unique_labels}.
       * Run identifier: {config.execution.run_uuid}.
-      * Output spaces: {config.execution.output_spaces}."""
+      * Output spaces: {config.execution.output_spaces or 'MNIInfant'}."""

     if config.execution.anat_derivatives:
         init_msg += f"""
@@ -84,7 +77,7 @@
       * Pre-run FreeSurfer's SUBJECTS_DIR: {config.execution.fs_subjects_dir}."""

     build_logger.log(25, init_msg)
-    retval["workflow"] = init_nibabies_wf(subjects_sessions)
+    retval["workflow"] = init_nibabies_wf(config.execution.unique_labels)

     # Check for FS license after building the workflow
     if not check_valid_fs_license():
diff --git a/nibabies/config.py b/nibabies/config.py
index f239cbf8..c037538e 100644
--- a/nibabies/config.py
+++ b/nibabies/config.py
@@ -425,6 +425,8 @@ class execution(_Config):
     """Select a particular task from all available in the dataset."""
     templateflow_home = _templateflow_home
     """The root folder of the TemplateFlow client."""
+    unique_labels = None
+    """Combinations of subject + session identifiers to be preprocessed."""
     work_dir = Path("work").absolute()
     """Path to a working directory where intermediate results will be available."""
     write_graph = False
@@ -581,8 +583,6 @@ class workflow(_Config):
     instance keeping standard and nonstandard spaces."""
     surface_recon_method = "infantfs"
     """Method to use for surface reconstruction."""
-    topup_max_vols = 5
-    """Maximum number of volumes to use with TOPUP, per-series (EPI or BOLD)."""
     use_aroma = None
     """Run ICA-:abbr:`AROMA (automatic removal of motion artifacts)`."""
     use_bbr = False
@@ -694,7 +694,6 @@ def load(filename, skip=None):
         section = getattr(sys.modules[__name__], sectionname)
         ignore = skip.get(sectionname)
         section.load(configs, ignore=ignore)
-    init_spaces()


 def get(flat=False):
@@ -729,42 +728,6 @@ def to_filename(filename):
     filename.write_text(dumps())


-def init_spaces(checkpoint=True):
-    """Initialize the :attr:`~workflow.spaces` setting."""
-    from niworkflows.utils.spaces import Reference, SpatialReferences
-
-    spaces = execution.output_spaces or SpatialReferences()
-    if not isinstance(spaces, SpatialReferences):
-        spaces = SpatialReferences(
-            [ref for s in spaces.split(" ") for ref in Reference.from_string(s)]
-        )
-
-    if checkpoint and not spaces.is_cached():
-        spaces.checkpoint()
-
-    # Ensure user-defined spatial references for outputs are correctly parsed.
-    # Certain options require normalization to a space not explicitly defined by users.
-    # These spaces will not be included in the final outputs.
-    if workflow.use_aroma:
-        # Make sure there's a normalization to FSL for AROMA to use.
-        spaces.add(Reference("MNI152NLin6Asym", {"res": "2"}))
-
-    if workflow.cifti_output:
-        # CIFTI grayordinates to corresponding FSL-MNI resolutions.
-        vol_res = "2" if workflow.cifti_output == "91k" else "1"
-        spaces.add(Reference("fsaverage", {"den": "164k"}))
-        spaces.add(Reference("MNI152NLin6Asym", {"res": vol_res}))
-    # Ensure a non-native version of MNIInfant is added as a target
-    if workflow.age_months is not None:
-        from .utils.misc import cohort_by_months
-
-        cohort = cohort_by_months("MNIInfant", workflow.age_months)
-        spaces.add(Reference("MNIInfant", {"cohort": cohort}))
-
-    # Make the SpatialReferences object available
-    workflow.spaces = spaces
-
-
 def _process_initializer(cwd, omp_nthreads):
     """Initialize the environment of the child process."""
     os.chdir(cwd)
diff --git a/nibabies/reports/core.py b/nibabies/reports/core.py
index 8dca0d2c..9fd7176c 100644
--- a/nibabies/reports/core.py
+++ b/nibabies/reports/core.py
@@ -87,8 +87,7 @@ def run_reports(


 def generate_reports(
-    subject_list,
-    sessions_list,
+    sub_ses_list,
     output_dir,
     run_uuid,
     config=None,
@@ -100,15 +99,11 @@ def generate_reports(
     if work_dir is not None:
         reportlets_dir = Path(work_dir) / "reportlets"

-    if sessions_list is None:
-        sessions_list = [None]
-
     report_errors = []
-    for subject_label, session in product(subject_list, sessions_list):
-        html_report = f"sub-{subject_label}"
-        if session:
-            html_report += f"_ses-{session}"
-        html_report += ".html"
+    for subject_label, session in sub_ses_list:
+        html_report = ''.join(
+            [f"sub-{subject_label}", f"_ses-{session}" if session else "", ".html"]
+        )
         report_errors.append(
             run_reports(
                 output_dir,
@@ -127,7 +122,7 @@

     logger = logging.getLogger("cli")
     error_list = ", ".join(
-        "%s (%d)" % (subid, err) for subid, err in zip(subject_list, report_errors) if err
+        "%s (%d)" % (subid, err) for subid, err in zip(sub_ses_list, report_errors) if err
     )
     logger.error(
         "Preprocessing did not finish successfully. Errors occurred while processing "
diff --git a/nibabies/tests/test_config.py b/nibabies/tests/test_config.py
index c9242445..807c079f 100644
--- a/nibabies/tests/test_config.py
+++ b/nibabies/tests/test_config.py
@@ -66,9 +66,8 @@ def test_config_spaces():
         section.load(configs, init=False)
     config.nipype.init()
     config.loggers.init()
-    config.init_spaces()
-
-    spaces = config.workflow.spaces
+    age = 8
+    spaces = _load_spaces(age)

     assert "MNI152NLin6Asym:res-2" not in [str(s) for s in spaces.get_standard(full_spec=True)]
     assert "MNI152NLin6Asym_res-2" not in [
@@ -78,8 +77,7 @@
     ]

     config.workflow.use_aroma = True
-    config.init_spaces()
-    spaces = config.workflow.spaces
+    spaces = _load_spaces(age)

     assert "MNI152NLin6Asym:res-2" in [str(s) for s in spaces.get_standard(full_spec=True)]

@@ -91,25 +89,17 @@
     config.execution.output_spaces = None
     config.workflow.use_aroma = False
-    config.workflow.age_months = None
-    config.init_spaces()
-    spaces = config.workflow.spaces
-    assert [str(s) for s in spaces.get_standard(full_spec=True)] == []
-    assert [
-        format_reference((s.fullname, s.spec))
-        for s in spaces.references
-        if s.standard and s.dim == 3
-    ] == []
+    with pytest.raises(RuntimeError):
+        spaces = _load_spaces(None)

     config.execution.output_spaces = None
     config.workflow.cifti_output = "91k"
     config.workflow.use_aroma = False
-    config.workflow.age_months = 1
-    config.init_spaces()
-    spaces = config.workflow.spaces
+    spaces = _load_spaces(1)

     assert [str(s) for s in spaces.get_standard(full_spec=True)] == [
+        'MNIInfant:cohort-1:res-native',  # Default output space
         'fsaverage:den-164k',
         'MNI152NLin6Asym:res-2',
     ]
@@ -118,7 +108,7 @@
         format_reference((s.fullname, s.spec))
         for s in spaces.references
         if s.standard and s.dim == 3
-    ] == ['MNI152NLin6Asym_res-2', 'MNIInfant_cohort-1']
+    ] == ['MNIInfant_cohort-1_res-native', 'MNI152NLin6Asym_res-2', 'MNIInfant_cohort-1']

     _reset_config()

@@ -138,3 +128,11 @@ def test_prng_seed(master_seed, ants_seed, numpy_seed):
     _reset_config()
     for seed in ('_random_seed', 'master', 'ants', 'numpy'):
         assert getattr(config.seeds, seed) is None
+
+
+def _load_spaces(age):
+    from nibabies.workflows.base import init_execution_spaces, init_workflow_spaces
+
+    # Conditional based on workflow necessities
+    spaces = init_workflow_spaces(init_execution_spaces(), age)
+    return spaces
diff --git a/nibabies/utils/bids.py b/nibabies/utils/bids.py
index 0f1ea15c..35483f2e 100644
--- a/nibabies/utils/bids.py
+++ b/nibabies/utils/bids.py
@@ -4,9 +4,10 @@
 import json
 import os
 import sys
+import warnings
 from dataclasses import dataclass, field
 from pathlib import Path
-from typing import IO, List, Union
+from typing import IO, List, Literal, Optional, Union


 @dataclass
@@ -262,3 +263,72 @@ def collect_precomputed_derivatives(layout, subject_id, derivatives_filters=None
         )
         derivatives[deriv] = res[0]
     return derivatives
+
+
+def parse_bids_for_age_months(
+    bids_root: Union[str, Path],
+    subject_id: str,
+    session_id: Optional[str] = None,
+) -> Optional[int]:
+    """
+    Given a BIDS root, query the BIDS metadata files for participant age, in months.
+
+    The heuristic followed is:
+    1) Check `sub-<subject_id>/sub-<subject_id>_sessions.tsv`
+    2) Check `<bids_root>/participants.tsv`
+    """
+    age = None
+    if subject_id.startswith('sub-'):
+        subject_id = subject_id[4:]
+    if session_id and session_id.startswith('ses-'):
+        session_id = session_id[4:]
+
+    sessions_tsv = Path(bids_root) / f'sub-{subject_id}' / f'sub-{subject_id}_sessions.tsv'
+    if sessions_tsv.exists() and session_id is not None:
+        age = _get_age_from_tsv(sessions_tsv, level='session', key=f'ses-{session_id}')
+
+    participants_tsv = Path(bids_root) / 'participants.tsv'
+    if participants_tsv.exists():
+        age = _get_age_from_tsv(participants_tsv, level='participant', key=f'sub-{subject_id}')
+
+    return age
+
+
+def _get_age_from_tsv(bids_tsv: Path, level: Literal['session', 'participant'], key: str):
+    import pandas as pd
+
+    df = pd.read_csv(str(bids_tsv), sep='\t')
+    age_col = None
+    # prefer explicit "age_months" over "age"
+    for c in ('age_months', 'age'):
+        if c in df.columns:
+            age_col = c
+            break
+
+    if age_col == 'age':
+        # verify age is in months
+        bids_json = bids_tsv.with_suffix('.json')
+        if not _verify_age_json(bids_json):
+            warnings.warn(f'Could not verify age column is in months for file: {bids_tsv}')
+
+    # find the relevant row
+    if level == 'session':
+        mask = df.session_id == key
+    elif level == 'participant':
+        mask = df.participant_id == key
+
+    try:
+        # extract age value from row
+        age = int(df.loc[mask, age_col].values[0])
+    except Exception:
+        age = None
+    return age
+
+
+def _verify_age_json(bids_json: Path) -> bool:
+    try:
+        data = json.loads(bids_json.read_text())
+        assert data['age']['Units'] == 'months'
+    except Exception:
+        return False
+    return True
diff --git a/nibabies/workflows/base.py b/nibabies/workflows/base.py
index 5bd4b0e2..e70dd31d 100644
--- a/nibabies/workflows/base.py
+++ b/nibabies/workflows/base.py
@@ -43,18 +43,20 @@
 import os
 import sys
 from copy import deepcopy
+from typing import Optional

 from nipype.interfaces import utility as niu
 from nipype.pipeline import engine as pe
 from packaging.version import Version

-from .. import config
-from ..interfaces import DerivativesDataSink
-from ..interfaces.reports import AboutSummary, SubjectSummary
-from .bold import init_func_preproc_wf
+from nibabies import config
+from nibabies.interfaces import DerivativesDataSink
+from nibabies.interfaces.reports import AboutSummary, SubjectSummary
+from nibabies.utils.bids import parse_bids_for_age_months
+from nibabies.workflows.bold import init_func_preproc_wf


-def init_nibabies_wf(participants_table):
+def init_nibabies_wf(subworkflows_list):
     """
     Build *NiBabies*'s pipeline.

@@ -76,8 +78,9 @@
     Parameters
     ----------
-    participants_table: :obj:`dict`
-        Keys of participant labels and values of the sessions to process.
+    subworkflows_list: :obj:`list` of :obj:`tuple`
+        A list of the subworkflows to create.
+        Each subject session is run as an individual workflow.
""" from niworkflows.engine.workflows import LiterateWorkflow as Workflow from niworkflows.interfaces.bids import BIDSFreeSurferDir @@ -86,13 +89,15 @@ def init_nibabies_wf(participants_table): nibabies_wf = Workflow(name=f"nibabies_{ver.major}_{ver.minor}_wf") nibabies_wf.base_dir = config.execution.work_dir + execution_spaces = init_execution_spaces() + freesurfer = config.workflow.run_reconall if freesurfer: fsdir = pe.Node( BIDSFreeSurferDir( derivatives=config.execution.output_dir, freesurfer_home=os.getenv("FREESURFER_HOME"), - spaces=config.workflow.spaces.get_fs_spaces(), + spaces=execution_spaces.get_fs_spaces(), ), name=f"fsdir_run_{config.execution.run_uuid.replace('-', '_')}", run_without_submitting=True, @@ -100,38 +105,60 @@ def init_nibabies_wf(participants_table): if config.execution.fs_subjects_dir is not None: fsdir.inputs.subjects_dir = str(config.execution.fs_subjects_dir.absolute()) - for subject_id, sessions in participants_table.items(): - for session_id in sessions: - single_subject_wf = init_single_subject_wf(subject_id, session_id=session_id) + for subject_id, session_id in subworkflows_list: + # Calculate the age and age-specific spaces + age = parse_bids_for_age_months(config.execution.bids_dir, subject_id, session_id) + if config.workflow.age_months: + config.loggers.cli.warning( + "`--age-months` is deprecated and will be removed in a future release." + "Please use a `sessions.tsv` or `participants.tsv` file to track participants age." + ) + age = config.workflow.age_months + if age is None: + raise RuntimeError( + "Could not find age for sub-{subject}{session}".format( + subject=subject_id, session=f'_ses-{session_id}' if session_id else '' + ) + ) + output_spaces = init_workflow_spaces(execution_spaces, age) - bids_level = [f"sub-{subject_id}"] - if session_id: - bids_level.append(f"ses-{session_id}") + # skull strip template cohort + single_subject_wf = init_single_subject_wf( + subject_id, + session_id=session_id, + age=age, + spaces=output_spaces, + ) - log_dir = ( - config.execution.nibabies_dir.joinpath(*bids_level) - / "log" - / config.execution.run_uuid - ) + bids_level = [f"sub-{subject_id}"] + if session_id: + bids_level.append(f"ses-{session_id}") - single_subject_wf.config["execution"]["crashdump_dir"] = str(log_dir) - for node in single_subject_wf._get_all_nodes(): - node.config = deepcopy(single_subject_wf.config) - if freesurfer: - nibabies_wf.connect( - fsdir, "subjects_dir", single_subject_wf, "inputnode.subjects_dir" - ) - else: - nibabies_wf.add_nodes([single_subject_wf]) + log_dir = ( + config.execution.nibabies_dir.joinpath(*bids_level) / "log" / config.execution.run_uuid + ) + + single_subject_wf.config["execution"]["crashdump_dir"] = str(log_dir) + for node in single_subject_wf._get_all_nodes(): + node.config = deepcopy(single_subject_wf.config) + if freesurfer: + nibabies_wf.connect(fsdir, "subjects_dir", single_subject_wf, "inputnode.subjects_dir") + else: + nibabies_wf.add_nodes([single_subject_wf]) - # Dump a copy of the config file into the log directory - log_dir.mkdir(exist_ok=True, parents=True) - config.to_filename(log_dir / "nibabies.toml") + # Dump a copy of the config file into the log directory + log_dir.mkdir(exist_ok=True, parents=True) + config.to_filename(log_dir / "nibabies.toml") return nibabies_wf -def init_single_subject_wf(subject_id, session_id=None): +def init_single_subject_wf( + subject_id: str, + session_id: Optional[str] = None, + age: Optional[int] = None, + spaces=None, +): """ Organize the preprocessing 
     Organize the preprocessing pipeline for a single subject, at a single session.
@@ -158,6 +185,8 @@
         Subject label for this single-subject workflow.
     session_id : :obj:`str` or None
         Session identifier.
+    age: :obj:`int` or None
+        Age (in months) of subject.

     Inputs
     ------
@@ -196,7 +225,6 @@
     anat_only = config.workflow.anat_only
     derivatives = config.execution.derivatives or {}
     anat_modality = "t1w" if subject_data["t1w"] else "t2w"
-    spaces = config.workflow.spaces
     # Make sure we always go through these two checks
     if not anat_only and not subject_data["bold"]:
         task_id = config.execution.task_id
@@ -315,7 +343,7 @@
     # Preprocessing of anatomical (includes registration to UNCInfant)
     anat_preproc_wf = init_infant_anat_wf(
         ants_affine_init=True,
-        age_months=config.workflow.age_months,
+        age_months=age,
         anat_modality=anat_modality,
         t1w=subject_data["t1w"],
         t2w=subject_data["t2w"],
@@ -419,7 +447,7 @@
     func_preproc_wfs = []
     has_fieldmap = bool(fmap_estimators)
     for bold_file in subject_data['bold']:
-        func_preproc_wf = init_func_preproc_wf(bold_file, has_fieldmap=has_fieldmap)
+        func_preproc_wf = init_func_preproc_wf(bold_file, spaces, has_fieldmap=has_fieldmap)
         if func_preproc_wf is None:
             continue
@@ -526,8 +554,54 @@ def _prefix(subid):
     return subid if subid.startswith("sub-") else f"sub-{subid}"


-def _select_iter_idx(in_list, idx):
-    """Returns a specific index of a list/tuple"""
-    if isinstance(in_list, (tuple, list)):
-        return in_list[idx]
-    raise AttributeError(f"Input {in_list} is incompatible type: {type(in_list)}")
+def init_workflow_spaces(execution_spaces, age_months):
+    """
+    Create output spaces at a per-subworkflow level.
+
+    This addresses the case where a multi-session subject is run and requires separate template cohorts.
+    """
+    from niworkflows.utils.spaces import Reference
+
+    from nibabies.utils.misc import cohort_by_months
+
+    spaces = deepcopy(execution_spaces)
+
+    if age_months is None:
+        raise RuntimeError("Participant age (in months) is required.")
+
+    if not spaces.references:
+        # Ensure age specific template is added if nothing is present
+        cohort = cohort_by_months("MNIInfant", age_months)
+        spaces.add(("MNIInfant", {"res": "native", "cohort": cohort}))
+
+    if not spaces.is_cached():
+        spaces.checkpoint()
+
+    # Ensure user-defined spatial references for outputs are correctly parsed.
+    # Certain options require normalization to a space not explicitly defined by users.
+    # These spaces will not be included in the final outputs.
+    if config.workflow.use_aroma:
+        # Make sure there's a normalization to FSL for AROMA to use.
+        spaces.add(Reference("MNI152NLin6Asym", {"res": "2"}))
+
+    if config.workflow.cifti_output:
+        # CIFTI grayordinates to corresponding FSL-MNI resolutions.
+        vol_res = "2" if config.workflow.cifti_output == "91k" else "1"
+        spaces.add(Reference("fsaverage", {"den": "164k"}))
+        spaces.add(Reference("MNI152NLin6Asym", {"res": vol_res}))
+    # Ensure a non-native version of MNIInfant is added as a target
+    cohort = cohort_by_months("MNIInfant", age_months)
+    spaces.add(Reference("MNIInfant", {"cohort": cohort}))
+
+    return spaces
+
+
+def init_execution_spaces():
+    from niworkflows.utils.spaces import Reference, SpatialReferences
+
+    spaces = config.execution.output_spaces or SpatialReferences()
+    if not isinstance(spaces, SpatialReferences):
+        spaces = SpatialReferences(
+            [ref for s in spaces.split(" ") for ref in Reference.from_string(s)]
+        )
+    return spaces
diff --git a/nibabies/workflows/bold/base.py b/nibabies/workflows/bold/base.py
index 784af72f..5ffea208 100644
--- a/nibabies/workflows/bold/base.py
+++ b/nibabies/workflows/bold/base.py
@@ -73,7 +73,7 @@
 from .t2s import init_bold_t2s_wf, init_t2s_reporting_wf


-def init_func_preproc_wf(bold_file, has_fieldmap=False, existing_derivatives=None):
+def init_func_preproc_wf(bold_file, spaces, has_fieldmap=False, existing_derivatives=None):
     """
     This workflow controls the functional preprocessing stages of *NiBabies*.

@@ -191,7 +191,6 @@ def init_func_preproc_wf(bold_file, has_fieldmap=False, existing_derivatives=Non
     # Have some options handy
     omp_nthreads = config.nipype.omp_nthreads
     freesurfer = config.workflow.run_reconall
-    spaces = config.workflow.spaces
     nibabies_dir = str(config.execution.nibabies_dir)
     freesurfer_spaces = spaces.get_fs_spaces()
     project_goodvoxels = config.workflow.project_goodvoxels
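
The age lookup added in `nibabies/utils/bids.py` (and described in the `docs/usage.md` hunk above) can be exercised on its own. The following is a minimal, illustrative sketch and not part of the patch; it assumes a nibabies checkout containing this change and pandas are importable, and the dataset paths, labels, and ages are made up:

```python
import json
from pathlib import Path
from tempfile import mkdtemp

from nibabies.utils.bids import parse_bids_for_age_months

bids_root = Path(mkdtemp())

# participants.tsv with an "age" column, plus a JSON sidecar stating the units are months
(bids_root / 'participants.tsv').write_text('participant_id\tage\nsub-01\t12\n')
(bids_root / 'participants.json').write_text(
    json.dumps({'age': {'Description': 'Age at scan', 'Units': 'months'}})
)

# per-subject sessions file using the more explicit "age_months" column
subj_dir = bids_root / 'sub-01'
subj_dir.mkdir()
(subj_dir / 'sub-01_sessions.tsv').write_text('session_id\tage_months\nses-12mo\t12\n')

# with a session given, the sessions file is consulted; participants.tsv is read afterwards
print(parse_bids_for_age_months(bids_root, 'sub-01', 'ses-12mo'))  # 12
print(parse_bids_for_age_months(bids_root, '01'))                  # 12
```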
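
The output listings at the top change from one `sub-01.html` report per subject to one `sub-01_ses-1mo.html` report per subject session. A small self-contained sketch of that pairing and naming convention follows; the helper names here are hypothetical, while in the patch the pairing is built by `compute_subworkflows()` and the filename by `generate_reports()`:

```python
def pair_subjects_with_sessions(subjects, sessions_by_subject):
    """Build one (subject, session) tuple per subworkflow; None stands in for a session-less subject."""
    pairs = []
    for subject in subjects:
        for session in sessions_by_subject.get(subject) or [None]:
            pairs.append((subject, session))
    return pairs


def report_name(subject_label, session):
    """Mirror the per-session report filename assembled in nibabies/reports/core.py."""
    return ''.join([f"sub-{subject_label}", f"_ses-{session}" if session else "", ".html"])


pairs = pair_subjects_with_sessions(['01'], {'01': ['1mo']})
print([report_name(*pair) for pair in pairs])  # ['sub-01_ses-1mo.html']
```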