Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,6 @@

tests/.runs/
work/
.nf-test/
.nf-test/
result
.DS_Store
21 changes: 21 additions & 0 deletions assets/multiqc_config.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
# MultiQC configuration for the nf-neuro tutorial pipeline.
# Registers a custom "DTI QC" section that collects PNG screenshots of
# DTI metric maps and renders them as images in the aggregated report.

report_comment: >
  This report has been generated by the nf-neuro tutorial!

# Controls where the custom dti_qc section is placed in the report layout.
report_section_order:
  dti_qc:
    order: -1001

# Custom-content declaration: files matched for "dti_qc" (see "sp" below)
# are PNG images displayed as an image section in the report.
custom_data:
  dti_qc:
    file_format: "png"
    section_name: "DTI QC"
    description: |
      This section contains QC images for diffusion tensor imaging (DTI) metric
      maps. Add specifications regarding how to evaluate those images, for example:
      To assess the quality of the DTI metrics, ensure that FA highlights major white
      matter tracts with expected high values (e.g., corpus callosum, corticospinal tract, etc.)...
    plot_type: "image"

# Search pattern: any staged file whose name ends in "dti_mqc.png" is
# assigned to the dti_qc custom-content section above.
sp:
  dti_qc:
    fn: "*dti_mqc.png"
87 changes: 74 additions & 13 deletions main.nf
Original file line number Diff line number Diff line change
@@ -1,30 +1,91 @@
#!/usr/bin/env nextflow

//include { RECONST_DTIMETRICS } from 'modules/nf-neuro/reconst/dtimetrics/main'
include { PREPROC_T1 } from './subworkflows/nf-neuro/preproc_t1/main'
include { STATS_METRICSINROI } from './modules/local/stats/metricsinrois/main'
include { PREPROC_DIFF } from './subworkflows/local/preproc_diff/main'
include { MULTIQC } from "./modules/nf-core/multiqc/main"

workflow get_data {
main:
if ( !params.input ) {
log.info "You must provide an input directory containing all files using:"
log.info "You must provide an input directory containing all images using:"
log.info ""
log.info " --input=/path/to/[input] Input directory containing the file needed"
log.info " --input=/path/to/[input] Input directory containing your subjects"
log.info " |"
log.info " └-- Input"
log.info " └-- participants.*"
log.info " ├-- S1"
log.info " | ├-- *dwi.nii.gz"
log.info " | ├-- *dwi.bval"
log.info " | ├-- *dwi.bvec"
log.info " | └-- *t1.nii.gz"
log.info " └-- S2"
log.info " ├-- *dwi.nii.gz"
log.info " ├-- *bval"
log.info " ├-- *bvec"
log.info " └-- *t1.nii.gz"
log.info ""
error "Please resubmit your command with the previous file structure."
}

input = file(params.input)
// ** Loading all files. ** //
participants_channel = Channel.fromFilePairs("$input/participants.*", flat: true)
{ "participants_files" }

// ** Loading DWI files. ** //
dwi_channel = Channel.fromFilePairs("$input/**/**/dwi/*dwi.{nii.gz,bval,bvec}", size: 3, flat: true)
{ it.parent.parent.parent.name + "_" + it.parent.parent.name} // Set the subject filename as subjectID + '_' + session.
.map{ sid, bvals, bvecs, dwi -> [ [id: sid], dwi, bvals, bvecs ] } // Reordering the inputs.
// ** Loading T1 file. ** //
t1_channel = Channel.fromFilePairs("$input/**/**/anat/*T1w.nii.gz", size: 1, flat: true)
{ it.parent.parent.parent.name + "_" + it.parent.parent.name } // Set the subject filename as subjectID + '_' + session.
.map{ sid, t1 -> [ [id: sid], t1 ] }
emit:
participants = participants_channel
dwi = dwi_channel
anat = t1_channel
}

workflow {
// ** Now call your input workflow to fetch your files ** //
data = get_data()
data.participants.view()
ch_multiqc_files = Channel.empty()

inputs = get_data()

//Processing DWI
PREPROC_DIFF( inputs.dwi )
ch_multiqc_files = ch_multiqc_files.mix(PREPROC_DIFF.out.mqc)

// Preprocessing T1 images
//inputs.anat.view()

PREPROC_T1(
inputs.anat,
Channel.empty(),
Channel.empty(),
Channel.empty(),
Channel.empty(),
Channel.empty(),
Channel.empty()
)

// Extract FA value
input_extract_metric = PREPROC_T1.out.image_bet
.join(PREPROC_DIFF.out.fa)
.map{ it }

STATS_METRICSINROI( input_extract_metric )

ch_multiqc_files = ch_multiqc_files
.groupTuple()
.map { meta, files_list ->
def files = files_list.flatten().findAll { it != null }
return tuple(meta, files)
}

ch_multiqc_config = Channel.fromPath("$projectDir/assets/multiqc_config.yml", checkIfExists: true)

// MultiQC
MULTIQC(
ch_multiqc_files,
[],
ch_multiqc_config.toList(),
[],
[],
[],
[]
)
}
53 changes: 52 additions & 1 deletion modules.json
Original file line number Diff line number Diff line change
Expand Up @@ -2,12 +2,58 @@
"name": "",
"homePage": "",
"repos": {
"https://github.com/nf-core/modules.git": {
"modules": {
"nf-core": {
"multiqc": {
"branch": "master",
"git_sha": "81880787133db07d9b4c1febd152c090eb8325dc",
"installed_by": ["modules"]
}
}
}
},
"https://github.com/scilus/nf-neuro.git": {
"modules": {
"nf-neuro": {
"betcrop/antsbet": {
"branch": "main",
"git_sha": "b8949dd284432bbe1399dbc3f54cdf9191855f8e",
"installed_by": ["preproc_t1"]
},
"betcrop/synthbet": {
"branch": "main",
"git_sha": "dca20370d97a69c6a91c80843f417206212568e6",
"installed_by": ["preproc_t1"]
},
"denoising/mppca": {
"branch": "main",
"git_sha": "2e222d18c89e5547a6bf5c0c74673baeb63bcd52",
"installed_by": ["modules"]
},
"denoising/nlmeans": {
"branch": "main",
"git_sha": "261a7e0606645eeaf863e401cb9fc99c130b3a19",
"installed_by": ["preproc_t1"]
},
"image/cropvolume": {
"branch": "main",
"git_sha": "3e2e971f5bdaafcd5f72cb9c69f9d0b2a6f20de3",
"installed_by": ["preproc_t1"]
},
"image/resample": {
"branch": "main",
"git_sha": "36e010a236a0bd86334ab99b0cac4f7c4ff51532",
"installed_by": ["preproc_t1"]
},
"preproc/n4": {
"branch": "main",
"git_sha": "18273a2cef9ffdaf7088e305b9c4ebf4dd439079",
"installed_by": ["preproc_t1"]
},
"reconst/dtimetrics": {
"branch": "main",
"git_sha": "452075a707a9769b0509fc33a1051e8ba80799bf",
"git_sha": "47e91ecc151180b81de0d4945c892bd616a6ad03",
"installed_by": ["modules"]
}
}
Expand All @@ -18,6 +64,11 @@
"branch": "main",
"git_sha": "a79cb5c9645269db389c563f674b17c5e900a50b",
"installed_by": ["subworkflows"]
},
"preproc_t1": {
"branch": "main",
"git_sha": "84317cd68567fa8651c120bd19028177f90d41ae",
"installed_by": ["subworkflows"]
}
}
}
Expand Down
46 changes: 46 additions & 0 deletions modules/local/stats/metricsinrois/main.nf
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
// Segments a T1 image into three tissue classes with FSL FAST, then computes
// statistics of the supplied metric map(s) inside each binary tissue mask
// with scilpy's scil_volume_stats_in_ROI.py.
//
// Input : tuple of [meta map, T1 image, metric map(s)].
// Output: per-tissue binary masks (*mask_{wm,gm,csf}.nii.gz), partial-volume
//         maps (*map_{wm,gm,csf}.nii.gz), and a JSON of per-ROI statistics.
process STATS_METRICSINROI {
    tag "$meta.id"
    label 'process_single'

    // Scilus container provides both FSL (fast) and the scilpy scripts below.
    container "${ workflow.containerEngine == 'singularity' && !task.ext.singularity_pull_docker_container ?
        'https://scil.usherbrooke.ca/containers/scilus_latest.sif':
        'scilus/scilus:latest' }"

    input:
    tuple val(meta), path(t1), path(metrics)

    output:
    tuple val(meta), path("*.json") , emit: stats
    tuple val(meta), path("*mask_wm.nii.gz") , emit: wm_mask
    tuple val(meta), path("*mask_gm.nii.gz") , emit: gm_mask
    tuple val(meta), path("*mask_csf.nii.gz") , emit: csf_mask
    tuple val(meta), path("*map_wm.nii.gz") , emit: wm_map
    tuple val(meta), path("*map_gm.nii.gz") , emit: gm_map
    tuple val(meta), path("*map_csf.nii.gz") , emit: csf_map

    script:
    // Output files are prefixed with the subject id unless task.ext.prefix
    // overrides it; task.ext.first_suffix customizes the stats JSON name.
    def prefix = task.ext.prefix ?: "${meta.id}"
    def suffix = task.ext.first_suffix ? "${task.ext.first_suffix}_stats" : "stats"
    // Optional flags forwarded to scil_volume_stats_in_ROI.py via task.ext.
    def bin = task.ext.bin ? "--bin " : ""
    def normalize_weights = task.ext.normalize_weights ? "--normalize_weights " : ""
    // Shell steps: (1) cap thread counts for single-CPU scheduling; (2) run
    // FAST 3-class segmentation on the T1; (3) convert the hard segmentations
    // (seg_2/1/0) to uint8 masks and rename the partial-volume maps
    // (pve_2/1/0); FAST index order here maps 2->WM, 1->GM, 0->CSF — TODO
    // confirm against the FAST docs; (4) aggregate metric statistics per mask.
    """
    export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=1
    export OMP_NUM_THREADS=1
    export OPENBLAS_NUM_THREADS=1

    fast -t 1 -n 3\
        -H 0.1 -I 4 -l 20.0 -g -o t1.nii.gz $t1
    scil_volume_math.py convert t1_seg_2.nii.gz ${prefix}__mask_wm.nii.gz --data_type uint8
    scil_volume_math.py convert t1_seg_1.nii.gz ${prefix}__mask_gm.nii.gz --data_type uint8
    scil_volume_math.py convert t1_seg_0.nii.gz ${prefix}__mask_csf.nii.gz --data_type uint8
    mv t1_pve_2.nii.gz ${prefix}__map_wm.nii.gz
    mv t1_pve_1.nii.gz ${prefix}__map_gm.nii.gz
    mv t1_pve_0.nii.gz ${prefix}__map_csf.nii.gz

    scil_volume_stats_in_ROI.py ${prefix}__mask*.nii.gz \
        --metrics $metrics \
        --sort_keys \
        $bin $normalize_weights > ${prefix}__${suffix}.json

    """
}
7 changes: 7 additions & 0 deletions modules/nf-core/multiqc/environment.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
---
# yaml-language-server: $schema=https://raw.githubusercontent.com/nf-core/modules/master/modules/environment-schema.json
# Conda environment for the MULTIQC process (used with conda-based profiles).
# NOTE(review): pinned to 1.27 here while the process container pins
# multiqc/multiqc:v1.27.1 — consider aligning the two versions.
channels:
  - conda-forge
  - bioconda
dependencies:
  - bioconda::multiqc=1.27
63 changes: 63 additions & 0 deletions modules/nf-core/multiqc/main.nf
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
// Aggregates QC outputs into a single MultiQC HTML report.
//
// This is a locally modified copy of the nf-core multiqc module: the first
// input was changed to a [meta, qc_images] tuple so the report is produced
// per subject and named from meta.id (see "prefix" below).
process MULTIQC {
    tag "$meta.id"
    label 'process_single'

    conda "${moduleDir}/environment.yml"
    container "${ 'multiqc/multiqc:v1.27.1' }"

    input:
    tuple val(meta), path(qc_images) // Added input with subject meta field.
    path multiqc_files               // Additional loose QC files to stage.
    path(multiqc_config)             // Main MultiQC YAML config (optional).
    path(extra_multiqc_config)       // Second config, overrides the first (optional).
    path(multiqc_logo)               // Custom logo image (optional).
    path(replace_names)              // TSV for --replace-names (optional).
    path(sample_names)               // TSV for --sample-names (optional).

    output:
    path "*multiqc_report.html", emit: report
    path "*_data"              , emit: data
    path "*_plots"             , optional:true, emit: plots
    path "versions.yml"        , emit: versions

    when:
    task.ext.when == null || task.ext.when

    script:
    def args = task.ext.args ?: ''
    // Name the report after the subject so per-subject reports don't collide.
    def prefix = "--filename ${meta.id}_multiqc_report"
    // Each optional input contributes its CLI flag only when provided.
    def config = multiqc_config ? "--config $multiqc_config" : ''
    def extra_config = extra_multiqc_config ? "--config $extra_multiqc_config" : ''
    def logo = multiqc_logo ? "--cl-config 'custom_logo: \"${multiqc_logo}\"'" : ''
    def replace = replace_names ? "--replace-names ${replace_names}" : ''
    def samples = sample_names ? "--sample-names ${sample_names}" : ''
    // Run MultiQC over the task work dir (".") where inputs were staged.
    """
    multiqc \\
        --force \\
        $args \\
        $config \\
        $prefix \\
        $extra_config \\
        $logo \\
        $replace \\
        $samples \\
        .

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" )
    END_VERSIONS
    """

    stub:
    // Create empty placeholders matching the output globs for -stub runs.
    """
    mkdir multiqc_data
    mkdir multiqc_plots
    touch multiqc_report.html

    cat <<-END_VERSIONS > versions.yml
    "${task.process}":
        multiqc: \$( multiqc --version | sed -e "s/multiqc, version //g" )
    END_VERSIONS
    """
}
Loading