Skip to content

Commit 97f3050

Browse files
committed
Merge pull request #585 from satra/enh/openfmri
Enh/openfmri
2 parents a251366 + f78ce1b commit 97f3050

File tree

3 files changed

+263
-27
lines changed

3 files changed

+263
-27
lines changed

examples/fmri_openfmri.py

Lines changed: 95 additions & 25 deletions
Original file line number | Diff line number | Diff line change
@@ -22,8 +22,9 @@
2222
import nipype.interfaces.io as nio
2323
import nipype.interfaces.utility as niu
2424
from nipype.workflows.fmri.fsl import (create_featreg_preproc,
25-
create_modelfit_workflow,
26-
create_fixed_effects_flow)
25+
create_modelfit_workflow,
26+
create_fixed_effects_flow,
27+
create_reg_workflow)
2728

2829
fsl.FSLCommand.set_default_output_type('NIFTI_GZ')
2930

@@ -82,7 +83,8 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id):
8283
return run_ids[task_id - 1], conds[task_id - 1], TR
8384

8485

85-
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, work_dir=None):
86+
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
87+
task_id=None, work_dir=None):
8688
"""Analyzes an open fmri dataset
8789
8890
Parameters
@@ -102,6 +104,7 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, work_dir=Non
102104
preproc = create_featreg_preproc(whichvol='first')
103105
modelfit = create_modelfit_workflow()
104106
fixed_fx = create_fixed_effects_flow()
107+
registration = create_reg_workflow()
105108

106109
"""
107110
Remove the plotting connection so that plot iterables don't propagate
@@ -119,15 +122,17 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, work_dir=Non
119122
glob(os.path.join(data_dir, 'sub*'))]
120123

121124
infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
122-
'model_id']),
125+
'model_id',
126+
'task_id']),
123127
name='infosource')
124128
if subject is None:
125129
infosource.iterables = [('subject_id', subjects),
126130
('model_id', [model_id])]
127131
else:
128132
infosource.iterables = [('subject_id',
129133
[subjects[subjects.index(subject)]]),
130-
('model_id', [model_id])]
134+
('model_id', [model_id]),
135+
('task_id', [task_id])]
131136

132137
subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
133138
'task_id', 'model_id'],
@@ -141,20 +146,20 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, work_dir=Non
141146
"""
142147

143148
datasource = pe.Node(nio.DataGrabber(infields=['subject_id', 'run_id',
144-
'model_id'],
149+
'task_id', 'model_id'],
145150
outfields=['anat', 'bold', 'behav']),
146151
name='datasource')
147152
datasource.inputs.base_directory = data_dir
148153
datasource.inputs.template = '*'
149154
datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz',
150-
'bold': '%s/BOLD/task001_r*/bold.nii.gz',
151-
'behav': ('%s/model/model%03d/onsets/task001_'
155+
'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
156+
'behav': ('%s/model/model%03d/onsets/task%03d_'
152157
'run%03d/cond*.txt')}
153158
datasource.inputs.template_args = {'anat': [['subject_id']],
154-
'bold': [['subject_id']],
159+
'bold': [['subject_id', 'task_id']],
155160
'behav': [['subject_id', 'model_id',
156-
'run_id']]}
157-
datasource.inputs.sorted = True
161+
'task_id', 'run_id']]}
162+
datasource.inputs.sort_filelist = True
158163

159164
"""
160165
Create meta workflow
@@ -163,8 +168,10 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None, work_dir=Non
163168
wf = pe.Workflow(name='openfmri')
164169
wf.connect(infosource, 'subject_id', subjinfo, 'subject_id')
165170
wf.connect(infosource, 'model_id', subjinfo, 'model_id')
171+
wf.connect(infosource, 'task_id', subjinfo, 'task_id')
166172
wf.connect(infosource, 'subject_id', datasource, 'subject_id')
167173
wf.connect(infosource, 'model_id', datasource, 'model_id')
174+
wf.connect(infosource, 'task_id', datasource, 'task_id')
168175
wf.connect(subjinfo, 'run_id', datasource, 'run_id')
169176
wf.connect([(datasource, preproc, [('bold', 'inputspec.func')]),
170177
])
@@ -182,21 +189,23 @@ def get_highpass(TR, hpcutoff):
182189
Setup a basic set of contrasts, a t-test per condition
183190
"""
184191

185-
def get_contrasts(base_dir, model_id, conds):
192+
def get_contrasts(base_dir, model_id, task_id, conds):
186193
import numpy as np
187194
import os
188195
contrast_file = os.path.join(base_dir, 'models', 'model%03d' % model_id,
189196
'task_contrasts.txt')
190197
contrast_def = np.genfromtxt(contrast_file, dtype=object)
191198
contrasts = []
192199
for row in contrast_def:
193-
con = [row[0], 'T', ['cond%03d' % i for i in range(len(conds))],
194-
row[1:].astype(float).tolist()]
200+
if row[0] != 'task%03d' % task_id:
201+
continue
202+
con = [row[1], 'T', ['cond%03d' % i for i in range(len(conds))],
203+
row[2:].astype(float).tolist()]
195204
contrasts.append(con)
196205
return contrasts
197206

198207
contrastgen = pe.Node(niu.Function(input_names=['base_dir', 'model_id',
199-
'conds'],
208+
'task_id', 'conds'],
200209
output_names=['contrasts'],
201210
function=get_contrasts),
202211
name='contrastgen')
@@ -221,6 +230,7 @@ def get_contrasts(base_dir, model_id, conds):
221230
wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
222231
wf.connect(subjinfo, 'conds', contrastgen, 'conds')
223232
wf.connect(infosource, 'model_id', contrastgen, 'model_id')
233+
wf.connect(infosource, 'task_id', contrastgen, 'task_id')
224234
wf.connect(contrastgen, 'contrasts', modelfit, 'inputspec.contrasts')
225235

226236
wf.connect([(preproc, art, [('outputspec.motion_parameters',
@@ -271,21 +281,67 @@ def num_copes(files):
271281
])
272282
])
273283

284+
wf.connect(preproc, 'outputspec.mean', registration, 'inputspec.mean_image')
285+
wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image')
286+
registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
287+
288+
def merge_files(copes, varcopes):
289+
out_files = []
290+
splits = []
291+
out_files.extend(copes)
292+
splits.append(len(copes))
293+
out_files.extend(varcopes)
294+
splits.append(len(varcopes))
295+
return out_files, splits
296+
297+
mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes'],
298+
output_names=['out_files', 'splits'],
299+
function=merge_files),
300+
name='merge_files')
301+
wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
302+
[('copes', 'copes'),
303+
('varcopes', 'varcopes'),
304+
])])
305+
wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')
306+
307+
def split_files(in_files, splits):
308+
copes = in_files[:splits[1]]
309+
varcopes = in_files[splits[1]:]
310+
return copes, varcopes
311+
312+
splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
313+
output_names=['copes', 'varcopes'],
314+
function=split_files),
315+
name='split_files')
316+
wf.connect(mergefunc, 'splits', splitfunc, 'splits')
317+
wf.connect(registration, 'outputspec.transformed_files',
318+
splitfunc, 'in_files')
319+
320+
274321
"""
275322
Connect to a datasink
276323
"""
277324

278-
def get_subs(subject_id, conds):
279-
subs = [('_subject_id_%s/' % subject_id, '')]
325+
def get_subs(subject_id, conds, model_id, task_id):
326+
subs = [('_subject_id_%s_' % subject_id, '')]
327+
subs.append(('_model_id_%d' % model_id, 'model%03d' %model_id))
328+
subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
329+
subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp_warp',
330+
'mean'))
280331
for i in range(len(conds)):
281332
subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
282333
subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
283334
subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
284335
subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
285336
subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
337+
subs.append(('_warpall%d/cope1_warp_warp.' % i,
338+
'cope%02d.' % (i + 1)))
339+
subs.append(('_warpall%d/varcope1_warp_warp.' % (len(conds) + i),
340+
'varcope%02d.' % (i + 1)))
286341
return subs
287342

288-
subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds'],
343+
subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
344+
'model_id', 'task_id'],
289345
output_names=['substitutions'],
290346
function=get_subs),
291347
name='subsgen')
@@ -294,7 +350,9 @@ def get_subs(subject_id, conds):
294350
name="datasink")
295351
wf.connect(infosource, 'subject_id', datasink, 'container')
296352
wf.connect(infosource, 'subject_id', subsgen, 'subject_id')
297-
wf.connect(subjinfo, 'conds', subsgen, 'conds')
353+
wf.connect(infosource, 'model_id', subsgen, 'model_id')
354+
wf.connect(infosource, 'task_id', subsgen, 'task_id')
355+
wf.connect(contrastgen, 'contrasts', subsgen, 'conds')
298356
wf.connect(subsgen, 'substitutions', datasink, 'substitutions')
299357
wf.connect([(fixed_fx.get_node('outputspec'), datasink,
300358
[('res4d', 'res4d'),
@@ -303,13 +361,17 @@ def get_subs(subject_id, conds):
303361
('zstats', 'zstats'),
304362
('tstats', 'tstats')])
305363
])
364+
wf.connect([(splitfunc, datasink,
365+
[('copes', 'copes.mni'),
366+
('varcopes', 'varcopes.mni'),
367+
])])
368+
wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni')
306369

307370
"""
308371
Set processing parameters
309372
"""
310373

311374
hpcutoff = 120.
312-
subjinfo.inputs.task_id = 1
313375
preproc.inputs.inputspec.fwhm = 6.0
314376
gethighpass.inputs.hpcutoff = hpcutoff
315377
modelspec.inputs.high_pass_filter_cutoff = hpcutoff
@@ -324,16 +386,24 @@ def get_subs(subject_id, conds):
324386
wf.config['execution'] = dict(crashdump_dir=os.path.join(work_dir,
325387
'crashdumps'),
326388
stop_on_first_crash=True)
327-
wf.run('MultiProc', plugin_args={'n_procs': 2})
389+
#wf.run('MultiProc', plugin_args={'n_procs': 4})
390+
wf.run('Linear', plugin_args={'n_procs': 4})
391+
wf.export('openfmri.py')
392+
wf.write_graph(dotfilename='hgraph.dot', graph2use='hierarchical')
393+
wf.write_graph(dotfilename='egraph.dot', graph2use='exec')
394+
wf.write_graph(dotfilename='fgraph.dot', graph2use='flat')
395+
wf.write_graph(dotfilename='ograph.dot', graph2use='orig')
328396

329397
if __name__ == '__main__':
330398
import argparse
331399
parser = argparse.ArgumentParser(prog='fmri_openfmri.py',
332400
description=__doc__)
333-
parser.add_argument('--datasetdir', required=True)
334-
parser.add_argument('--subject', default=None)
335-
parser.add_argument('--model', default=1)
401+
parser.add_argument('-d', '--datasetdir', required=True)
402+
parser.add_argument('-s', '--subject', default=None)
403+
parser.add_argument('-m', '--model', default=1)
404+
parser.add_argument('-t', '--task', default=1)
336405
args = parser.parse_args()
337406
analyze_openfmri_dataset(data_dir=os.path.abspath(args.datasetdir),
338407
subject=args.subject,
339-
model_id=int(args.model))
408+
model_id=int(args.model),
409+
task_id=int(args.task))

nipype/workflows/fmri/fsl/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,3 +1,4 @@
11
from .preprocess import (create_susan_smooth, create_fsl_fs_preproc,
2-
create_parallelfeat_preproc, create_featreg_preproc)
2+
create_parallelfeat_preproc, create_featreg_preproc,
3+
create_reg_workflow)
34
from .estimate import create_modelfit_workflow, create_fixed_effects_flow

0 commit comments

Comments (0)