@@ -87,7 +87,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id):


 def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
-                             task_id=None, output_dir=None):
+                             task_id=None, output_dir=None, subj_prefix='*'):
     """Analyzes an open fmri dataset

     Parameters
@@ -121,22 +121,22 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
     Set up openfmri data specific components
     """

-    subjects = [path.split(os.path.sep)[-1] for path in
-                glob(os.path.join(data_dir, 'sub*'))]
+    subjects = sorted([path.split(os.path.sep)[-1] for path in
+                       glob(os.path.join(data_dir, subj_prefix))])

     infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
                                                        'model_id',
                                                        'task_id']),
                          name='infosource')
-    if subject is None:
-        infosource.iterables = [('subject_id', subjects[:2]),
+    if len(subject) == 0:
+        infosource.iterables = [('subject_id', subjects),
                                 ('model_id', [model_id]),
-                                ('task_id', [task_id])]
+                                ('task_id', task_id)]
     else:
         infosource.iterables = [('subject_id',
-                                 [subjects[subjects.index(subject)]]),
+                                 [subjects[subjects.index(subj)] for subj in subject]),
                                 ('model_id', [model_id]),
-                                ('task_id', [task_id])]
+                                ('task_id', task_id)]

     subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
                                                  'task_id', 'model_id'],
@@ -156,7 +156,7 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
                          name='datasource')
     datasource.inputs.base_directory = data_dir
     datasource.inputs.template = '*'
-    datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz',
+    datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
                                         'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
                                         'behav': ('%s/model/model%03d/onsets/task%03d_'
                                                   'run%03d/cond*.txt'),
@@ -200,13 +200,19 @@ def get_highpass(TR, hpcutoff):
     def get_contrasts(contrast_file, task_id, conds):
         import numpy as np
         contrast_def = np.genfromtxt(contrast_file, dtype=object)
+        if len(contrast_def.shape) == 1:
+            contrast_def = contrast_def[None, :]
         contrasts = []
         for row in contrast_def:
             if row[0] != 'task%03d' % task_id:
                 continue
-            con = [row[1], 'T', ['cond%03d' % i for i in range(len(conds))],
+            con = [row[1], 'T', ['cond%03d' % (i + 1) for i in range(len(conds))],
                    row[2:].astype(float).tolist()]
             contrasts.append(con)
+        # add auto contrasts for each column
+        for i, cond in enumerate(conds):
+            con = [cond, 'T', ['cond%03d' % (i + 1)], [1]]
+            contrasts.append(con)
         return contrasts

     contrastgen = pe.Node(niu.Function(input_names=['contrast_file',
@@ -229,8 +235,19 @@ def get_contrasts(contrast_file, task_id, conds):
                         name="modelspec")
     modelspec.inputs.input_units = 'secs'

+    def check_behav_list(behav):
+        out_behav = []
+        if isinstance(behav, basestring):
+            behav = [behav]
+        for val in behav:
+            if not isinstance(val, list):
+                out_behav.append([val])
+            else:
+                out_behav.append(val)
+        return out_behav
+
     wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
-    wf.connect(datasource, 'behav', modelspec, 'event_files')
+    wf.connect(datasource, ('behav', check_behav_list), modelspec, 'event_files')
     wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
     wf.connect(subjinfo, 'conds', contrastgen, 'conds')
     wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
@@ -288,33 +305,41 @@ def num_copes(files):
     wf.connect(preproc, 'outputspec.mean', registration, 'inputspec.mean_image')
     wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image')
     registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
+    registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
+    registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'

-    def merge_files(copes, varcopes):
+    def merge_files(copes, varcopes, zstats):
         out_files = []
         splits = []
         out_files.extend(copes)
         splits.append(len(copes))
         out_files.extend(varcopes)
         splits.append(len(varcopes))
+        out_files.extend(zstats)
+        splits.append(len(zstats))
         return out_files, splits

-    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes'],
+    mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
+                                                  'zstats'],
                                      output_names=['out_files', 'splits'],
                                      function=merge_files),
                         name='merge_files')
     wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
                  [('copes', 'copes'),
                   ('varcopes', 'varcopes'),
+                  ('zstats', 'zstats'),
                   ])])
     wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')

     def split_files(in_files, splits):
-        copes = in_files[:splits[1]]
-        varcopes = in_files[splits[1]:]
-        return copes, varcopes
+        copes = in_files[:splits[0]]
+        varcopes = in_files[splits[0]:(splits[0] + splits[1])]
+        zstats = in_files[(splits[0] + splits[1]):]
+        return copes, varcopes, zstats

     splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
-                                     output_names=['copes', 'varcopes'],
+                                     output_names=['copes', 'varcopes',
+                                                   'zstats'],
                                      function=split_files),
                         name='split_files')
     wf.connect(mergefunc, 'splits', splitfunc, 'splits')
@@ -330,18 +355,23 @@ def get_subs(subject_id, conds, model_id, task_id):
         subs = [('_subject_id_%s_' % subject_id, '')]
         subs.append(('_model_id_%d' % model_id, 'model%03d' % model_id))
         subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
-        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp_warp',
+        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
                      'mean'))
+        subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
+                     'affine'))
+
         for i in range(len(conds)):
             subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
             subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
             subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
             subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
             subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
-            subs.append(('_warpall%d/cope1_warp_warp.' % i,
+            subs.append(('_warpall%d/cope1_warp.' % i,
                          'cope%02d.' % (i + 1)))
-            subs.append(('_warpall%d/varcope1_warp_warp.' % (len(conds) + i),
+            subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
                          'varcope%02d.' % (i + 1)))
+            subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
+                         'zstat%02d.' % (i + 1)))
         return subs

     subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
@@ -368,8 +398,11 @@ def get_subs(subject_id, conds, model_id, task_id):
     wf.connect([(splitfunc, datasink,
                  [('copes', 'copes.mni'),
                   ('varcopes', 'varcopes.mni'),
+                  ('zstats', 'zstats.mni'),
                   ])])
     wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni')
+    wf.connect(registration, 'outputspec.func2anat_transform', datasink, 'xfm.mean2anat')
+    wf.connect(registration, 'outputspec.anat2target_transform', datasink, 'xfm.anat2target')

     """
     Set processing parameters
@@ -392,12 +425,15 @@ def get_subs(subject_id, conds, model_id, task_id):
     parser = argparse.ArgumentParser(prog='fmri_openfmri.py',
                                      description=__doc__)
     parser.add_argument('-d', '--datasetdir', required=True)
-    parser.add_argument('-s', '--subject', default=None,
+    parser.add_argument('-s', '--subject', default=[],
+                        nargs='+', type=str,
                         help="Subject name (e.g. 'sub001')")
    parser.add_argument('-m', '--model', default=1,
                        help="Model index" + defstr)
-    parser.add_argument('-t', '--task', default=1,
-                        help="Task index" + defstr)
+    parser.add_argument('-x', '--subjectprefix', default='sub*',
+                        help="Subject prefix" + defstr)
+    parser.add_argument('-t', '--task', default=1,  # nargs='+',
+                        type=int, help="Task index" + defstr)
     parser.add_argument("-o", "--output_dir", dest="outdir",
                         help="Output directory base")
     parser.add_argument("-w", "--work_dir", dest="work_dir",
@@ -421,7 +457,8 @@ def get_subs(subject_id, conds, model_id, task_id):
     wf = analyze_openfmri_dataset(data_dir=os.path.abspath(args.datasetdir),
                                   subject=args.subject,
                                   model_id=int(args.model),
-                                  task_id=int(args.task),
+                                  task_id=[int(args.task)],
+                                  subj_prefix=args.subjectprefix,
                                   output_dir=outdir)
     wf.base_dir = work_dir
     if args.plugin_args:
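
For reference, a sketch of how the updated command-line interface might be invoked after this change (the dataset, output, and work directories below are placeholder paths; per the argparse changes above, -s now accepts one or more subject names and -x sets the subject glob prefix):

    python fmri_openfmri.py -d /path/to/openfmri_dataset -s sub001 sub002 -m 1 -t 1 -x 'sub*' -o /path/to/output -w /path/to/workdir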