Skip to content

Commit b065c71

Browse files
committed
Merge branch 'master' into enh/AddInterfaceFSLWarpUtils
Fixed some typos. Conflicts: CHANGES
2 parents 46f38d9 + 48bf276 commit b065c71

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

58 files changed

+2278
-545
lines changed

.travis.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ before_install:
1212
- sudo rm -rf /dev/shm
1313
- sudo ln -s /run/shm /dev/shm
1414
- bash <(wget -q -O- http://neuro.debian.net/_files/neurodebian-travis.sh)
15-
- travis_retry sudo apt-get install -qq --no-install-recommends fsl afni
15+
- travis_retry sudo apt-get install -qq --no-install-recommends fsl afni elastix
1616
- travis_retry sudo apt-get install -qq fsl-atlases
1717
- source /etc/fsl/fsl.sh
1818

CHANGES

Lines changed: 16 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,24 @@ Next Release
22
============
33

44
* ENH: Added new interfaces (fsl.utils.WarpUtils, ConvertWarp) to fnirtfileutils and convertwarp
5-
* API: Interfaces to external packages are no longer available in the top-level ``nipype`` namespace, and must be imported directly (e.g. ``from nipype.interfaces import fsl``).
5+
* API: Interfaces to external packages are no longer available in the top-level
6+
``nipype`` namespace, and must be imported directly (e.g.
7+
``from nipype.interfaces import fsl``).
8+
* ENH: New FSL interface: ProbTrackX2
9+
* ENH: New misc algorithm: NormalizeProbabilityMapSet
10+
* ENH: Support for elastix via a set of new interfaces: Registration, ApplyWarp,
11+
AnalyzeWarp, PointsWarp, and EditTransform
612
* ENH: New ANTs interface: ApplyTransformsToPoints
13+
* ENH: New metrics group in algorithms. Now Distance, Overlap, and FuzzyOverlap
14+
are found in nipype.algorithms.metrics instead of misc
15+
* ENH: New interface in algorithms.metrics: ErrorMap (a voxel-wise diff map).
716
* ENH: New FreeSurfer workflow: create_skullstripped_recon_flow()
817
* ENH: New data grabbing interface that works over SSH connections, SSHDataGrabber
18+
* ENH: New color mode for write_graph
19+
* ENH: You can now force MapNodes to be run serially
20+
* ENH: New ANTs interface: LaplacianThickness
921
* FIX: MRTrix tracking algorithms were ignoring mask parameters.
22+
* FIX: FNIRT registration pathway and associated OpenFMRI example script
1023

1124
Release 0.9.2 (January 31, 2014)
1225
============
@@ -27,7 +40,8 @@ Release 0.9.0 (December 20, 2013)
2740
* ENH: new tools for defining workflows: JoinNode, synchronize and itersource
2841
* ENH: W3C PROV support with optional RDF export built into Nipype
2942
* ENH: Added support for Simple Linux Utility Resource Management (SLURM)
30-
* ENH: AFNI interfaces refactor, prefix, suffix are replaced by "flexible_%s_templates"
43+
* ENH: AFNI interfaces refactor, prefix, suffix are replaced by
44+
"flexible_%s_templates"
3145
* ENH: New SPM interfaces:
3246
- spm.ResliceToReference,
3347
- spm.DicomImport

doc/users/install.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ Download
1010
--------
1111

1212
Release 0.9.2: [`zip <https://github.com/nipy/nipype/archive/0.9.2.zip>`__ `tar.gz
13-
<https://github.com/nipy/nipype/archive/0.9.1.tar.gz>`__]
13+
<https://github.com/nipy/nipype/archive/0.9.2.tar.gz>`__]
1414

1515
Development: [`zip <http://github.com/nipy/nipype/zipball/master>`__ `tar.gz
1616
<http://github.com/nipy/nipype/tarball/master>`__]

examples/fmri_openfmri.py

Lines changed: 52 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,7 @@ def get_subjectinfo(subject_id, base_dir, task_id, model_id):
8787

8888

8989
def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
90-
task_id=None, output_dir=None):
90+
task_id=None, output_dir=None, subj_prefix='*'):
9191
"""Analyzes an open fmri dataset
9292
9393
Parameters
@@ -122,21 +122,21 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
122122
"""
123123

124124
subjects = sorted([path.split(os.path.sep)[-1] for path in
125-
glob(os.path.join(data_dir, 'sub*'))])
125+
glob(os.path.join(data_dir, subj_prefix))])
126126

127127
infosource = pe.Node(niu.IdentityInterface(fields=['subject_id',
128128
'model_id',
129129
'task_id']),
130130
name='infosource')
131-
if subject is None:
131+
if len(subject) == 0:
132132
infosource.iterables = [('subject_id', subjects),
133133
('model_id', [model_id]),
134-
('task_id', [task_id])]
134+
('task_id', task_id)]
135135
else:
136136
infosource.iterables = [('subject_id',
137-
[subjects[subjects.index(subject)]]),
137+
[subjects[subjects.index(subj)] for subj in subject]),
138138
('model_id', [model_id]),
139-
('task_id', [task_id])]
139+
('task_id', task_id)]
140140

141141
subjinfo = pe.Node(niu.Function(input_names=['subject_id', 'base_dir',
142142
'task_id', 'model_id'],
@@ -156,7 +156,7 @@ def analyze_openfmri_dataset(data_dir, subject=None, model_id=None,
156156
name='datasource')
157157
datasource.inputs.base_directory = data_dir
158158
datasource.inputs.template = '*'
159-
datasource.inputs.field_template = {'anat': '%s/anatomy/highres001.nii.gz',
159+
datasource.inputs.field_template = {'anat': '%s/anatomy/T1_001.nii.gz',
160160
'bold': '%s/BOLD/task%03d_r*/bold.nii.gz',
161161
'behav': ('%s/model/model%03d/onsets/task%03d_'
162162
'run%03d/cond*.txt'),
@@ -235,8 +235,19 @@ def get_contrasts(contrast_file, task_id, conds):
235235
name="modelspec")
236236
modelspec.inputs.input_units = 'secs'
237237

238+
def check_behav_list(behav):
239+
out_behav = []
240+
if isinstance(behav, basestring):
241+
behav = [behav]
242+
for val in behav:
243+
if not isinstance(val, list):
244+
out_behav.append([val])
245+
else:
246+
out_behav.append(val)
247+
return out_behav
248+
238249
wf.connect(subjinfo, 'TR', modelspec, 'time_repetition')
239-
wf.connect(datasource, 'behav', modelspec, 'event_files')
250+
wf.connect(datasource, ('behav', check_behav_list), modelspec, 'event_files')
240251
wf.connect(subjinfo, 'TR', modelfit, 'inputspec.interscan_interval')
241252
wf.connect(subjinfo, 'conds', contrastgen, 'conds')
242253
wf.connect(datasource, 'contrasts', contrastgen, 'contrast_file')
@@ -294,33 +305,41 @@ def num_copes(files):
294305
wf.connect(preproc, 'outputspec.mean', registration, 'inputspec.mean_image')
295306
wf.connect(datasource, 'anat', registration, 'inputspec.anatomical_image')
296307
registration.inputs.inputspec.target_image = fsl.Info.standard_image('MNI152_T1_2mm.nii.gz')
308+
registration.inputs.inputspec.target_image_brain = fsl.Info.standard_image('MNI152_T1_2mm_brain.nii.gz')
309+
registration.inputs.inputspec.config_file = 'T1_2_MNI152_2mm'
297310

298-
def merge_files(copes, varcopes):
311+
def merge_files(copes, varcopes, zstats):
299312
out_files = []
300313
splits = []
301314
out_files.extend(copes)
302315
splits.append(len(copes))
303316
out_files.extend(varcopes)
304317
splits.append(len(varcopes))
318+
out_files.extend(zstats)
319+
splits.append(len(zstats))
305320
return out_files, splits
306321

307-
mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes'],
322+
mergefunc = pe.Node(niu.Function(input_names=['copes', 'varcopes',
323+
'zstats'],
308324
output_names=['out_files', 'splits'],
309325
function=merge_files),
310326
name='merge_files')
311327
wf.connect([(fixed_fx.get_node('outputspec'), mergefunc,
312328
[('copes', 'copes'),
313329
('varcopes', 'varcopes'),
330+
('zstats', 'zstats'),
314331
])])
315332
wf.connect(mergefunc, 'out_files', registration, 'inputspec.source_files')
316333

317334
def split_files(in_files, splits):
318-
copes = in_files[:splits[1]]
319-
varcopes = in_files[splits[1]:]
320-
return copes, varcopes
335+
copes = in_files[:splits[0]]
336+
varcopes = in_files[splits[0]:(splits[0] + splits[1])]
337+
zstats = in_files[(splits[0] + splits[1]):]
338+
return copes, varcopes, zstats
321339

322340
splitfunc = pe.Node(niu.Function(input_names=['in_files', 'splits'],
323-
output_names=['copes', 'varcopes'],
341+
output_names=['copes', 'varcopes',
342+
'zstats'],
324343
function=split_files),
325344
name='split_files')
326345
wf.connect(mergefunc, 'splits', splitfunc, 'splits')
@@ -336,18 +355,23 @@ def get_subs(subject_id, conds, model_id, task_id):
336355
subs = [('_subject_id_%s_' % subject_id, '')]
337356
subs.append(('_model_id_%d' % model_id, 'model%03d' %model_id))
338357
subs.append(('task_id_%d/' % task_id, '/task%03d_' % task_id))
339-
subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp_warp',
358+
subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_warp',
340359
'mean'))
360+
subs.append(('bold_dtype_mcf_mask_smooth_mask_gms_tempfilt_mean_flirt',
361+
'affine'))
362+
341363
for i in range(len(conds)):
342364
subs.append(('_flameo%d/cope1.' % i, 'cope%02d.' % (i + 1)))
343365
subs.append(('_flameo%d/varcope1.' % i, 'varcope%02d.' % (i + 1)))
344366
subs.append(('_flameo%d/zstat1.' % i, 'zstat%02d.' % (i + 1)))
345367
subs.append(('_flameo%d/tstat1.' % i, 'tstat%02d.' % (i + 1)))
346368
subs.append(('_flameo%d/res4d.' % i, 'res4d%02d.' % (i + 1)))
347-
subs.append(('_warpall%d/cope1_warp_warp.' % i,
369+
subs.append(('_warpall%d/cope1_warp.' % i,
348370
'cope%02d.' % (i + 1)))
349-
subs.append(('_warpall%d/varcope1_warp_warp.' % (len(conds) + i),
371+
subs.append(('_warpall%d/varcope1_warp.' % (len(conds) + i),
350372
'varcope%02d.' % (i + 1)))
373+
subs.append(('_warpall%d/zstat1_warp.' % (2 * len(conds) + i),
374+
'zstat%02d.' % (i + 1)))
351375
return subs
352376

353377
subsgen = pe.Node(niu.Function(input_names=['subject_id', 'conds',
@@ -374,8 +398,11 @@ def get_subs(subject_id, conds, model_id, task_id):
374398
wf.connect([(splitfunc, datasink,
375399
[('copes', 'copes.mni'),
376400
('varcopes', 'varcopes.mni'),
401+
('zstats', 'zstats.mni'),
377402
])])
378403
wf.connect(registration, 'outputspec.transformed_mean', datasink, 'mean.mni')
404+
wf.connect(registration, 'outputspec.func2anat_transform', datasink, 'xfm.mean2anat')
405+
wf.connect(registration, 'outputspec.anat2target_transform', datasink, 'xfm.anat2target')
379406

380407
"""
381408
Set processing parameters
@@ -398,12 +425,15 @@ def get_subs(subject_id, conds, model_id, task_id):
398425
parser = argparse.ArgumentParser(prog='fmri_openfmri.py',
399426
description=__doc__)
400427
parser.add_argument('-d', '--datasetdir', required=True)
401-
parser.add_argument('-s', '--subject', default=None,
428+
parser.add_argument('-s', '--subject', default=[],
429+
nargs='+', type=str,
402430
help="Subject name (e.g. 'sub001')")
403431
parser.add_argument('-m', '--model', default=1,
404432
help="Model index" + defstr)
405-
parser.add_argument('-t', '--task', default=1,
406-
help="Task index" + defstr)
433+
parser.add_argument('-x', '--subjectprefix', default='sub*',
434+
help="Subject prefix" + defstr)
435+
parser.add_argument('-t', '--task', default=1, #nargs='+',
436+
type=int, help="Task index" + defstr)
407437
parser.add_argument("-o", "--output_dir", dest="outdir",
408438
help="Output directory base")
409439
parser.add_argument("-w", "--work_dir", dest="work_dir",
@@ -427,7 +457,8 @@ def get_subs(subject_id, conds, model_id, task_id):
427457
wf = analyze_openfmri_dataset(data_dir=os.path.abspath(args.datasetdir),
428458
subject=args.subject,
429459
model_id=int(args.model),
430-
task_id=int(args.task),
460+
task_id=[int(args.task)],
461+
subj_prefix=args.subjectprefix,
431462
output_dir=outdir)
432463
wf.base_dir = work_dir
433464
if args.plugin_args:

0 commit comments

Comments
 (0)