diff --git a/examples/README b/examples/README
index 0f0159013e..3e482bb975 100644
--- a/examples/README
+++ b/examples/README
@@ -1,4 +1,4 @@
 A dataset for use with these scripts can be downloaded from the nipype
 website. At the time of writing, it's at:
 
-http://nipy.org/nipype/users/pipeline_tutorial.html
\ No newline at end of file
+http://nipype.readthedocs.io/en/0.12.0/users/pipeline_tutorial.html
diff --git a/examples/rsfmri_vol_surface_preprocessing_nipy.py b/examples/rsfmri_vol_surface_preprocessing_nipy.py
index b46ae28ab6..b745704023 100644
--- a/examples/rsfmri_vol_surface_preprocessing_nipy.py
+++ b/examples/rsfmri_vol_surface_preprocessing_nipy.py
@@ -52,7 +52,9 @@
 from nipype.interfaces.base import CommandLine
 CommandLine.set_default_terminal_output('allatonce')
 
+# https://github.com/moloney/dcmstack
 from dcmstack.extract import default_extractor
+# pip install pydicom
 from dicom import read_file
 
 from nipype.interfaces import (fsl, Function, ants, freesurfer, nipy)
@@ -64,6 +66,7 @@
 from nipype.algorithms.rapidart import ArtifactDetect
 from nipype.algorithms.misc import TSNR
+from nipype.algorithms.compcor import ACompCor
 from nipype.interfaces.utility import Rename, Merge, IdentityInterface
 from nipype.utils.filemanip import filename_to_list
 from nipype.interfaces.io import DataSink, FreeSurferSource
@@ -228,51 +231,6 @@ def build_filter1(motion_params, comp_norm, outliers, detrend_poly=None):
         out_files.append(filename)
     return out_files
 
-
-def extract_noise_components(realigned_file, mask_file, num_components=5,
-                             extra_regressors=None):
-    """Derive components most reflective of physiological noise
-
-    Parameters
-    ----------
-    realigned_file: a 4D Nifti file containing realigned volumes
-    mask_file: a 3D Nifti file containing white matter + ventricular masks
-    num_components: number of components to use for noise decomposition
-    extra_regressors: additional regressors to add
-
-    Returns
-    -------
-    components_file: a text file containing the noise components
-    """
-    imgseries = nb.load(realigned_file)
-    components = None
-    for filename in filename_to_list(mask_file):
-        mask = nb.load(filename).get_data()
-        if len(np.nonzero(mask > 0)[0]) == 0:
-            continue
-        voxel_timecourses = imgseries.get_data()[mask > 0]
-        voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0
-        # remove mean and normalize by variance
-        # voxel_timecourses.shape == [nvoxels, time]
-        X = voxel_timecourses.T
-        stdX = np.std(X, axis=0)
-        stdX[stdX == 0] = 1.
-        stdX[np.isnan(stdX)] = 1.
-        stdX[np.isinf(stdX)] = 1.
-        X = (X - np.mean(X, axis=0)) / stdX
-        u, _, _ = sp.linalg.svd(X, full_matrices=False)
-        if components is None:
-            components = u[:, :num_components]
-        else:
-            components = np.hstack((components, u[:, :num_components]))
-    if extra_regressors:
-        regressors = np.genfromtxt(extra_regressors)
-        components = np.hstack((components, regressors))
-    components_file = os.path.join(os.getcwd(), 'noise_components.txt')
-    np.savetxt(components_file, components, fmt=b"%.10f")
-    return components_file
-
-
 def rename(in_files, suffix=None):
     from nipype.utils.filemanip import (filename_to_list, split_filename,
                                         list_to_filename)
@@ -592,7 +550,7 @@ def create_workflow(files,
     realign.inputs.slice_info = 2
     realign.plugin_args = {'sbatch_args': '-c%d' % 4}
 
-    # Comute TSNR on realigned data regressing polynomials upto order 2
+    # Compute TSNR on realigned data regressing polynomials up to order 2
     tsnr = MapNode(TSNR(regress_poly=2), iterfield=['in_file'], name='tsnr')
     wf.connect(realign, "out_file", tsnr, "in_file")
 
@@ -694,14 +652,10 @@ def merge_files(in1, in2):
                filter1, 'out_res_name')
     wf.connect(createfilter1, 'out_files', filter1, 'design')
 
-    createfilter2 = MapNode(Function(input_names=['realigned_file', 'mask_file',
-                                                  'num_components',
-                                                  'extra_regressors'],
-                                     output_names=['out_files'],
-                                     function=extract_noise_components,
-                                     imports=imports),
+    createfilter2 = MapNode(ACompCor(),
                             iterfield=['realigned_file', 'extra_regressors'],
                             name='makecompcorrfilter')
+    createfilter2.inputs.components_file = 'noise_components.txt'
     createfilter2.inputs.num_components = num_components
 
     wf.connect(createfilter1, 'out_files', createfilter2, 'extra_regressors')
@@ -717,7 +671,7 @@ def merge_files(in1, in2):
     wf.connect(filter1, 'out_res', filter2, 'in_file')
     wf.connect(filter1, ('out_res', rename, '_cleaned'),
                filter2, 'out_res_name')
-    wf.connect(createfilter2, 'out_files', filter2, 'design')
+    wf.connect(createfilter2, 'components_file', filter2, 'design')
     wf.connect(mask, 'mask_file', filter2, 'mask')
 
     bandpass = Node(Function(input_names=['files', 'lowpass_freq',
@@ -923,7 +877,7 @@ def get_names(files, suffix):
     wf.connect(smooth, 'out_file', datasink, 'resting.timeseries.@smoothed')
     wf.connect(createfilter1, 'out_files',
               datasink, 'resting.regress.@regressors')
-    wf.connect(createfilter2, 'out_files',
+    wf.connect(createfilter2, 'components_file',
               datasink, 'resting.regress.@compcorr')
     wf.connect(maskts, 'out_file', datasink, 'resting.timeseries.target')
     wf.connect(sampleaparc, 'summary_file',
diff --git a/nipype/algorithms/compcor.py b/nipype/algorithms/compcor.py
new file mode 100644
index 0000000000..a2f2f434b3
--- /dev/null
+++ b/nipype/algorithms/compcor.py
@@ -0,0 +1,170 @@
+# -*- coding: utf-8 -*-
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+'''
+Miscellaneous algorithms
+
+    Change directory to provide relative paths for doctests
+    >>> import os
+    >>> filepath = os.path.dirname(os.path.realpath(__file__))
+    >>> datadir = os.path.realpath(os.path.join(filepath, '../testing/data'))
+    >>> os.chdir(datadir)
+
+'''
+from ..interfaces.base import (BaseInterfaceInputSpec, TraitedSpec,
+                               BaseInterface, traits, File)
+from ..pipeline import engine as pe
+from ..interfaces.utility import IdentityInterface
+from .misc import regress_poly
+
+import nibabel as nb
+import numpy as np
+from scipy import linalg, stats
+import os
+
+class CompCorInputSpec(BaseInterfaceInputSpec):
+    realigned_file = File(exists=True, mandatory=True,
+                          desc='already realigned brain image (4D)')
+    mask_file = File(exists=True, mandatory=False,
+                     desc='mask file that determines ROI (3D)')
+    components_file = File('components_file.txt', exists=False,
+                           mandatory=False, usedefault=True,
+                           desc='filename to store physiological components')
+    num_components = traits.Int(6, usedefault=True)  # 6 for BOLD, 4 for ASL
+    use_regress_poly = traits.Bool(True, usedefault=True,
+                                   desc='use polynomial regression '
+                                        'pre-component extraction')
+    regress_poly_degree = traits.Range(low=1, default=1, usedefault=True,
+                                       desc='the degree of the polynomial to use')
+
+class CompCorOutputSpec(TraitedSpec):
+    components_file = File(exists=True,
+                           desc='text file containing the noise components')
+
+class CompCor(BaseInterface):
+    '''
+    Interface with core CompCor computation, used in aCompCor and tCompCor
+
+    Example
+    -------
+
+    >>> ccinterface = CompCor()
+    >>> ccinterface.inputs.realigned_file = 'functional.nii'
+    >>> ccinterface.inputs.mask_file = 'mask.nii'
+    >>> ccinterface.inputs.num_components = 1
+    >>> ccinterface.inputs.use_regress_poly = True
+    >>> ccinterface.inputs.regress_poly_degree = 2
+    '''
+    input_spec = CompCorInputSpec
+    output_spec = CompCorOutputSpec
+
+    def _run_interface(self, runtime):
+        imgseries = nb.load(self.inputs.realigned_file).get_data()
+        mask = nb.load(self.inputs.mask_file).get_data()
+        voxel_timecourses = imgseries[mask > 0]
+        # Zero-out any bad values
+        voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0
+
+        # from paper:
+        # "The constant and linear trends of the columns in the matrix M were
+        # removed [prior to ...]"
+        if self.inputs.use_regress_poly:
+            voxel_timecourses = regress_poly(self.inputs.regress_poly_degree,
+                                             voxel_timecourses)
+        voxel_timecourses = voxel_timecourses - np.mean(voxel_timecourses,
+                                                        axis=1)[:, np.newaxis]
+
+        # "Voxel time series from the noise ROI (either anatomical or tSTD) were
+        # placed in a matrix M of size Nxm, with time along the row dimension
+        # and voxels along the column dimension."
+        M = voxel_timecourses.T
+        numvols = M.shape[0]
+        numvoxels = M.shape[1]
+
+        # "[... were removed] prior to column-wise variance normalization."
+        M = M / self._compute_tSTD(M, 1.)
+
+        # "The covariance matrix C = MMT was constructed and decomposed into its
+        # principal components using a singular value decomposition."
+        u, _, _ = linalg.svd(M, full_matrices=False)
+        components = u[:, :self.inputs.num_components]
+        components_file = os.path.join(os.getcwd(), self.inputs.components_file)
+        np.savetxt(components_file, components, fmt="%.10f")
+        return runtime
+
+    def _list_outputs(self):
+        outputs = self._outputs().get()
+        outputs['components_file'] = os.path.abspath(self.inputs.components_file)
+        return outputs
+
+    def _compute_tSTD(self, M, x):
+        stdM = np.std(M, axis=0)
+        # set bad values to x
+        stdM[stdM == 0] = x
+        stdM[np.isnan(stdM)] = x
+        return stdM
+
+class TCompCorInputSpec(CompCorInputSpec):
+    # and all the fields in CompCorInputSpec
+    percentile_threshold = traits.Range(low=0., high=1., value=.02,
+                                        exclude_low=True, exclude_high=True,
+                                        usedefault=True, desc='the percentile '
+                                        'used to select highest-variance '
+                                        'voxels, represented by a number '
+                                        'between 0 and 1, exclusive. By '
+                                        'default, this value is set to .02. '
+                                        'That is, the 2% of voxels '
+                                        'with the highest variance are used.')
+
+class TCompCor(CompCor):
+    '''
+    Interface for tCompCor. Computes a ROI mask based on variance of voxels.
+
+    Example
+    -------
+
+    >>> ccinterface = TCompCor()
+    >>> ccinterface.inputs.realigned_file = 'functional.nii'
+    >>> ccinterface.inputs.mask_file = 'mask.nii'
+    >>> ccinterface.inputs.num_components = 1
+    >>> ccinterface.inputs.use_regress_poly = True
+    >>> ccinterface.inputs.regress_poly_degree = 2
+    >>> ccinterface.inputs.percentile_threshold = .03
+    '''
+
+    input_spec = TCompCorInputSpec
+    output_spec = CompCorOutputSpec
+
+    def _run_interface(self, runtime):
+        imgseries = nb.load(self.inputs.realigned_file).get_data()
+
+        # From the paper:
+        # "For each voxel time series, the temporal standard deviation is
+        # defined as the standard deviation of the time series after the removal
+        # of low-frequency nuisance terms (e.g., linear and quadratic drift)."
+        imgseries = regress_poly(2, imgseries)
+        imgseries = imgseries - np.mean(imgseries, axis=1)[:, np.newaxis]
+
+        time_voxels = imgseries.T
+        num_voxels = np.prod(time_voxels.shape[1:])
+
+        # "To construct the tSTD noise ROI, we sorted the voxels by their
+        # temporal standard deviation ..."
+        tSTD = self._compute_tSTD(time_voxels, 0)
+        sortSTD = np.sort(tSTD, axis=None)  # flattened sorted matrix
+
+        # use percentile_threshold to pick voxels
+        threshold_index = int(num_voxels * (1. - self.inputs.percentile_threshold))
+        threshold_std = sortSTD[threshold_index]
+        mask = tSTD >= threshold_std
+        mask = mask.astype(int)
+
+        # save mask
+        mask_file = 'mask.nii'
+        nb.nifti1.save(nb.Nifti1Image(mask, np.eye(4)), mask_file)
+        self.inputs.mask_file = mask_file
+
+        super(TCompCor, self)._run_interface(runtime)
+        return runtime
+
+ACompCor = CompCor
diff --git a/nipype/algorithms/misc.py b/nipype/algorithms/misc.py
index 02be67f926..23fb1337e1 100644
--- a/nipype/algorithms/misc.py
+++ b/nipype/algorithms/misc.py
@@ -35,6 +35,7 @@
                     BaseInterfaceInputSpec, isdefined, DynamicTraitedSpec,
                     Undefined)
 from ..utils.filemanip import fname_presuffix, split_filename
+
 
 iflogger = logging.getLogger('interface')
 
@@ -257,7 +258,6 @@ def _list_outputs(self):
         outputs['nifti_file'] = self._gen_output_file_name()
         return outputs
 
-
 class TSNRInputSpec(BaseInterfaceInputSpec):
     in_file = InputMultiPath(File(exists=True), mandatory=True,
                              desc='realigned 4D file or a list of 3D files')
@@ -308,17 +308,7 @@ def _run_interface(self, runtime):
             data = data.astype(np.float32)
 
         if isdefined(self.inputs.regress_poly):
-            timepoints = img.shape[-1]
-            X = np.ones((timepoints, 1))
-            for i in range(self.inputs.regress_poly):
-                X = np.hstack((X, legendre(
-                    i + 1)(np.linspace(-1, 1, timepoints))[:, None]))
-            betas = np.dot(np.linalg.pinv(X), np.rollaxis(data, 3, 2))
-            datahat = np.rollaxis(np.dot(X[:, 1:],
-                                         np.rollaxis(
-                                             betas[1:, :, :, :], 0, 3)),
-                                  0, 4)
-            data = data - datahat
+            data = regress_poly(self.inputs.regress_poly, data)
             img = nb.Nifti1Image(data, img.get_affine(), header)
             nb.save(img, op.abspath(self.inputs.detrended_file))
 
@@ -343,6 +333,32 @@ def _list_outputs(self):
             outputs['detrended_file'] = op.abspath(self.inputs.detrended_file)
         return outputs
 
+def regress_poly(degree, data):
+    ''' returns data with degree polynomial regressed out.
+    The last dimension (i.e. data.shape[-1]) should be time.
+    '''
+    datashape = data.shape
+    timepoints = datashape[-1]
+
+    # Rearrange all voxel-wise time-series in rows
+    data = data.reshape((-1, timepoints))
+
+    # Generate design matrix
+    X = np.ones((timepoints, 1))
+    for i in range(degree):
+        polynomial_func = legendre(i+1)
+        value_array = np.linspace(-1, 1, timepoints)
+        X = np.hstack((X, polynomial_func(value_array)[:, np.newaxis]))
+
+    # Calculate coefficients
+    betas = np.linalg.pinv(X).dot(data.T)
+
+    # Estimation
+    datahat = X[:, 1:].dot(betas[1:, ...]).T
+    regressed_data = data - datahat
+
+    # Back to original shape
+    return regressed_data.reshape(datashape)
 
 class GunzipInputSpec(BaseInterfaceInputSpec):
     in_file = File(exists=True, mandatory=True)
diff --git a/nipype/algorithms/tests/test_compcor.py b/nipype/algorithms/tests/test_compcor.py
new file mode 100644
index 0000000000..256c258a17
--- /dev/null
+++ b/nipype/algorithms/tests/test_compcor.py
@@ -0,0 +1,106 @@
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+import nipype
+from ...testing import assert_equal, assert_true, assert_false, skipif, utils
+from ..compcor import CompCor, TCompCor, ACompCor
+
+import unittest
+import nibabel as nb
+import numpy as np
+import os
+import tempfile
+import shutil
+
+class TestCompCor(unittest.TestCase):
+    ''' Note: Tests currently do a poor job of testing functionality '''
+
+    filenames = {'functionalnii': 'compcorfunc.nii',
+                 'masknii': 'compcormask.nii',
+                 'components_file': None}
+
+    def setUp(self):
+        # setup
+        self.orig_dir = os.getcwd()
+        self.temp_dir = tempfile.mkdtemp()
+        os.chdir(self.temp_dir)
+        noise = np.fromfunction(self.fake_noise_fun, self.fake_data.shape)
+        self.realigned_file = utils.save_toy_nii(self.fake_data + noise,
+                                                 self.filenames['functionalnii'])
+        mask = np.ones(self.fake_data.shape[:3])
+        mask[0,0,0] = 0
+        mask[0,0,1] = 0
+        self.mask_file = utils.save_toy_nii(mask, self.filenames['masknii'])
+
+    def test_compcor(self):
+        expected_components = [['-0.1989607212', '-0.5753813646'],
+                               ['0.5692369697', '0.5674945949'],
+                               ['-0.6662573243', '0.4675843432'],
+                               ['0.4206466244', '-0.3361270124'],
+                               ['-0.1246655485', '-0.1235705610']]
+
+        ccresult = self.run_cc(CompCor(realigned_file=self.realigned_file,
+                                       mask_file=self.mask_file),
+                               expected_components)
+
+        accresult = self.run_cc(ACompCor(realigned_file=self.realigned_file,
+                                         mask_file=self.mask_file,
+                                         components_file='acc_components_file'),
+                                expected_components)
+
+        assert_equal(os.path.getsize(ccresult.outputs.components_file),
+                     os.path.getsize(accresult.outputs.components_file))
+
+    def test_tcompcor(self):
+        ccinterface = TCompCor(realigned_file=self.realigned_file, percentile_threshold=0.75)
+        self.run_cc(ccinterface, [['-0.1535587949', '-0.4318584065'],
+                                  ['0.4286265207', '0.7162580102'],
+                                  ['-0.6288093202', '0.0465452630'],
+                                  ['0.5859742580', '-0.5144309306'],
+                                  ['-0.2322326636', '0.1834860639']])
+
+    def test_tcompcor_with_default_percentile(self):
+        ccinterface = TCompCor(realigned_file=self.realigned_file)
+        ccinterface.run()
+
+        mask = nb.load('mask.nii').get_data()
+        num_nonmasked_voxels = np.count_nonzero(mask)
+        assert_equal(num_nonmasked_voxels, 1)
+
+    def run_cc(self, ccinterface, expected_components):
+        # run
+        ccresult = ccinterface.run()
+
+        # assert
+        expected_file = ccinterface._list_outputs()['components_file']
+        assert_equal(ccresult.outputs.components_file, expected_file)
+        assert_true(os.path.exists(expected_file))
+        assert_true(os.path.getsize(expected_file) > 0)
+        assert_equal(ccinterface.inputs.num_components, 6)
+
+        with open(ccresult.outputs.components_file, 'r') as components_file:
+            components_data = [line.split() for line in components_file]
+            num_got_components = len(components_data)
+            assert_true(num_got_components == ccinterface.inputs.num_components
+                        or num_got_components == self.fake_data.shape[3])
+            first_two = [row[:2] for row in components_data]
+            assert_equal(first_two, expected_components)
+        return ccresult
+
+    def tearDown(self):
+        os.chdir(self.orig_dir)
+        shutil.rmtree(self.temp_dir)
+
+    def fake_noise_fun(self, i, j, l, m):
+        return m*i + l - j
+
+    fake_data = np.array([[[[8, 5, 3, 8, 0],
+                            [6, 7, 4, 7, 1]],
+
+                           [[7, 9, 1, 6, 5],
+                            [0, 7, 4, 7, 7]]],
+
+                          [[[2, 4, 5, 7, 0],
+                            [1, 7, 0, 5, 4]],
+
+                           [[7, 3, 9, 0, 4],
+                            [9, 4, 1, 5, 0]]]])
diff --git a/nipype/algorithms/tests/test_tsnr.py b/nipype/algorithms/tests/test_tsnr.py
new file mode 100644
index 0000000000..3833d1f27f
--- /dev/null
+++ b/nipype/algorithms/tests/test_tsnr.py
@@ -0,0 +1,125 @@
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+
+from ...testing import (assert_equal, assert_true, assert_almost_equal,
+                        skipif, utils)
+from ..misc import TSNR
+
+import unittest
+import mock
+import nibabel as nb
+import numpy as np
+import os
+import tempfile
+import shutil
+
+class TestTSNR(unittest.TestCase):
+    ''' Note: Tests currently do a poor job of testing functionality '''
+
+    in_filenames = {
+        'in_file': 'tsnrinfile.nii',
+    }
+
+    out_filenames = {# default output file names
+        'detrended_file': 'detrend.nii.gz',
+        'mean_file': 'mean.nii.gz',
+        'stddev_file': 'stdev.nii.gz',
+        'tsnr_file': 'tsnr.nii.gz'
+    }
+
+    def setUp(self):
+        # setup temp folder
+        self.orig_dir = os.getcwd()
+        self.temp_dir = tempfile.mkdtemp()
+        os.chdir(self.temp_dir)
+
+        utils.save_toy_nii(self.fake_data, self.in_filenames['in_file'])
+
+    def test_tsnr(self):
+        # run
+        tsnrresult = TSNR(in_file=self.in_filenames['in_file']).run()
+
+        # assert
+        self.assert_expected_outputs(tsnrresult, {
+            'mean_file': (2.8, 7.4),
+            'stddev_file': (0.8, 2.9),
+            'tsnr_file': (1.3, 9.25)
+        })
+
+    def test_tsnr_withpoly1(self):
+        # run
+        tsnrresult = TSNR(in_file=self.in_filenames['in_file'],
+                          regress_poly=1).run()
+
+        # assert
+        self.assert_expected_outputs_poly(tsnrresult, {
+            'detrended_file': (-0.1, 8.7),
+            'mean_file': (2.8, 7.4),
+            'stddev_file': (0.75, 2.75),
+            'tsnr_file': (1.4, 9.9)
+        })
+
+    def test_tsnr_withpoly2(self):
+        # run
+        tsnrresult = TSNR(in_file=self.in_filenames['in_file'],
+                          regress_poly=2).run()
+
+        # assert
+        self.assert_expected_outputs_poly(tsnrresult, {
+            'detrended_file': (-0.22, 8.55),
+            'mean_file': (2.8, 7.7),
+            'stddev_file': (0.21, 2.4),
+            'tsnr_file': (1.7, 35.9)
+        })
+
+    def test_tsnr_withpoly3(self):
+        # run
+        tsnrresult = TSNR(in_file=self.in_filenames['in_file'],
+                          regress_poly=3).run()
+
+        # assert
+        self.assert_expected_outputs_poly(tsnrresult, {
+            'detrended_file': (1.8, 7.95),
+            'mean_file': (2.8, 7.7),
+            'stddev_file': (0.1, 1.7),
+            'tsnr_file': (2.6, 57.3)
+        })
+
+    def assert_expected_outputs_poly(self, tsnrresult, expected_ranges):
+        assert_equal(os.path.basename(tsnrresult.outputs.detrended_file),
+                     self.out_filenames['detrended_file'])
+        self.assert_expected_outputs(tsnrresult, expected_ranges)
+
+    def assert_expected_outputs(self, tsnrresult, expected_ranges):
+        self.assert_default_outputs(tsnrresult.outputs)
+        self.assert_unchanged(expected_ranges)
+
+    def assert_default_outputs(self, outputs):
+        assert_equal(os.path.basename(outputs.mean_file),
+                     self.out_filenames['mean_file'])
+        assert_equal(os.path.basename(outputs.stddev_file),
+                     self.out_filenames['stddev_file'])
+        assert_equal(os.path.basename(outputs.tsnr_file),
+                     self.out_filenames['tsnr_file'])
+
+    def assert_unchanged(self, expected_ranges):
+        for key, (min_, max_) in expected_ranges.items():
+            data = np.asarray(nb.load(self.out_filenames[key])._data)
+            assert_almost_equal(np.amin(data), min_, decimal=1)
+            assert_almost_equal(np.amax(data), max_, decimal=1)
+
+    def tearDown(self):
+        os.chdir(self.orig_dir)
+        shutil.rmtree(self.temp_dir)
+
+    fake_data = np.array([[[[2, 4, 3, 9, 1],
+                            [3, 6, 4, 7, 4]],
+
+                           [[8, 3, 4, 6, 2],
+                            [4, 0, 4, 4, 2]]],
+
+                          [[[9, 7, 5, 5, 7],
+                            [7, 8, 4, 8, 4]],
+
+                           [[0, 4, 7, 1, 7],
+                            [6, 8, 8, 8, 7]]]])
diff --git a/nipype/testing/utils.py b/nipype/testing/utils.py
index 7cc3311dad..092f53ea8e 100644
--- a/nipype/testing/utils.py
+++ b/nipype/testing/utils.py
@@ -19,6 +19,9 @@
 __docformat__ = 'restructuredtext'
 
+import numpy as np
+import nibabel as nb
+
 
 def skip_if_no_package(*args, **kwargs):
     """Raise SkipTest if package_check fails
@@ -102,3 +105,8 @@ def __exit__(self, exc_type, exc_val, exc_tb):
         assert not os.path.exists(self.canary)
         self.dev_null.close()
         shutil.rmtree(self.tmpdir)
+
+def save_toy_nii(ndarray, filename):
+    toy = nb.Nifti1Image(ndarray, np.eye(4))
+    nb.nifti1.save(toy, filename)
+    return filename
diff --git a/nipype/workflows/rsfmri/fsl/resting.py b/nipype/workflows/rsfmri/fsl/resting.py
index 4c879a1fbb..a2778fe2d9 100644
--- a/nipype/workflows/rsfmri/fsl/resting.py
+++ b/nipype/workflows/rsfmri/fsl/resting.py
@@ -8,37 +8,7 @@
 from ....interfaces import utility as util  # utility
 from ....pipeline import engine as pe  # pypeline engine
 from ....algorithms.misc import TSNR
-
-
-def extract_noise_components(realigned_file, noise_mask_file, num_components):
-    """Derive components most reflective of physiological noise
-    """
-    import os
-    from nibabel import load
-    import numpy as np
-    import scipy as sp
-    imgseries = load(realigned_file)
-    components = None
-    mask = load(noise_mask_file).get_data()
-    voxel_timecourses = imgseries.get_data()[mask > 0]
-    voxel_timecourses[np.isnan(np.sum(voxel_timecourses, axis=1)), :] = 0
-    # remove mean and normalize by variance
-    # voxel_timecourses.shape == [nvoxels, time]
-    X = voxel_timecourses.T
-    stdX = np.std(X, axis=0)
-    stdX[stdX == 0] = 1.
-    stdX[np.isnan(stdX)] = 1.
-    stdX[np.isinf(stdX)] = 1.
-    X = (X - np.mean(X, axis=0)) / stdX
-    u, _, _ = sp.linalg.svd(X, full_matrices=False)
-    if components is None:
-        components = u[:, :num_components]
-    else:
-        components = np.hstack((components, u[:, :num_components]))
-    components_file = os.path.join(os.getcwd(), 'noise_components.txt')
-    np.savetxt(components_file, components, fmt=b"%.10f")
-    return components_file
-
+from ....algorithms import compcor as cc
 
 def select_volume(filename, which):
     """Return the middle index of a file
@@ -96,7 +66,7 @@ def create_realign_flow(name='realign'):
     return realignflow
 
 
-def create_resting_preproc(name='restpreproc'):
+def create_resting_preproc(name='restpreproc', base_dir=None):
     """Create a "resting" time series preprocessing workflow
 
     The noise removal is based on Behzadi et al.
     (2007)
@@ -128,7 +98,7 @@
     """
 
-    restpreproc = pe.Workflow(name=name)
+    restpreproc = pe.Workflow(name=name, base_dir=base_dir)
 
     # Define nodes
     inputnode = pe.Node(interface=util.IdentityInterface(fields=['func',
@@ -148,12 +118,9 @@
     getthresh = pe.Node(interface=fsl.ImageStats(op_string='-p 98'),
                         name='getthreshold')
     threshold_stddev = pe.Node(fsl.Threshold(), name='threshold')
-    compcor = pe.Node(util.Function(input_names=['realigned_file',
-                                                 'noise_mask_file',
-                                                 'num_components'],
-                                    output_names=['noise_components'],
-                                    function=extract_noise_components),
-                      name='compcorr')
+    compcor = pe.Node(cc.ACompCor(components_file="noise_components.txt",
+                                  use_regress_poly=False),
+                      name='compcor')
     remove_noise = pe.Node(fsl.FilterRegressor(filter_all=True),
                            name='remove_noise')
     bandpass_filter = pe.Node(fsl.TemporalFilter(),
@@ -170,12 +137,12 @@
     restpreproc.connect(realigner, 'outputspec.realigned_file',
                         compcor, 'realigned_file')
     restpreproc.connect(threshold_stddev, 'out_file',
-                        compcor, 'noise_mask_file')
+                        compcor, 'mask_file')
     restpreproc.connect(inputnode, 'num_noise_components',
                         compcor, 'num_components')
     restpreproc.connect(tsnr, 'detrended_file', remove_noise, 'in_file')
-    restpreproc.connect(compcor, 'noise_components',
+    restpreproc.connect(compcor, 'components_file',
                         remove_noise, 'design_file')
     restpreproc.connect(inputnode, 'highpass_sigma',
                         bandpass_filter, 'highpass_sigma')
diff --git a/nipype/workflows/rsfmri/fsl/tests/__init__.py b/nipype/workflows/rsfmri/fsl/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/nipype/workflows/rsfmri/fsl/tests/test_resting.py b/nipype/workflows/rsfmri/fsl/tests/test_resting.py
new file mode 100644
index 0000000000..6590283748
--- /dev/null
+++ b/nipype/workflows/rsfmri/fsl/tests/test_resting.py
@@ -0,0 +1,115 @@
+# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
+# vi: set ft=python sts=4 ts=4 sw=4 et:
+
+from .....testing import (assert_equal, assert_true, assert_almost_equal,
+                          skipif, utils)
+from .....interfaces import fsl, IdentityInterface, utility
+from .....pipeline.engine import Node, Workflow
+
+from ..resting import create_resting_preproc
+
+import unittest
+import mock
+from mock import MagicMock
+import nibabel as nb
+import numpy as np
+import os
+import tempfile
+import shutil
+
+all_fields = ['func', 'in_file', 'slice_time_corrected_file', 'stddev_file',
+              'out_stat', 'thresh', 'num_noise_components', 'detrended_file',
+              'design_file', 'highpass_sigma', 'lowpass_sigma', 'out_file',
+              'noise_mask_file', 'filtered_file']
+
+def stub_node_factory(*args, **kwargs):
+    if 'name' not in kwargs.keys():
+        raise Exception()
+    name = kwargs['name']
+    if name == 'compcor':
+        return Node(*args, **kwargs)
+    else:  # replace with an IdentityInterface
+        return Node(IdentityInterface(fields=all_fields),
+                    name=name)
+
+def stub_wf(*args, **kwargs):
+    wf = Workflow(name='realigner')
+    inputnode = Node(IdentityInterface(fields=['func']), name='inputspec')
+    outputnode = Node(interface=IdentityInterface(fields=['realigned_file']),
+                      name='outputspec')
+    wf.connect(inputnode, 'func', outputnode, 'realigned_file')
+    return wf
+
+class TestResting(unittest.TestCase):
+
+    in_filenames = {
+        'realigned_file': 'rsfmrifunc.nii',
+        'mask_file': 'rsfmrimask.nii'
+    }
+
+    out_filenames = {
+        'components_file': 'restpreproc/compcor/noise_components.txt'
+    }
+    num_noise_components = 6
+
+    def setUp(self):
+        # setup temp folder
+        self.orig_dir = os.getcwd()
+        self.temp_dir = tempfile.mkdtemp()
+        os.chdir(self.temp_dir)
+        self.in_filenames = {key: os.path.abspath(value)
+                             for key, value in self.in_filenames.items()}
+
+        # create&save input files
+        utils.save_toy_nii(self.fake_data, self.in_filenames['realigned_file'])
+        mask = np.zeros(self.fake_data.shape[:3])
+        for i in range(mask.shape[0]):
+            for j in range(mask.shape[1]):
+                if i==j:
+                    mask[i,j] = 1
+        utils.save_toy_nii(mask, self.in_filenames['mask_file'])
+
+    @mock.patch('nipype.workflows.rsfmri.fsl.resting.create_realign_flow',
+                side_effect=stub_wf)
+    @mock.patch('nipype.pipeline.engine.Node', side_effect=stub_node_factory)
+    def test_create_resting_preproc(self, mock_Node, mock_realign_wf):
+        wf = create_resting_preproc(base_dir=os.getcwd())
+
+        wf.inputs.inputspec.num_noise_components = self.num_noise_components
+        mask_in = wf.get_node('threshold').inputs
+        mask_in.out_file = self.in_filenames['mask_file']
+        func_in = wf.get_node('slicetimer').inputs
+        func_in.slice_time_corrected_file = self.in_filenames['realigned_file']
+
+        wf.run()
+
+        # assert
+        expected_file = os.path.abspath(self.out_filenames['components_file'])
+        with open(expected_file, 'r') as components_file:
+            components_data = [line.split() for line in components_file]
+            num_got_components = len(components_data)
+            assert_true(num_got_components == self.num_noise_components
+                        or num_got_components == self.fake_data.shape[3])
+            first_two = [row[:2] for row in components_data]
+            assert_equal(first_two, [['-0.5172356654', '-0.6973053243'],
+                                     ['0.2574722644', '0.1645270737'],
+                                     ['-0.0806469590', '0.5156853779'],
+                                     ['0.7187176051', '-0.3235820287'],
+                                     ['-0.3783072450', '0.3406749013']])
+
+    def tearDown(self):
+        os.chdir(self.orig_dir)
+        shutil.rmtree(self.temp_dir)
+
+    fake_data = np.array([[[[2, 4, 3, 9, 1],
+                            [3, 6, 4, 7, 4]],
+
+                           [[8, 3, 4, 6, 2],
+                            [4, 0, 4, 4, 2]]],
+
+                          [[[9, 7, 5, 5, 7],
+                            [7, 8, 4, 8, 4]],
+
+                           [[0, 4, 7, 1, 7],
+                            [6, 8, 8, 8, 7]]]])
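
Usage note (illustrative sketch, not part of the patch itself): the snippet below shows how the interfaces added in nipype/algorithms/compcor.py might be run standalone, using only the inputs declared in CompCorInputSpec and TCompCorInputSpec above. The file names 'functional.nii' and 'wm_csf_mask.nii' are placeholders for an existing realigned 4D series and a 3D noise-ROI mask.

from nipype.algorithms.compcor import ACompCor, TCompCor

# aCompCor: components are extracted from an anatomically defined noise ROI
# (e.g. white matter + ventricles), so a mask_file is supplied.
acc = ACompCor()
acc.inputs.realigned_file = 'functional.nii'   # placeholder 4D time series
acc.inputs.mask_file = 'wm_csf_mask.nii'       # placeholder 3D noise ROI
acc.inputs.num_components = 6                  # 6 for BOLD, 4 for ASL
acc.inputs.use_regress_poly = True             # polynomial detrend before the SVD
acc.inputs.regress_poly_degree = 2
result = acc.run()
print(result.outputs.components_file)          # text file of noise regressors

# tCompCor: the noise ROI is derived from the highest-variance voxels,
# so no mask is required; percentile_threshold sets the ROI size.
tcc = TCompCor(realigned_file='functional.nii', percentile_threshold=.03)
tcc.run()

Inside a workflow the same output feeds a design matrix, as in create_resting_preproc above, where the compcor node's components_file is connected to fsl.FilterRegressor's design_file.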