
parrec: support additional dimensions, permute, etc. #259


Closed
wants to merge 1 commit
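For orientation, here is a minimal sketch of how the header attributes this pull request relies on might be inspected before conversion. The filename is a placeholder, and `permuted_shape` / `permuted_labels` are attribute names taken from the diff below, assumed to exist on the PAR/REC header after this change:

import nibabel.parrec as pr

# 'example.PAR' is a placeholder path; permuted_shape and permuted_labels
# are attributes this PR assumes on the PAR/REC header (see diff below).
pr_img = pr.load('example.PAR')
pr_hdr = pr_img.header
print(pr_hdr.permuted_shape)    # e.g. (80, 80, 30, 10, 2)
print(pr_hdr.permuted_labels)   # labels naming each of those dimensions
if len(pr_hdr.permuted_shape) > 4:
    print('extra dimensions will be stacked along the 4th axis')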
31 changes: 30 additions & 1 deletion bin/parrec2nii
@@ -7,6 +7,8 @@ from optparse import OptionParser, Option
import sys
import os
import gzip
import warnings
import numpy as np
import nibabel
import nibabel.parrec as pr
import nibabel.nifti1 as nifti1
@@ -84,6 +86,21 @@ def proc_file(infile, opts):
# load the PAR header
pr_img = pr.load(infile)
pr_hdr = pr_img.header

if len(pr_hdr.permuted_shape) > 4:
    warnings.warn("Multiple dimensions were stacked along 4th dimension:\n"
                  "Original shape: {}\n"
                  "Dimension labels: {}\n".format(pr_hdr.permuted_shape,
                                                  pr_hdr.permuted_labels))
else:
    print("Data dimensions are {}".format(pr_hdr.permuted_labels))

# error out in cases where a unique affine cannot be defined
angulation = pr_hdr.image_defs['image angulation']
if not np.all(angulation == angulation[0]):
    raise ValueError("Multiple image angulations within the .PAR")
if len(np.unique(pr_hdr.image_defs['slice orientation'])) > 1:
    raise ValueError("Multiple slice orientations within the .PAR")

# get the raw unscaled data from the REC file
raw_data = pr_img.dataobj.get_unscaled()

@@ -140,7 +157,7 @@ def proc_file(infile, opts):
# store units -- always mm and msec
nhdr.set_xyzt_units('mm', 'msec')
else:
# anatomical or DTI
# anatomical, DTI, or other
nhdr.set_xyzt_units('mm', 'unknown')

# get original scaling
@@ -149,6 +166,18 @@ def proc_file(infile, opts):
    intercept = 0.0
else:
    slope, intercept = pr_hdr.get_data_scaling(method=opts.scaling)
    if isinstance(slope, np.ndarray):
        if slope.size == 1:
            slope = slope[0]
        else:
            raise ValueError("Can't autoconvert .PAR->.NII for "
                             "volume with non-unique slope")
    if isinstance(intercept, np.ndarray):
        if intercept.size == 1:
            intercept = intercept[0]
        else:
            raise ValueError("Can't autoconvert .PAR->.NII for "
                             "volume with non-unique intercept")
nhdr.set_slope_inter(slope, intercept)

# finalize the header: set proper data offset, pixdims, ...
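The slope/intercept handling above collapses a one-element array returned by get_data_scaling into a plain scalar before calling set_slope_inter, and refuses to convert when several distinct values are present. Here is a standalone sketch of that check; _as_scalar is a hypothetical helper name, not part of the diff:

import numpy as np

def _as_scalar(value, name):
    # Hypothetical helper restating the checks in the diff above: accept a
    # plain scalar or a one-element ndarray, reject anything multi-valued.
    if isinstance(value, np.ndarray):
        if value.size != 1:
            raise ValueError("Can't autoconvert .PAR->.NII for "
                             "volume with non-unique " + name)
        value = value.item()
    return value

slope = _as_scalar(np.array([2.5]), 'slope')    # -> 2.5
intercept = _as_scalar(0.0, 'intercept')        # plain floats pass through unchanged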
35 changes: 35 additions & 0 deletions nibabel/arrayproxy.py
@@ -26,6 +26,7 @@
See :mod:`nibabel.tests.test_proxy_api` for proxy API conformance checks.
"""
import warnings
import numpy as np

from .volumeutils import BinOpener, array_from_file, apply_read_scaling
from .fileslice import fileslice
@@ -129,6 +130,40 @@ def __getitem__(self, slicer):
return apply_read_scaling(raw_data, self._slope, self._inter)




class PARArrayProxy(ArrayProxy):
    """ Like ArrayProxy, but reads data according to ``original_shape``,
    then permutes / reshapes to a 4D NIfTI layout.

    Output will always be ordered as ``(x, y, slice, *remaining_dims)``.
    """
    order = 'F'  # Fortran (column-major) order

    def get_unscaled(self):
        ''' Read raw (unscaled) data from file

        This is an optional part of the proxy API
        '''
        with BinOpener(self.file_like) as fileobj:
            raw_data = array_from_file(self._header.original_shape,
                                       self._dtype,
                                       fileobj,
                                       offset=self._offset,
                                       order=self.order)

        # permute if needed
        if self._header.requires_permute:
            raw_data = np.transpose(raw_data, self._header.permute_order)

        # reshape if needed
        if self._shape != self._header.original_shape:
            raw_data = raw_data.reshape(self._shape, order=self.order)

        return raw_data


def is_proxy(obj):
    """ Return True if `obj` is an array proxy
    """
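The permute-then-reshape strategy in PARArrayProxy.get_unscaled can be illustrated with plain numpy. The shapes, dimension meanings, and permute order below are invented for illustration; only the transpose/reshape pattern mirrors the proxy code above:

import numpy as np

# Invented example: data stored on disk as (x, y, dynamics, slices, echoes),
# delivered as (x, y, slices, dynamics * echoes) for a 4D NIfTI.
disk_shape = (64, 64, 10, 30, 2)      # stands in for header.original_shape
permute_order = (0, 1, 3, 2, 4)       # move the slice axis into 3rd position
nifti_shape = (64, 64, 30, 20)        # trailing dims stacked: 10 * 2 = 20

raw = np.arange(np.prod(disk_shape)).reshape(disk_shape, order='F')
permuted = np.transpose(raw, permute_order)       # now (64, 64, 30, 10, 2)
stacked = permuted.reshape(nifti_shape, order='F')
print(stacked.shape)                              # (64, 64, 30, 20)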