COMPAT: Compat issue in DataFrame.dtypes when options.mode.use_inf_as_null is True (GH8722) #8726

Merged · 1 commit · Nov 4, 2014
doc/source/whatsnew/v0.15.1.txt (2 changes: 1 addition & 1 deletion)
@@ -212,7 +212,7 @@ Bug Fixes
- Bug in duplicated/drop_duplicates with a Categorical (:issue:`8623`)
- Bug in ``Categorical`` reflected comparison operator raising if the first argument was a numpy array scalar (e.g. np.int64) (:issue:`8658`)
- Bug in Panel indexing with a list-like (:issue:`8710`)

- Compat issue in ``DataFrame.dtypes`` when ``options.mode.use_inf_as_null`` is True (:issue:`8722`)



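For context, the reported incompatibility can be reproduced (and the fixed behavior checked) roughly as follows. This is a minimal sketch assuming pandas of this vintage, mirroring the new test further down:

import numpy as np
import pandas as pd

# GH8722: with the option enabled, DataFrame.dtypes previously failed;
# after this change it returns the dtype Series as usual.
with pd.option_context('mode.use_inf_as_null', True):
    df = pd.DataFrame([[1]])
    print(df.dtypes)  # expected: 0    int64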
pandas/lib.pyx (22 changes: 4 additions & 18 deletions)
@@ -289,8 +289,8 @@ def isnullobj(ndarray[object] arr):
    n = len(arr)
    result = np.zeros(n, dtype=np.uint8)
    for i from 0 <= i < n:
-        arobj = arr[i]
-        result[i] = arobj is NaT or _checknull(arobj)
+        val = arr[i]
+        result[i] = val is NaT or _checknull(val)
    return result.view(np.bool_)

@cython.wraparound(False)
@@ -303,10 +303,10 @@ def isnullobj_old(ndarray[object] arr):
    n = len(arr)
    result = np.zeros(n, dtype=np.uint8)
    for i from 0 <= i < n:
-        result[i] = util._checknull_old(arr[i])
+        val = arr[i]
+        result[i] = val is NaT or util._checknull_old(val)
    return result.view(np.bool_)


@cython.wraparound(False)
@cython.boundscheck(False)
def isnullobj2d(ndarray[object, ndim=2] arr):
@@ -323,20 +323,6 @@ def isnullobj2d(ndarray[object, ndim=2] arr):
                result[i, j] = 1
    return result.view(np.bool_)

-@cython.wraparound(False)
-@cython.boundscheck(False)
-def isnullobj_old(ndarray[object] arr):
-    cdef Py_ssize_t i, n
-    cdef object val
-    cdef ndarray[uint8_t] result
-
-    n = len(arr)
-    result = np.zeros(n, dtype=np.uint8)
-    for i from 0 <= i < n:
-        result[i] = util._checknull_old(arr[i])
-    return result.view(np.bool_)


@cython.wraparound(False)
@cython.boundscheck(False)
def isnullobj2d_old(ndarray[object, ndim=2] arr):
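The practical effect of the isnullobj_old change is that NaT is also recognized as missing on the use_inf_as_null code path for object arrays. A rough illustration (behavior as of pandas 0.15 is assumed, not verified here):

import numpy as np
import pandas as pd

arr = np.array([pd.NaT, np.inf, 1.0, None], dtype=object)
with pd.option_context('mode.use_inf_as_null', True):
    # NaT, inf and None should all be flagged as missing on this path;
    # the plain float 1.0 should not.
    print(pd.isnull(arr))  # roughly: [ True  True False  True]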
pandas/src/util.pxd (2 changes: 1 addition & 1 deletion)
@@ -76,7 +76,7 @@ cdef inline bint _checknull_old(object val):
    cdef double INF = <double> np.inf
    cdef double NEGINF = -INF
    try:
-        return val is None or val != val or val == INF or val == NEGINF
+        return val is None or (cpython.PyFloat_Check(val) and (val != val or val == INF or val == NEGINF))
    except ValueError:
        return False

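For readers not fluent in Cython, here is a rough pure-Python sketch of the tightened check (illustrative only; checknull_old_sketch is a made-up name, not part of pandas):

import numpy as np

def checknull_old_sketch(val):
    # Only float values are compared against inf/-inf now, so arbitrary
    # objects (e.g. the numpy dtype objects behind DataFrame.dtypes) never
    # hit the inf comparison and cannot be misclassified as null.
    INF = np.inf
    NEGINF = -INF
    try:
        return val is None or (isinstance(val, float) and
                               (val != val or val == INF or val == NEGINF))
    except ValueError:
        return False

print(checknull_old_sketch(np.dtype('int64')))  # False
print(checknull_old_sketch(np.inf))             # True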
pandas/tests/test_frame.py (6 changes: 6 additions & 0 deletions)
@@ -6783,6 +6783,12 @@ def test_dtypes(self):
                          index=result.index)
        assert_series_equal(result, expected)

+        # compat, GH 8722
+        with option_context('use_inf_as_null',True):
+            df = DataFrame([[1]])
+            result = df.dtypes
+            assert_series_equal(result,Series({0:np.dtype('int64')}))

    def test_convert_objects(self):

        oops = self.mixed_frame.T.T
pandas/tests/test_lib.py (6 changes: 3 additions & 3 deletions)
@@ -5,7 +5,7 @@
import pandas as pd
from pandas.lib import isscalar, item_from_zerodim
import pandas.util.testing as tm

+from pandas.compat import u

class TestIsscalar(tm.TestCase):
    def test_isscalar_builtin_scalars(self):
@@ -16,7 +16,7 @@ def test_isscalar_builtin_scalars(self):
        self.assertTrue(isscalar(np.nan))
        self.assertTrue(isscalar('foobar'))
        self.assertTrue(isscalar(b'foobar'))
-        self.assertTrue(isscalar(u'foobar'))
+        self.assertTrue(isscalar(u('efoobar')))
        self.assertTrue(isscalar(datetime(2014, 1, 1)))
        self.assertTrue(isscalar(date(2014, 1, 1)))
        self.assertTrue(isscalar(time(12, 0)))
@@ -38,7 +38,7 @@ def test_isscalar_numpy_array_scalars(self):
        self.assertTrue(isscalar(np.int32(1)))
        self.assertTrue(isscalar(np.object_('foobar')))
        self.assertTrue(isscalar(np.str_('foobar')))
-        self.assertTrue(isscalar(np.unicode_(u'foobar')))
+        self.assertTrue(isscalar(np.unicode_(u('foobar'))))
        self.assertTrue(isscalar(np.bytes_(b'foobar')))
        self.assertTrue(isscalar(np.datetime64('2014-01-01')))
        self.assertTrue(isscalar(np.timedelta64(1, 'h')))
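The u() helper imported above from pandas.compat returns a unicode string on both Python 2 and Python 3, which is why the bare u'...' literals were replaced. A minimal usage sketch (assuming the pandas.compat API of this era):

from pandas.compat import u
from pandas.lib import isscalar

# u('foobar') is unicode on Python 2 and str on Python 3, so the same
# assertion runs unchanged under either interpreter.
assert isscalar(u('foobar'))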