diff --git a/pandas/core/algorithms.py b/pandas/core/algorithms.py
index 16ec2bb5f253c..7177b44c06a18 100644
--- a/pandas/core/algorithms.py
+++ b/pandas/core/algorithms.py
@@ -485,9 +485,11 @@ def isin(comps: AnyArrayLike, values: AnyArrayLike) -> np.ndarray:
     comps = _ensure_arraylike(comps)
     comps = extract_array(comps, extract_numpy=True)
     if is_extension_array_dtype(comps.dtype):
-        # error: Incompatible return value type (got "Series", expected "ndarray")
-        # error: Item "ndarray" of "Union[Any, ndarray]" has no attribute "isin"
-        return comps.isin(values)  # type: ignore[return-value,union-attr]
+        # error: Argument 1 to "isin" of "ExtensionArray" has incompatible type
+        # "Union[Any, ExtensionArray, ndarray]"; expected "Sequence[Any]"
+        # error: Item "ndarray" of "Union[Any, ExtensionArray, ndarray]" has no
+        # attribute "isin"
+        return comps.isin(values)  # type: ignore[arg-type, union-attr]

     elif needs_i8_conversion(comps.dtype):
         # Dispatch to DatetimeLikeArrayMixin.isin
diff --git a/pandas/core/array_algos/putmask.py b/pandas/core/array_algos/putmask.py
index 3daf1b3ae3902..0666112cec33d 100644
--- a/pandas/core/array_algos/putmask.py
+++ b/pandas/core/array_algos/putmask.py
@@ -191,7 +191,7 @@ def extract_bool_array(mask: ArrayLike) -> np.ndarray:
         # We could have BooleanArray, Sparse[bool], ...
         # Except for BooleanArray, this is equivalent to just
         # np.asarray(mask, dtype=bool)
-        mask = mask.to_numpy(dtype=bool, na_value=False)
+        mask = mask.to_numpy(dtype=np.dtype(bool), na_value=False)

     mask = np.asarray(mask, dtype=bool)
     return mask
diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py
index 5a2643dd531ed..150e0af05b994 100644
--- a/pandas/core/arrays/base.py
+++ b/pandas/core/arrays/base.py
@@ -13,6 +13,7 @@
     TYPE_CHECKING,
     Any,
     Callable,
+    Iterator,
     Sequence,
     TypeVar,
     cast,
@@ -24,6 +25,7 @@
 from pandas._typing import (
     ArrayLike,
     Dtype,
+    NpDtype,
     PositionalIndexer,
     Shape,
 )
@@ -69,6 +71,7 @@
 )

 if TYPE_CHECKING:
+    from typing import Literal

     class ExtensionArraySupportsAnyAll("ExtensionArray"):
         def any(self, *, skipna: bool = True) -> bool:
@@ -375,7 +378,7 @@ def __len__(self) -> int:
         """
         raise AbstractMethodError(self)

-    def __iter__(self):
+    def __iter__(self) -> Iterator[Any]:
         """
         Iterate over elements of the array.
         """
@@ -424,9 +427,9 @@ def __ne__(self, other: Any) -> ArrayLike:  # type: ignore[override]

     def to_numpy(
         self,
-        dtype: Dtype | None = None,
+        dtype: NpDtype | None = None,
         copy: bool = False,
-        na_value=lib.no_default,
+        na_value: Any | None = lib.no_default,
     ) -> np.ndarray:
         """
         Convert to a NumPy ndarray.
@@ -453,12 +456,7 @@ def to_numpy(
         -------
         numpy.ndarray
         """
-        # error: Argument "dtype" to "asarray" has incompatible type
-        # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int],
-        # Type[complex], Type[bool], Type[object], None]"; expected "Union[dtype[Any],
-        # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int,
-        # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]"
-        result = np.asarray(self, dtype=dtype)  # type: ignore[arg-type]
+        result = np.asarray(self, dtype=dtype)
         if copy or na_value is not lib.no_default:
             result = result.copy()
         if na_value is not lib.no_default:
@@ -510,8 +508,7 @@ def nbytes(self) -> int:
     # ------------------------------------------------------------------------
     # Additional Methods
     # ------------------------------------------------------------------------
-
-    def astype(self, dtype, copy=True):
+    def astype(self, dtype: Dtype, copy: bool = True):
         """
         Cast to a NumPy array with 'dtype'.

@@ -544,8 +541,11 @@ def astype(self, dtype, copy=True):
             dtype, (ArrowStringDtype, StringDtype)
         ):  # allow conversion to StringArrays
             return dtype.construct_array_type()._from_sequence(self, copy=False)
-
-        return np.array(self, dtype=dtype, copy=copy)
+        # error: Argument "dtype" to "array" has incompatible type
+        # "Union[ExtensionDtype, dtype[Any]]"; expected "Union[dtype[Any], None,
+        # type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any,
+        # Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]"
+        return np.array(self, dtype=dtype, copy=copy)  # type: ignore[arg-type]

     def isna(self) -> np.ndarray | ExtensionArraySupportsAnyAll:
         """
@@ -591,8 +591,8 @@ def argsort(
         ascending: bool = True,
         kind: str = "quicksort",
         na_position: str = "last",
-        *args,
-        **kwargs,
+        *args: Any,
+        **kwargs: Any,
     ) -> np.ndarray:
         """
         Return the indices that would sort this array.
@@ -680,7 +680,12 @@ def argmax(self, skipna: bool = True) -> int:
             raise NotImplementedError
         return nargminmax(self, "argmax")

-    def fillna(self, value=None, method=None, limit=None):
+    def fillna(
+        self,
+        value: Any | ArrayLike | None = None,
+        method: Literal["backfill", "bfill", "ffill", "pad"] | None = None,
+        limit: int | None = None,
+    ) -> ExtensionArray:
         """
         Fill NA/NaN values using the specified method.

@@ -729,7 +734,7 @@ def fillna(self, value=None, method=None, limit=None):
             new_values = self.copy()
         return new_values

-    def dropna(self):
+    def dropna(self) -> ExtensionArrayT:
         """
         Return ExtensionArray without NA values.

@@ -737,8 +742,11 @@ def dropna(self):
         -------
         valid : ExtensionArray
         """
-        # error: Unsupported operand type for ~ ("ExtensionArray")
-        return self[~self.isna()]  # type: ignore[operator]
+        # error: Incompatible return value type (got "Union[ExtensionArray, Any]",
+        # expected "ExtensionArrayT")
+        # error: Unsupported operand type for ~ ("Union[ndarray,
+        # ExtensionArraySupportsAnyAll]")
+        return self[~self.isna()]  # type: ignore[return-value, operator]

     def shift(self, periods: int = 1, fill_value: object = None) -> ExtensionArray:
         """
@@ -794,7 +802,7 @@ def shift(self, periods: int = 1, fill_value: object = None) -> ExtensionArray:
             b = empty
         return self._concat_same_type([a, b])

-    def unique(self):
+    def unique(self) -> ExtensionArray:
         """
         Compute the ExtensionArray of unique values.

@@ -805,7 +813,12 @@ def unique(self):
         uniques = unique(self.astype(object))
         return self._from_sequence(uniques, dtype=self.dtype)

-    def searchsorted(self, value, side="left", sorter=None):
+    def searchsorted(
+        self,
+        value: Sequence[Any],
+        side: Literal["left", "right"] = "left",
+        sorter: Sequence[Any] | None = None,
+    ) -> np.ndarray:
         """
         Find indices where elements should be inserted to maintain order.

@@ -850,7 +863,8 @@ def searchsorted(self, value, side="left", sorter=None):
         # 1. Values outside the range of the `data_for_sorting` fixture
         # 2. Values between the values in the `data_for_sorting` fixture
         # 3. Missing values.
-        arr = self.astype(object)
+        # TODO: overload astype so that cast is unnecessary
+        arr = cast(np.ndarray, self.astype(object))
         return arr.searchsorted(value, side=side, sorter=sorter)

     def equals(self, other: object) -> bool:
@@ -887,7 +901,7 @@ def equals(self, other: object) -> bool:
         equal_na = self.isna() & other.isna()  # type: ignore[operator]
         return bool((equal_values | equal_na).all())

-    def isin(self, values) -> np.ndarray:
+    def isin(self, values: Sequence[Any]) -> np.ndarray:
         """
         Pointwise comparison for set containment in the given values.

@@ -901,7 +915,9 @@ def isin(self, values) -> np.ndarray:
         -------
         np.ndarray[bool]
         """
-        return isin(np.asarray(self), values)
+        # error: Argument 2 to "isin" has incompatible type "Sequence[Any]"; expected
+        # "Union[Union[ExtensionArray, ndarray], Index, Series]"
+        return isin(self.astype(object), values)  # type: ignore[arg-type]

     def _values_for_factorize(self) -> tuple[np.ndarray, Any]:
         """
@@ -925,7 +941,7 @@ def _values_for_factorize(self) -> tuple[np.ndarray, Any]:
         The values returned by this method are also used in
         :func:`pandas.util.hash_pandas_object`.
         """
-        return self.astype(object), np.nan
+        return cast(np.ndarray, self.astype(object)), np.nan

     def factorize(self, na_sentinel: int = -1) -> tuple[np.ndarray, ExtensionArray]:
         """
@@ -1023,7 +1039,11 @@ def factorize(self, na_sentinel: int = -1) -> tuple[np.ndarray, ExtensionArray]:

     @Substitution(klass="ExtensionArray")
     @Appender(_extension_array_shared_docs["repeat"])
-    def repeat(self, repeats, axis=None):
+    def repeat(
+        self,
+        repeats: int | Sequence[int],
+        axis: Literal[None, 0] = None,
+    ) -> ExtensionArray:
         nv.validate_repeat((), {"axis": axis})
         ind = np.arange(len(self)).repeat(repeats)
         return self.take(ind)
@@ -1033,12 +1053,12 @@ def repeat(self, repeats, axis=None):
     # ------------------------------------------------------------------------

     def take(
-        self: ExtensionArrayT,
+        self,
         indices: Sequence[int],
         *,
         allow_fill: bool = False,
         fill_value: Any = None,
-    ) -> ExtensionArrayT:
+    ) -> ExtensionArray:
         """
         Take elements from an array.

@@ -1127,7 +1147,7 @@ def take(self, indices, allow_fill=False, fill_value=None):
         # pandas.api.extensions.take
         raise AbstractMethodError(self)

-    def copy(self: ExtensionArrayT) -> ExtensionArrayT:
+    def copy(self) -> ExtensionArray:
         """
         Return a copy of the array.

@@ -1207,7 +1227,7 @@ def _formatter(self, boxed: bool = False) -> Callable[[Any], str | None]:
     # Reshaping
     # ------------------------------------------------------------------------

-    def transpose(self, *axes) -> ExtensionArray:
+    def transpose(self, *axes: int) -> ExtensionArray:
         """
         Return a transposed view on this array.

@@ -1220,7 +1240,7 @@ def transpose(self, *axes) -> ExtensionArray:
     def T(self) -> ExtensionArray:
         return self.transpose()

-    def ravel(self, order="C") -> ExtensionArray:
+    def ravel(self, order: Literal["C", "F", "A", "K"] | None = "C") -> ExtensionArray:
         """
         Return a flattened view on this array.

@@ -1294,13 +1314,13 @@ def _reduce(self, name: str, *, skipna: bool = True, **kwargs):
         """
         raise TypeError(f"cannot perform {name} with type {self.dtype}")

-    def __hash__(self):
+    def __hash__(self) -> int:
         raise TypeError(f"unhashable type: {repr(type(self).__name__)}")

     # ------------------------------------------------------------------------
     # Non-Optimized Default Methods

-    def delete(self: ExtensionArrayT, loc) -> ExtensionArrayT:
+    def delete(self, loc: int | Sequence[int]) -> ExtensionArray:
         indexer = np.delete(np.arange(len(self)), loc)
         return self.take(indexer)

diff --git a/pandas/core/arrays/boolean.py b/pandas/core/arrays/boolean.py
index 14d059c04b7c0..221a6df80b535 100644
--- a/pandas/core/arrays/boolean.py
+++ b/pandas/core/arrays/boolean.py
@@ -392,7 +392,7 @@ def reconstruct(x):
     def _coerce_to_array(self, value) -> tuple[np.ndarray, np.ndarray]:
         return coerce_to_array(value)

-    def astype(self, dtype, copy: bool = True) -> ArrayLike:
+    def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike:
         """
         Cast to a NumPy array or ExtensionArray with 'dtype'.

diff --git a/pandas/core/arrays/floating.py b/pandas/core/arrays/floating.py
index 1acbcf17dfffd..15848c22d0db3 100644
--- a/pandas/core/arrays/floating.py
+++ b/pandas/core/arrays/floating.py
@@ -10,6 +10,7 @@
 )
 from pandas._typing import (
     ArrayLike,
+    Dtype,
     DtypeObj,
 )
 from pandas.compat.numpy import function as nv
@@ -271,7 +272,7 @@ def _from_sequence_of_strings(
     def _coerce_to_array(self, value) -> tuple[np.ndarray, np.ndarray]:
         return coerce_to_array(value, dtype=self.dtype)

-    def astype(self, dtype, copy: bool = True) -> ArrayLike:
+    def astype(self, dtype: Dtype, copy: bool = True) -> ArrayLike:
         """
         Cast to a NumPy array or ExtensionArray with 'dtype'.

diff --git a/pandas/core/arrays/interval.py b/pandas/core/arrays/interval.py
index 8d3a8feb89d67..90a4094c8a5e6 100644
--- a/pandas/core/arrays/interval.py
+++ b/pandas/core/arrays/interval.py
@@ -1517,7 +1517,9 @@ def delete(self: IntervalArrayT, loc) -> IntervalArrayT:
         return self._shallow_copy(left=new_left, right=new_right)

     @Appender(_extension_array_shared_docs["repeat"] % _shared_docs_kwargs)
-    def repeat(self: IntervalArrayT, repeats: int, axis=None) -> IntervalArrayT:
+    def repeat(
+        self: IntervalArrayT, repeats: int | Sequence[int], axis=None
+    ) -> IntervalArrayT:
         nv.validate_repeat((), {"axis": axis})
         left_repeat = self.left.repeat(repeats)
         right_repeat = self.right.repeat(repeats)
diff --git a/pandas/core/arrays/masked.py b/pandas/core/arrays/masked.py
index 11f9f645920ec..d7c746ba29677 100644
--- a/pandas/core/arrays/masked.py
+++ b/pandas/core/arrays/masked.py
@@ -208,10 +208,7 @@ def __len__(self) -> int:
     def __invert__(self: BaseMaskedArrayT) -> BaseMaskedArrayT:
         return type(self)(~self._data, self._mask.copy())

-    # error: Argument 1 of "to_numpy" is incompatible with supertype "ExtensionArray";
-    # supertype defines the argument type as "Union[ExtensionDtype, str, dtype[Any],
-    # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], None]"
-    def to_numpy(  # type: ignore[override]
+    def to_numpy(
         self,
         dtype: NpDtype | None = None,
         copy: bool = False,
diff --git a/pandas/core/arrays/numpy_.py b/pandas/core/arrays/numpy_.py
index 6e4aa1a5efacf..9ad909fc378f1 100644
--- a/pandas/core/arrays/numpy_.py
+++ b/pandas/core/arrays/numpy_.py
@@ -366,10 +366,7 @@ def skew(
     # ------------------------------------------------------------------------
     # Additional Methods

-    # error: Argument 1 of "to_numpy" is incompatible with supertype "ExtensionArray";
-    # supertype defines the argument type as "Union[ExtensionDtype, str, dtype[Any],
-    # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], None]"
-    def to_numpy(  # type: ignore[override]
+    def to_numpy(
         self,
         dtype: NpDtype | None = None,
         copy: bool = False,
diff --git a/pandas/core/arrays/string_arrow.py b/pandas/core/arrays/string_arrow.py
index b7a0e70180ae4..27f4749149b2f 100644
--- a/pandas/core/arrays/string_arrow.py
+++ b/pandas/core/arrays/string_arrow.py
@@ -15,7 +15,7 @@
     missing as libmissing,
 )
 from pandas._typing import (
-    Dtype,
+    DtypeArg,
     NpDtype,
     PositionalIndexer,
     type_t,
@@ -227,7 +227,7 @@ def _chk_pyarrow_available(cls) -> None:
             raise ImportError(msg)

     @classmethod
-    def _from_sequence(cls, scalars, dtype: Dtype | None = None, copy: bool = False):
+    def _from_sequence(cls, scalars, dtype: DtypeArg | None = None, copy: bool = False):
         cls._chk_pyarrow_available()
         # convert non-na-likes to str, and nan-likes to ArrowStringDtype.na_value
         scalars = lib.ensure_string_array(scalars, copy=False)
@@ -235,7 +235,7 @@ def _from_sequence(cls, scalars, dtype: Dtype | None = None, copy: bool = False)

     @classmethod
     def _from_sequence_of_strings(
-        cls, strings, dtype: Dtype | None = None, copy: bool = False
+        cls, strings, dtype: DtypeArg | None = None, copy: bool = False
     ):
         return cls._from_sequence(strings, dtype=dtype, copy=copy)

@@ -254,10 +254,7 @@ def __arrow_array__(self, type=None):
         """Convert myself to a pyarrow Array or ChunkedArray."""
         return self._data

-    # error: Argument 1 of "to_numpy" is incompatible with supertype "ExtensionArray";
-    # supertype defines the argument type as "Union[ExtensionDtype, str, dtype[Any],
-    # Type[str], Type[float], Type[int], Type[complex], Type[bool], Type[object], None]"
-    def to_numpy(  # type: ignore[override]
+    def to_numpy(
         self,
         dtype: NpDtype | None = None,
         copy: bool = False,
@@ -696,7 +693,7 @@ def value_counts(self, dropna: bool = True) -> Series:

     _str_na_value = ArrowStringDtype.na_value

-    def _str_map(self, f, na_value=None, dtype: Dtype | None = None):
+    def _str_map(self, f, na_value=None, dtype: DtypeArg | None = None):
         # TODO: de-duplicate with StringArray method. This method is moreless copy and
         # paste.

diff --git a/pandas/core/base.py b/pandas/core/base.py
index 42f52618eb07b..a31c4214ef789 100644
--- a/pandas/core/base.py
+++ b/pandas/core/base.py
@@ -16,9 +16,10 @@

 import pandas._libs.lib as lib
 from pandas._typing import (
-    Dtype,
+    ArrayLike,
     DtypeObj,
     IndexLabel,
+    NpDtype,
     Shape,
     final,
 )
@@ -435,7 +436,7 @@ def array(self) -> ExtensionArray:

     def to_numpy(
         self,
-        dtype: Dtype | None = None,
+        dtype: NpDtype | None = None,
         copy: bool = False,
         na_value=lib.no_default,
         **kwargs,
@@ -545,12 +546,7 @@ def to_numpy(
                 f"to_numpy() got an unexpected keyword argument '{bad_keys}'"
             )

-        # error: Argument "dtype" to "asarray" has incompatible type
-        # "Union[ExtensionDtype, str, dtype[Any], Type[str], Type[float], Type[int],
-        # Type[complex], Type[bool], Type[object], None]"; expected "Union[dtype[Any],
-        # None, type, _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int,
-        # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]"
-        result = np.asarray(self._values, dtype=dtype)  # type: ignore[arg-type]
+        result = np.asarray(self._values, dtype=dtype)
         # TODO(GH-24345): Avoid potential double copy
         if copy or na_value is not lib.no_default:
             result = result.copy()
@@ -995,8 +991,8 @@ def value_counts(
     def unique(self):
         values = self._values

-        if not isinstance(values, np.ndarray):
-            result = values.unique()
+        if isinstance(values, ExtensionArray):
+            result: ArrayLike = values.unique()
             if self.dtype.kind in ["m", "M"] and isinstance(self, ABCSeries):
                 # GH#31182 Series._values returns EA, unpack for backward-compat
                 if getattr(self.dtype, "tz", None) is None:
diff --git a/pandas/core/dtypes/base.py b/pandas/core/dtypes/base.py
index 9671c340a0a92..b65ac8b01150b 100644
--- a/pandas/core/dtypes/base.py
+++ b/pandas/core/dtypes/base.py
@@ -8,6 +8,7 @@
     TYPE_CHECKING,
     Any,
     TypeVar,
+    cast,
 )

 import numpy as np
@@ -28,7 +29,7 @@
     from pandas.core.arrays import ExtensionArray

     # To parameterize on same ExtensionDtype
-    E = TypeVar("E", bound="ExtensionDtype")
+    ExtensionDtypeT = TypeVar("ExtensionDtypeT", bound="ExtensionDtype")


 class ExtensionDtype:
@@ -155,7 +156,7 @@ def na_value(self) -> object:
         return np.nan

     @property
-    def type(self) -> type_t[Any]:
+    def type(self) -> type[Any]:
         """
         The scalar type for the array, e.g. ``int``

@@ -213,7 +214,7 @@ def construct_array_type(cls) -> type_t[ExtensionArray]:
         raise NotImplementedError

     @classmethod
-    def construct_from_string(cls, string: str):
+    def construct_from_string(cls, string: str) -> ExtensionDtype:
         r"""
         Construct this type from a string.

@@ -368,7 +369,7 @@ def _get_common_dtype(self, dtypes: list[DtypeObj]) -> DtypeObj | None:
         return None


-def register_extension_dtype(cls: type[E]) -> type[E]:
+def register_extension_dtype(cls: type[ExtensionDtypeT]) -> type[ExtensionDtypeT]:
     """
     Register an ExtensionType with pandas as class decorator.

@@ -424,7 +425,7 @@ def register(self, dtype: type[ExtensionDtype]) -> None:

         self.dtypes.append(dtype)

-    def find(self, dtype: type[ExtensionDtype] | str) -> type[ExtensionDtype] | None:
+    def find(self, dtype: type[ExtensionDtype] | str) -> ExtensionDtype | None:
         """
         Parameters
         ----------
@@ -439,7 +440,7 @@ def find(self, dtype: type[ExtensionDtype] | str) -> type[ExtensionDtype] | None
         if not isinstance(dtype, type):
             dtype_type = type(dtype)
             if issubclass(dtype_type, ExtensionDtype):
-                return dtype
+                return cast(ExtensionDtype, dtype)

             return None

diff --git a/pandas/core/dtypes/common.py b/pandas/core/dtypes/common.py
index 593e42f7ed749..ddc389934daaa 100644
--- a/pandas/core/dtypes/common.py
+++ b/pandas/core/dtypes/common.py
@@ -1789,9 +1789,7 @@ def pandas_dtype(dtype) -> DtypeObj:
     # registered extension types
     result = registry.find(dtype)
     if result is not None:
-        # error: Incompatible return value type (got "Type[ExtensionDtype]",
-        # expected "Union[dtype, ExtensionDtype]")
-        return result  # type: ignore[return-value]
+        return result

     # try a numpy dtype
     # raise a consistent TypeError if failed
diff --git a/pandas/core/groupby/groupby.py b/pandas/core/groupby/groupby.py
index ce7f0de616e18..bc2695158dbbc 100644
--- a/pandas/core/groupby/groupby.py
+++ b/pandas/core/groupby/groupby.py
@@ -2285,12 +2285,12 @@ def pre_processor(vals: ArrayLike) -> tuple[np.ndarray, np.dtype | None]:
             inference: np.dtype | None = None
             if is_integer_dtype(vals.dtype):
                 if isinstance(vals, ExtensionArray):
-                    out = vals.to_numpy(dtype=float, na_value=np.nan)
+                    out = vals.to_numpy(dtype=np.dtype(float), na_value=np.nan)
                 else:
                     out = vals
                 inference = np.dtype(np.int64)
             elif is_bool_dtype(vals.dtype) and isinstance(vals, ExtensionArray):
-                out = vals.to_numpy(dtype=float, na_value=np.nan)
+                out = vals.to_numpy(dtype=np.dtype(float), na_value=np.nan)
             elif is_datetime64_dtype(vals.dtype):
                 inference = np.dtype("datetime64[ns]")
                 out = np.asarray(vals).astype(float)
diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py
index 9b3f2d191831d..be0f6f59af6fe 100644
--- a/pandas/core/indexes/base.py
+++ b/pandas/core/indexes/base.py
@@ -4899,7 +4899,11 @@ def asof_locs(self, where: Index, mask: np.ndarray) -> np.ndarray:
         which correspond to the return values of the `asof` function
         for every element in `where`.
         """
-        locs = self._values[mask].searchsorted(where._values, side="right")
+        # error: Argument 1 to "searchsorted" of "ExtensionArray" has incompatible
+        # type "Union[ExtensionArray, ndarray]"; expected "Sequence[Any]"
+        locs = self._values[mask].searchsorted(
+            where._values, side="right"  # type: ignore[arg-type]
+        )
         locs = np.where(locs > 0, locs - 1, 0)

         result = np.arange(len(self), dtype=np.intp)[mask].take(locs)
diff --git a/pandas/core/indexes/extension.py b/pandas/core/indexes/extension.py
index 83998a2792a8a..215255d24aa90 100644
--- a/pandas/core/indexes/extension.py
+++ b/pandas/core/indexes/extension.py
@@ -337,7 +337,10 @@ def _get_unique_index(self):
             return self

         result = self._data.unique()
-        return type(self)._simple_new(result, name=self.name)
+        # error: Argument 1 to "_simple_new" of "ExtensionIndex" has incompatible
+        # type "ExtensionArray"; expected "Union[IntervalArray,
+        # NDArrayBackedExtensionArray]"
+        return type(self)._simple_new(result, name=self.name)  # type: ignore[arg-type]

     @doc(Index.map)
     def map(self, mapper, na_action=None):
diff --git a/pandas/core/internals/managers.py b/pandas/core/internals/managers.py
index 487047f1a1dbb..4ed3477f634c8 100644
--- a/pandas/core/internals/managers.py
+++ b/pandas/core/internals/managers.py
@@ -3,6 +3,7 @@
 from collections import defaultdict
 import itertools
 from typing import (
+    TYPE_CHECKING,
     Any,
     Callable,
     DefaultDict,
@@ -24,6 +25,7 @@
     ArrayLike,
     Dtype,
     DtypeObj,
+    NpDtype,
     Shape,
     type_t,
 )
@@ -82,6 +84,9 @@
     operate_blockwise,
 )

+if TYPE_CHECKING:
+    from pandas.core.arrays.base import ExtensionArray
+
 # TODO: flexible with index=None and/or items=None

 T = TypeVar("T", bound="BaseBlockManager")
@@ -623,7 +628,7 @@ def copy_func(ax):
     def as_array(
         self,
         transpose: bool = False,
-        dtype: Dtype | None = None,
+        dtype: NpDtype | None = None,
         copy: bool = False,
         na_value=lib.no_default,
     ) -> np.ndarray:
@@ -668,12 +673,7 @@ def as_array(
             else:
                 arr = np.asarray(blk.get_values())
                 if dtype:
-                    # error: Argument 1 to "astype" of "_ArrayOrScalarCommon" has
-                    # incompatible type "Union[ExtensionDtype, str, dtype[Any],
-                    # Type[object]]"; expected "Union[dtype[Any], None, type,
-                    # _SupportsDType, str, Union[Tuple[Any, int], Tuple[Any, Union[int,
-                    # Sequence[int]]], List[Any], _DTypeDict, Tuple[Any, Any]]]"
-                    arr = arr.astype(dtype, copy=False)  # type: ignore[arg-type]
+                    arr = arr.astype(dtype, copy=False)
         else:
             arr = self._interleave(dtype=dtype, na_value=na_value)
             # The underlying data was copied within _interleave
@@ -706,25 +706,17 @@ def _interleave(
         elif is_dtype_equal(dtype, str):
             dtype = "object"

-        # error: Argument "dtype" to "empty" has incompatible type
-        # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected
-        # "Union[dtype[Any], None, type, _SupportsDType, str, Union[Tuple[Any, int],
-        # Tuple[Any, Union[int, Sequence[int]]], List[Any], _DTypeDict, Tuple[Any,
-        # Any]]]"
-        result = np.empty(self.shape, dtype=dtype)  # type: ignore[arg-type]
-
+        # At this point, we know that dtype is a valid numpy type, so we do a cast
+        # to make the typing check pass.
+        result = np.empty(self.shape, dtype=cast(NpDtype, dtype))
         itemmask = np.zeros(self.shape[0])

         for blk in self.blocks:
             rl = blk.mgr_locs
             if blk.is_extension:
                 # Avoid implicit conversion of extension blocks to object
-
-                # error: Item "ndarray" of "Union[ndarray, ExtensionArray]" has no
-                # attribute "to_numpy"
-                arr = blk.values.to_numpy(  # type: ignore[union-attr]
-                    dtype=dtype, na_value=na_value
-                )
+                blk_values = cast("ExtensionArray", blk.values)
+                arr = blk_values.to_numpy(dtype=cast(NpDtype, dtype), na_value=na_value)
             else:
                 # error: Argument 1 to "get_values" of "Block" has incompatible type
                 # "Union[ExtensionDtype, str, dtype[Any], Type[object], None]"; expected
diff --git a/pandas/io/formats/format.py b/pandas/io/formats/format.py
index ba406a1ef117c..61c9c0aa06e09 100644
--- a/pandas/io/formats/format.py
+++ b/pandas/io/formats/format.py
@@ -1795,19 +1795,13 @@ def get_format_timedelta64(

     If box, then show the return in quotes
     """
-    values_int = values.view(np.int64)
+    values_int = np.asarray(values).view(np.int64)

     consider_values = values_int != iNaT

     one_day_nanos = 86400 * 10 ** 9
-    # error: Unsupported operand types for % ("ExtensionArray" and "int")
-    not_midnight = values_int % one_day_nanos != 0  # type: ignore[operator]
-    # error: Argument 1 to "__call__" of "ufunc" has incompatible type
-    # "Union[Any, ExtensionArray, ndarray]"; expected
-    # "Union[Union[int, float, complex, str, bytes, generic],
-    # Sequence[Union[int, float, complex, str, bytes, generic]],
-    # Sequence[Sequence[Any]], _SupportsArray]"
-    both = np.logical_and(consider_values, not_midnight)  # type: ignore[arg-type]
+    not_midnight = values_int % one_day_nanos != 0
+    both = np.logical_and(consider_values, not_midnight)
     even_days = both.sum() == 0

     if even_days: