from enum import Enum as _Enum

from . import _typing as _t
-from ._optional_deps import (
-    np as _np,
-    pa as _pa,
-)
+
+
+if _t.TYPE_CHECKING:
+    # "Why?", I hear you ask. Because sphinx of course.
+    # This beautiful construct helps sphinx to properly resolve the type hints.
+    import numpy as _np
+    import pyarrow as _pa
+else:
+    from ._optional_deps import (
+        np as _np,
+        pa as _pa,
+    )


if False:
    _swap_endian_unchecked_rust = None
    _vec_rust = None

-if _t.TYPE_CHECKING:
-    import numpy  # type: ignore[import]
-    import pyarrow  # type: ignore[import]
-

__all__ = [
    "Vector",
@@ -84,14 +88,18 @@ class Vector:
      Use an iterable of floats or an iterable of ints to construct the
      vector from native Python values.
      The ``dtype`` parameter is required.
+      See also: :meth:`.from_native`.
    * ``bytes``, ``bytearray``: Use raw bytes to construct the vector.
      The ``dtype`` parameter is required and ``byteorder`` is optional.
    * ``numpy.ndarray``: Use a numpy array to construct the vector.
      No further parameters are accepted.
+      See also: :meth:`.from_numpy`.
    * ``pyarrow.Array``: Use a pyarrow array to construct the vector.
      No further parameters are accepted.
+      See also: :meth:`.from_pyarrow`.
    :param dtype: The type of the vector.
-        See :attr:`.dtype` for currently supported inner data types.
+        See :class:`.VectorDType` for currently supported inner data types.
+        See also :attr:`.dtype`.

        This parameter is required if ``data`` is of type :class:`bytes`,
        :class:`bytearray`, ``Iterable[float]``, or ``Iterable[int]``.
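For orientation, a hypothetical construction sketch matching the overloads described above. The ``VectorDType`` member names (``F32``, ``I16``), the ``byteorder`` values, and the import location are assumptions for illustration only; consult the enum and module exports for the real names.

from neo4j import Vector, VectorDType  # assumed export location

# From native Python values; dtype is required.
v1 = Vector([1.0, 2.0, 3.0], dtype=VectorDType.F32)  # assumed member name

# From raw bytes; dtype is required, byteorder is optional.
v2 = Vector(b"\x00\x01\x00\x02", dtype=VectorDType.I16, byteorder="big")  # assumed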
@@ -163,10 +171,10 @@ def __init__(
    ) -> None: ...

    @_t.overload
-    def __init__(self, data: numpy.ndarray, /) -> None: ...
+    def __init__(self, data: _np.ndarray, /) -> None: ...

    @_t.overload
-    def __init__(self, data: pyarrow.Array, /) -> None: ...
+    def __init__(self, data: _pa.Array, /) -> None: ...

    def __init__(self, data, *args, **kwargs) -> None:
        if isinstance(data, (bytes, bytearray)):
@@ -373,15 +381,15 @@ def to_native(self) -> list[object]:
        return self._inner.to_native()

    @classmethod
-    def from_numpy(cls, data: numpy.ndarray, /) -> _t.Self:
+    def from_numpy(cls, data: _np.ndarray, /) -> _t.Self:
        """
        Create a Vector instance from a numpy array.

        :param data: The numpy array to create the vector from.
            The array must be one-dimensional and have a dtype that is
            supported by Neo4j vectors: ``float64``, ``float32``,
            ``int64``, ``int32``, ``int16``, or ``int8``.
-            See also :attr:`.dtype`.
+            See also :class:`.VectorDType`.

        :raises ValueError:
            * If the dtype is not supported.
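A minimal usage sketch of :meth:`.from_numpy` as documented above (assumes ``numpy`` is installed; the import location of ``Vector`` is an assumption for illustration):

import numpy as np

from neo4j import Vector  # assumed export location

# One-dimensional array with a supported dtype (float32 here).
v = Vector.from_numpy(np.array([1.0, 2.0, 3.0], dtype=np.float32))

# A multi-dimensional array or an unsupported dtype raises ValueError.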
@@ -394,7 +402,7 @@ def from_numpy(cls, data: numpy.ndarray, /) -> _t.Self:
        obj._set_numpy(data)
        return obj

-    def to_numpy(self) -> numpy.ndarray:
+    def to_numpy(self) -> _np.ndarray:
        """
        Convert the vector to a numpy array.

@@ -407,7 +415,7 @@ def to_numpy(self) -> numpy.ndarray:
        """
        return self._inner.to_numpy()

-    def _set_numpy(self, data: numpy.ndarray, /) -> None:
+    def _set_numpy(self, data: _np.ndarray, /) -> None:
        if data.ndim != 1:
            raise ValueError("Data must be one-dimensional")
        type_: type[_InnerVector]
@@ -429,18 +437,17 @@ def _set_numpy(self, data: numpy.ndarray, /) -> None:
        self._inner = type_.from_numpy(data)

    @classmethod
-    def from_pyarrow(cls, data: pyarrow.Array, /) -> _t.Self:
+    def from_pyarrow(cls, data: _pa.Array, /) -> _t.Self:
        """
        Create a Vector instance from a pyarrow array.

-        :param data: The pyarrow array to create the vector from.
-            The array must have a type that is supported by Neo4j.
-            See also :attr:`.dtype`.
-
        PyArrow stores data in little endian. Therefore, the byte-order needs
        to be swapped. If ``neo4j-rust-ext`` or ``numpy`` is installed, it will
        be used to speed up the byte flipping.

+        :param data: The pyarrow array to create the vector from.
+            The array must have a type that is supported by Neo4j.
+            See also :class:`.VectorDType`.
        :raises ValueError:
            * If the array's type is not supported.
            * If the array contains null values.
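A minimal usage sketch of :meth:`.from_pyarrow` as documented above (assumes ``pyarrow`` is installed; the import location of ``Vector`` is an assumption for illustration):

import pyarrow as pa

from neo4j import Vector  # assumed export location

# pyarrow arrays are little endian internally; from_pyarrow performs the
# byte-order swap described above while building the vector.
v = Vector.from_pyarrow(pa.array([1, 2, 3], type=pa.int32()))

# An array that contains null values raises ValueError.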
@@ -452,7 +459,7 @@ def from_pyarrow(cls, data: pyarrow.Array, /) -> _t.Self:
        obj._set_pyarrow(data)
        return obj

-    def to_pyarrow(self) -> pyarrow.Array:
+    def to_pyarrow(self) -> _pa.Array:
        """
        Convert the vector to a pyarrow array.

@@ -462,7 +469,7 @@ def to_pyarrow(self) -> pyarrow.Array:
        """
        return self._inner.to_pyarrow()

-    def _set_pyarrow(self, data: pyarrow.Array, /) -> None:
+    def _set_pyarrow(self, data: _pa.Array, /) -> None:
        import pyarrow

        type_: type[_InnerVector]
@@ -581,6 +588,7 @@ def _swap_endian(type_size: int, data: bytes, /) -> bytes:


def _swap_endian_unchecked_np(type_size: int, data: bytes, /) -> bytes:
+    dtype: _np.dtype
    match type_size:
        case 2:
            dtype = _np.dtype("<i2")
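The numpy path shown here boils down to reinterpreting the raw buffer at the element width and flipping each element's bytes. A standalone sketch of that technique for the 2-byte case (the helper name is illustrative, not part of the module):

import numpy as np

def swap_endian_2_byte(data: bytes) -> bytes:
    # View the buffer as 2-byte integers and return a byte-swapped copy.
    return np.frombuffer(data, dtype=np.dtype("<i2")).byteswap().tobytes()

assert swap_endian_2_byte(b"\x01\x02\x03\x04") == b"\x02\x01\x04\x03"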
@@ -727,18 +735,18 @@ def from_native(cls, data: _t.Iterable[object], /) -> _t.Self: ...
    def to_native(self) -> list[object]: ...

    @classmethod
-    def from_numpy(cls, data: numpy.ndarray, /) -> _t.Self:
+    def from_numpy(cls, data: _np.ndarray, /) -> _t.Self:
        if data.dtype.byteorder == "<" or (
            data.dtype.byteorder == "=" and _sys.byteorder == "little"
        ):
            data = data.byteswap()
        return cls(data.tobytes())

    @_abc.abstractmethod
-    def to_numpy(self) -> numpy.ndarray: ...
+    def to_numpy(self) -> _np.ndarray: ...

    @classmethod
-    def from_pyarrow(cls, data: pyarrow.Array, /) -> _t.Self:
+    def from_pyarrow(cls, data: _pa.Array, /) -> _t.Self:
        width = data.type.byte_width
        assert cls.size == width
        if _pa.compute.count(data, mode="only_null").as_py():
@@ -750,7 +758,7 @@ def from_pyarrow(cls, data: pyarrow.Array, /) -> _t.Self:
        return cls(bytes(buffer), byteorder=_sys.byteorder)

    @_abc.abstractmethod
-    def to_pyarrow(self) -> pyarrow.Array: ...
+    def to_pyarrow(self) -> _pa.Array: ...


class _InnerVectorFloat(_InnerVector, _abc.ABC):
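The base-class ``from_numpy`` above normalizes the input to big-endian bytes before storing it. A standalone check of that invariant using only numpy and the standard library (the names here are illustrative, not part of the module):

import sys

import numpy as np

arr = np.array([1], dtype=np.int16)  # native byte order
needs_swap = arr.dtype.byteorder == "<" or (
    arr.dtype.byteorder == "=" and sys.byteorder == "little"
)
raw = (arr.byteswap() if needs_swap else arr).tobytes()
assert raw == b"\x00\x01"  # big endian, regardless of platform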
@@ -822,12 +830,12 @@ def _to_native_py(self) -> list[object]:
    else:
        to_native = _to_native_py

-    def to_numpy(self) -> numpy.ndarray:
+    def to_numpy(self) -> _np.ndarray:
        import numpy

        return numpy.frombuffer(self.data, dtype=numpy.dtype(">f8"))

-    def to_pyarrow(self) -> pyarrow.Array:
+    def to_pyarrow(self) -> _pa.Array:
        import pyarrow

        buffer = pyarrow.py_buffer(self.data_le)
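``to_numpy`` above hands the stored big-endian bytes straight to numpy by requesting an explicit big-endian dtype, so no copy or swap is needed. A standalone illustration of that ``frombuffer`` call:

import numpy as np

data = b"\x3f\xf0\x00\x00\x00\x00\x00\x00"  # 1.0 encoded as big-endian float64
arr = np.frombuffer(data, dtype=np.dtype(">f8"))
assert arr[0] == 1.0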
@@ -897,12 +905,12 @@ def _to_native_py(self) -> list[object]:
    else:
        to_native = _to_native_py

-    def to_numpy(self) -> numpy.ndarray:
+    def to_numpy(self) -> _np.ndarray:
        import numpy

        return numpy.frombuffer(self.data, dtype=numpy.dtype(">f4"))

-    def to_pyarrow(self) -> pyarrow.Array:
+    def to_pyarrow(self) -> _pa.Array:
        import pyarrow

        buffer = pyarrow.py_buffer(self.data_le)
@@ -997,12 +1005,12 @@ def _to_native_py(self) -> list[object]:
    else:
        to_native = _to_native_py

-    def to_numpy(self) -> numpy.ndarray:
+    def to_numpy(self) -> _np.ndarray:
        import numpy

        return numpy.frombuffer(self.data, dtype=numpy.dtype(">i8"))

-    def to_pyarrow(self) -> pyarrow.Array:
+    def to_pyarrow(self) -> _pa.Array:
        import pyarrow

        buffer = pyarrow.py_buffer(self.data_le)
@@ -1090,12 +1098,12 @@ def _to_native_py(self) -> list[object]:
    else:
        to_native = _to_native_py

-    def to_numpy(self) -> numpy.ndarray:
+    def to_numpy(self) -> _np.ndarray:
        import numpy

        return numpy.frombuffer(self.data, dtype=numpy.dtype(">i4"))

-    def to_pyarrow(self) -> pyarrow.Array:
+    def to_pyarrow(self) -> _pa.Array:
        import pyarrow

        buffer = pyarrow.py_buffer(self.data_le)
@@ -1183,12 +1191,12 @@ def _to_native_py(self) -> list[object]:
    else:
        to_native = _to_native_py

-    def to_numpy(self) -> numpy.ndarray:
+    def to_numpy(self) -> _np.ndarray:
        import numpy

        return numpy.frombuffer(self.data, dtype=numpy.dtype(">i2"))

-    def to_pyarrow(self) -> pyarrow.Array:
+    def to_pyarrow(self) -> _pa.Array:
        import pyarrow

        buffer = pyarrow.py_buffer(self.data_le)
@@ -1276,12 +1284,12 @@ def _to_native_py(self) -> list[object]:
    else:
        to_native = _to_native_py

-    def to_numpy(self) -> numpy.ndarray:
+    def to_numpy(self) -> _np.ndarray:
        import numpy

        return numpy.frombuffer(self.data, dtype=numpy.dtype(">i1"))

-    def to_pyarrow(self) -> pyarrow.Array:
+    def to_pyarrow(self) -> _pa.Array:
        import pyarrow

        buffer = pyarrow.py_buffer(self.data_le)