|
36 | 36 | has_dask,
|
37 | 37 | raise_if_dask_computes,
|
38 | 38 | requires_bottleneck,
|
| 39 | + requires_cupy, |
39 | 40 | requires_dask,
|
40 | 41 | requires_iris,
|
41 | 42 | requires_numbagg,
|
42 | 43 | requires_numexpr,
|
| 44 | + requires_pint_0_15, |
43 | 45 | requires_scipy,
|
44 | 46 | requires_sparse,
|
45 | 47 | source_ndarray,
|
@@ -7375,3 +7377,87 @@ def test_drop_duplicates(keep):
|
7375 | 7377 | expected = xr.DataArray(data, dims="time", coords={"time": time}, name="test")
|
7376 | 7378 | result = ds.drop_duplicates("time", keep=keep)
|
7377 | 7379 | assert_equal(expected, result)
|
| 7380 | + |
| 7381 | + |
class TestNumpyCoercion:
    """Test that ``DataArray.to_numpy``/``as_numpy`` coerce duck-array backends
    (dask, pint, sparse, cupy, and pint-wrapping-dask) to plain numpy, for both
    the data variable and non-dimension coordinates."""

    # TODO once flexible indexes refactor complete also test coercion of dimension coords
    def test_from_numpy(self):
        da = xr.DataArray([1, 2, 3], dims="x", coords={"lat": ("x", [4, 5, 6])})

        # Already numpy-backed: as_numpy must be a no-op.
        assert_identical(da.as_numpy(), da)
        np.testing.assert_equal(da.to_numpy(), np.array([1, 2, 3]))
        np.testing.assert_equal(da["lat"].to_numpy(), np.array([4, 5, 6]))

    @requires_dask
    def test_from_dask(self):
        da = xr.DataArray([1, 2, 3], dims="x", coords={"lat": ("x", [4, 5, 6])})
        da_chunked = da.chunk(1)

        assert_identical(da_chunked.as_numpy(), da.compute())
        # Exercise to_numpy on the dask-backed object, not the eager original
        # (checking ``da`` here would only repeat test_from_numpy).
        np.testing.assert_equal(da_chunked.to_numpy(), np.array([1, 2, 3]))
        np.testing.assert_equal(da_chunked["lat"].to_numpy(), np.array([4, 5, 6]))

    @requires_pint_0_15
    def test_from_pint(self):
        from pint import Quantity

        arr = np.array([1, 2, 3])
        da = xr.DataArray(
            Quantity(arr, units="Pa"),
            dims="x",
            coords={"lat": ("x", Quantity(arr + 3, units="m"))},
        )

        # Coercion strips units and yields the bare magnitudes.
        expected = xr.DataArray(arr, dims="x", coords={"lat": ("x", arr + 3)})
        assert_identical(da.as_numpy(), expected)
        np.testing.assert_equal(da.to_numpy(), arr)
        np.testing.assert_equal(da["lat"].to_numpy(), arr + 3)

    @requires_sparse
    def test_from_sparse(self):
        import sparse

        arr = np.diagflat([1, 2, 3])
        sparr = sparse.COO.from_numpy(arr)
        da = xr.DataArray(
            sparr, dims=["x", "y"], coords={"elev": (("x", "y"), sparr + 3)}
        )

        # Coercion densifies the sparse arrays.
        expected = xr.DataArray(
            arr, dims=["x", "y"], coords={"elev": (("x", "y"), arr + 3)}
        )
        assert_identical(da.as_numpy(), expected)
        np.testing.assert_equal(da.to_numpy(), arr)

    @requires_cupy
    def test_from_cupy(self):
        import cupy as cp

        arr = np.array([1, 2, 3])
        da = xr.DataArray(
            cp.array(arr), dims="x", coords={"lat": ("x", cp.array(arr + 3))}
        )

        # Coercion transfers device arrays back to host numpy.
        expected = xr.DataArray(arr, dims="x", coords={"lat": ("x", arr + 3)})
        assert_identical(da.as_numpy(), expected)
        np.testing.assert_equal(da.to_numpy(), arr)

    @requires_dask
    @requires_pint_0_15
    def test_from_pint_wrapping_dask(self):
        import dask
        from pint import Quantity

        arr = np.array([1, 2, 3])
        d = dask.array.from_array(arr)
        da = xr.DataArray(
            Quantity(d, units="Pa"),
            dims="x",
            coords={"lat": ("x", Quantity(d, units="m") * 2)},
        )

        # Both wrapper layers (pint units, dask laziness) must be stripped.
        result = da.as_numpy()
        result.name = None  # remove dask-assigned name
        expected = xr.DataArray(arr, dims="x", coords={"lat": ("x", arr * 2)})
        assert_identical(result, expected)
        np.testing.assert_equal(da.to_numpy(), arr)
0 commit comments