From 885f91a9fd1cbd12f7123690f78bb00c02caf6fb Mon Sep 17 00:00:00 2001 From: Oleksandr Pavlyk Date: Thu, 11 Aug 2022 10:27:02 -0500 Subject: [PATCH 01/19] dpnp_take_c uses SYCL kernel, no need to use no_sycl parameter in adapter The reason this crashed with CPU device and gave incorrect results on Windows was deeper. 1. Adapter call allocates USM-shared buffer and copies data into it 2. Kernel is submitted to work on USM-shared pointer 3. dpnp_take_c returns kernel submission event 4. Adapter class goes out of scope and frees USM allocation without making sure that the kernel that works on it has completed its execution 5. If kernel execution was in progress we got a crash on CPU, or incorrect result on GPU If kernel execution was complete it worked as expected. This change fixes the problem because it removes creation of unprotected USM-shared temporary. --- dpnp/backend/kernels/dpnp_krnl_indexing.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dpnp/backend/kernels/dpnp_krnl_indexing.cpp b/dpnp/backend/kernels/dpnp_krnl_indexing.cpp index ef6fffb8fb6..d37e319b7e3 100644 --- a/dpnp/backend/kernels/dpnp_krnl_indexing.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_indexing.cpp @@ -901,7 +901,7 @@ DPCTLSyclEventRef dpnp_take_c(DPCTLSyclQueueRef q_ref, DPCTLSyclEventRef event_ref = nullptr; sycl::queue q = *(reinterpret_cast(q_ref)); - DPNPC_ptr_adapter<_DataType> input1_ptr(q_ref, array1_in, array1_size, true); + DPNPC_ptr_adapter<_DataType> input1_ptr(q_ref, array1_in, array1_size); DPNPC_ptr_adapter<_IndecesType> input2_ptr(q_ref, indices1, size); _DataType* array_1 = input1_ptr.get_ptr(); _IndecesType* indices = input2_ptr.get_ptr(); From 7220d872ee78929cad120f45da5f74e5d6feb4cd Mon Sep 17 00:00:00 2001 From: Oleksandr Pavlyk Date: Thu, 11 Aug 2022 10:31:12 -0500 Subject: [PATCH 02/19] Change to DPNPC_adapter to set/use events upon which deallocation must depend The deallocation routine simply calls sycl::event::wait on the stored vector. 
--- dpnp/backend/src/dpnpc_memory_adapter.hpp | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/dpnp/backend/src/dpnpc_memory_adapter.hpp b/dpnp/backend/src/dpnpc_memory_adapter.hpp index 19b8df3bea7..7b04b42a756 100644 --- a/dpnp/backend/src/dpnpc_memory_adapter.hpp +++ b/dpnp/backend/src/dpnpc_memory_adapter.hpp @@ -52,6 +52,7 @@ class DPNPC_ptr_adapter final bool target_no_queue = false; /**< Indicates that original memory will be accessed from non SYCL environment */ bool copy_back = false; /**< If the memory is 'result' it needs to be copied back to original */ const bool verbose = false; + std::vector deps{}; public: DPNPC_ptr_adapter() = delete; @@ -68,6 +69,7 @@ class DPNPC_ptr_adapter final copy_back = copy_back_request; orig_ptr = const_cast(src_ptr); size_in_bytes = size * sizeof(_DataType); + deps = std::vector{}; // enum class alloc { host = 0, device = 1, shared = 2, unknown = 3 }; sycl::usm::alloc src_ptr_type = sycl::usm::alloc::unknown; @@ -117,6 +119,8 @@ class DPNPC_ptr_adapter final std::cerr << "DPNPC_ptr_converter::free_memory at=" << aux_ptr << std::endl; } + sycl::event::wait(deps); + if (copy_back) { copy_data_back(); @@ -158,6 +162,15 @@ class DPNPC_ptr_adapter final dpnp_memory_memcpy_c(queue_ref, orig_ptr, aux_ptr, size_in_bytes); } + + void depends_on(const std::vector &new_deps) { + deps.insert(std::end(deps), std::begin(new_deps), std::end(new_deps)); + } + + void depends_on(const sycl::event &new_dep) { + deps.push_back(new_dep); + } + }; #endif // DPNP_MEMORY_ADAPTER_H From 37386bb5802c5f3350cfca9059dd93d0ff231a26 Mon Sep 17 00:00:00 2001 From: Oleksandr Pavlyk Date: Sun, 14 Aug 2022 07:08:37 -0500 Subject: [PATCH 03/19] Used DPNPC_ptr_adapter::depends_on Also applied DPCTLEvent_Delete in legacy interfaces to avoid memory leak. 
--- .../kernels/dpnp_krnl_arraycreation.cpp | 47 ++++++++++++------- dpnp/backend/kernels/dpnp_krnl_bitwise.cpp | 24 ++++++---- dpnp/backend/kernels/dpnp_krnl_elemwise.cpp | 23 +++++++++ .../kernels/dpnp_krnl_mathematical.cpp | 8 ++++ dpnp/backend/kernels/dpnp_krnl_reduction.cpp | 2 + dpnp/backend/kernels/dpnp_krnl_sorting.cpp | 4 ++ dpnp/backend/kernels/dpnp_krnl_statistics.cpp | 34 +++++++++----- 7 files changed, 105 insertions(+), 37 deletions(-) diff --git a/dpnp/backend/kernels/dpnp_krnl_arraycreation.cpp b/dpnp/backend/kernels/dpnp_krnl_arraycreation.cpp index 302289220f8..3b7f362d51a 100644 --- a/dpnp/backend/kernels/dpnp_krnl_arraycreation.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_arraycreation.cpp @@ -493,6 +493,9 @@ DPCTLSyclEventRef dpnp_ptp_c(DPCTLSyclQueueRef q_ref, (void)dep_event_vec_ref; DPCTLSyclEventRef event_ref = nullptr; + DPCTLSyclEventRef e1_ref = nullptr; + DPCTLSyclEventRef e2_ref = nullptr; + DPCTLSyclEventRef e3_ref = nullptr; if ((input1_in == nullptr) || (result1_out == nullptr)) { @@ -514,29 +517,36 @@ DPCTLSyclEventRef dpnp_ptp_c(DPCTLSyclQueueRef q_ref, _DataType* min_arr = reinterpret_cast<_DataType*>(sycl::malloc_shared(result_size * sizeof(_DataType), q)); _DataType* max_arr = reinterpret_cast<_DataType*>(sycl::malloc_shared(result_size * sizeof(_DataType), q)); - dpnp_min_c<_DataType>(arr, min_arr, result_size, input_shape, input_ndim, axis, naxis); - dpnp_max_c<_DataType>(arr, max_arr, result_size, input_shape, input_ndim, axis, naxis); + e1_ref = dpnp_min_c<_DataType>(q_ref, arr, min_arr, result_size, input_shape, input_ndim, axis, naxis, NULL); + e2_ref = dpnp_max_c<_DataType>(q_ref, arr, max_arr, result_size, input_shape, input_ndim, axis, naxis, NULL); shape_elem_type* _strides = reinterpret_cast(sycl::malloc_shared(result_ndim * sizeof(shape_elem_type), q)); get_shape_offsets_inkernel(result_shape, result_ndim, _strides); - dpnp_subtract_c<_DataType, _DataType, _DataType>(result, - result_size, - result_ndim, - result_shape, 
- result_strides, - max_arr, - result_size, - result_ndim, - result_shape, - _strides, - min_arr, - result_size, - result_ndim, - result_shape, - _strides, - NULL); + e3_ref = dpnp_subtract_c<_DataType, _DataType, _DataType>(q_ref, result, + result_size, + result_ndim, + result_shape, + result_strides, + max_arr, + result_size, + result_ndim, + result_shape, + _strides, + min_arr, + result_size, + result_ndim, + result_shape, + _strides, + NULL, NULL); + + DPCTLEvent_Wait(e1_ref); + DPCTLEvent_Wait(e2_ref); + DPCTLEvent_Wait(e3_ref); + DPCTLEvent_Delete(e1_ref); + DPCTLEvent_Delete(e2_ref); + DPCTLEvent_Delete(e3_ref); sycl::free(min_arr, q); sycl::free(max_arr, q); @@ -576,6 +586,7 @@ void dpnp_ptp_c(void* result1_out, naxis, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template diff --git a/dpnp/backend/kernels/dpnp_krnl_bitwise.cpp b/dpnp/backend/kernels/dpnp_krnl_bitwise.cpp index 4d0f6498ed0..b64670be4e0 100644 --- a/dpnp/backend/kernels/dpnp_krnl_bitwise.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_bitwise.cpp @@ -148,16 +148,16 @@ static void func_map_init_bitwise_1arg_1type(func_map_t& fmap) \ sycl::queue q = *(reinterpret_cast(q_ref)); \ \ - DPNPC_ptr_adapter<_DataType> input1_ptr(q_ref, input1_in, input1_size); \ - DPNPC_ptr_adapter input1_shape_ptr(q_ref, input1_shape, input1_ndim, true); \ - DPNPC_ptr_adapter input1_strides_ptr(q_ref, input1_strides, input1_ndim, true); \ + DPNPC_ptr_adapter<_DataType> input1_ptr(q_ref, input1_in, input1_size); \ + DPNPC_ptr_adapter input1_shape_ptr(q_ref, input1_shape, input1_ndim, true); \ + DPNPC_ptr_adapter input1_strides_ptr(q_ref, input1_strides, input1_ndim, true); \ \ - DPNPC_ptr_adapter<_DataType> input2_ptr(q_ref, input2_in, input2_size); \ - DPNPC_ptr_adapter input2_shape_ptr(q_ref, input2_shape, input2_ndim, true); \ - DPNPC_ptr_adapter input2_strides_ptr(q_ref, input2_strides, input2_ndim, true); \ + DPNPC_ptr_adapter<_DataType> input2_ptr(q_ref, input2_in, 
input2_size); \ + DPNPC_ptr_adapter input2_shape_ptr(q_ref, input2_shape, input2_ndim, true); \ + DPNPC_ptr_adapter input2_strides_ptr(q_ref, input2_strides, input2_ndim, true); \ \ - DPNPC_ptr_adapter<_DataType> result_ptr(q_ref, result_out, result_size, false, true); \ - DPNPC_ptr_adapter result_strides_ptr(q_ref, result_strides, result_ndim); \ + DPNPC_ptr_adapter<_DataType> result_ptr(q_ref, result_out, result_size, false, true); \ + DPNPC_ptr_adapter result_strides_ptr(q_ref, result_strides, result_ndim); \ \ _DataType* input1_data = input1_ptr.get_ptr(); \ shape_elem_type* input1_shape_data = input1_shape_ptr.get_ptr(); \ @@ -226,6 +226,14 @@ static void func_map_init_bitwise_1arg_1type(func_map_t& fmap) }; \ event = q.submit(kernel_func); \ } \ + input1_ptr.depends_on(event); \ + input1_shape_ptr.depends_on(event); \ + input1_strides_ptr.depends_on(event); \ + input2_ptr.depends_on(event); \ + input2_shape_ptr.depends_on(event); \ + input2_strides_ptr.depends_on(event); \ + result_ptr.depends_on(event); \ + result_strides_ptr.depends_on(event); \ event_ref = reinterpret_cast(&event); \ \ return DPCTLEvent_Copy(event_ref); \ diff --git a/dpnp/backend/kernels/dpnp_krnl_elemwise.cpp b/dpnp/backend/kernels/dpnp_krnl_elemwise.cpp index 98aff7eac75..63b6195e788 100644 --- a/dpnp/backend/kernels/dpnp_krnl_elemwise.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_elemwise.cpp @@ -143,6 +143,12 @@ } \ } \ \ + input1_ptr.depends_on(event); \ + input1_shape_ptr.depends_on(event); \ + input1_strides_ptr.depends_on(event); \ + result_ptr.depends_on(event); \ + result_strides_ptr.depends_on(event); \ + \ event_ref = reinterpret_cast(&event); \ \ return DPCTLEvent_Copy(event_ref); \ @@ -644,6 +650,12 @@ static void func_map_init_elemwise_1arg_2type(func_map_t& fmap) } \ } \ \ + input1_ptr.depends_on(event); \ + input1_shape_ptr.depends_on(event); \ + input1_strides_ptr.depends_on(event); \ + result_ptr.depends_on(event); \ + result_strides_ptr.depends_on(event); \ + \ event_ref = 
reinterpret_cast(&event); \ \ return DPCTLEvent_Copy(event_ref); \ @@ -998,6 +1010,17 @@ static void func_map_init_elemwise_1arg_1type(func_map_t& fmap) event = q.submit(kernel_func); \ } \ } \ + \ + input1_ptr.depends_on(event); \ + input1_shape_ptr.depends_on(event); \ + input1_strides_ptr.depends_on(event); \ + input2_ptr.depends_on(event); \ + input2_shape_ptr.depends_on(event); \ + input2_strides_ptr.depends_on(event); \ + result_ptr.depends_on(event); \ + result_shape_ptr.depends_on(event); \ + result_strides_ptr.depends_on(event); \ + \ event_ref = reinterpret_cast(&event); \ \ return DPCTLEvent_Copy(event_ref); \ diff --git a/dpnp/backend/kernels/dpnp_krnl_mathematical.cpp b/dpnp/backend/kernels/dpnp_krnl_mathematical.cpp index dc2493094a2..32f8ffe465d 100644 --- a/dpnp/backend/kernels/dpnp_krnl_mathematical.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_mathematical.cpp @@ -170,6 +170,8 @@ DPCTLSyclEventRef dpnp_elemwise_absolute_c(DPCTLSyclQueueRef q_ref, event = q.submit(kernel_func); } + input1_ptr.depends_on(event); + result1_ptr.depends_on(event); event_ref = reinterpret_cast(&event); return DPCTLEvent_Copy(event_ref); @@ -483,6 +485,8 @@ DPCTLSyclEventRef dpnp_ediff1d_c(DPCTLSyclQueueRef q_ref, }; event = q.submit(kernel_func); + input1_ptr.depends_on(event); + result_ptr.depends_on(event); event_ref = reinterpret_cast(&event); return DPCTLEvent_Copy(event_ref); @@ -676,6 +680,7 @@ void dpnp_floor_divide_c(void* result_out, where, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -770,6 +775,7 @@ void dpnp_modf_c(void* array1_in, void* result1_out, void* result2_out, size_t s size, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -911,6 +917,7 @@ void dpnp_remainder_c(void* result_out, where, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -1041,6 +1048,7 @@ void dpnp_trapz_c( array2_size, 
dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template diff --git a/dpnp/backend/kernels/dpnp_krnl_reduction.cpp b/dpnp/backend/kernels/dpnp_krnl_reduction.cpp index acda35db3a3..e03aabee5ae 100644 --- a/dpnp/backend/kernels/dpnp_krnl_reduction.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_reduction.cpp @@ -162,6 +162,7 @@ void dpnp_sum_c(void* result_out, where, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -278,6 +279,7 @@ void dpnp_prod_c(void* result_out, where, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template diff --git a/dpnp/backend/kernels/dpnp_krnl_sorting.cpp b/dpnp/backend/kernels/dpnp_krnl_sorting.cpp index 84e919954d4..614bb94f070 100644 --- a/dpnp/backend/kernels/dpnp_krnl_sorting.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_sorting.cpp @@ -91,6 +91,7 @@ void dpnp_argsort_c(void* array1_in, void* result1, size_t size) size, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -242,6 +243,7 @@ void dpnp_partition_c( ndim, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -394,6 +396,7 @@ void dpnp_searchsorted_c( v_size, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -459,6 +462,7 @@ void dpnp_sort_c(void* array1_in, void* result1, size_t size) size, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template diff --git a/dpnp/backend/kernels/dpnp_krnl_statistics.cpp b/dpnp/backend/kernels/dpnp_krnl_statistics.cpp index 0617529b81b..abf77ff25ee 100644 --- a/dpnp/backend/kernels/dpnp_krnl_statistics.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_statistics.cpp @@ -101,6 +101,7 @@ void dpnp_correlate_c(void* result_out, where, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ 
-298,6 +299,7 @@ void dpnp_count_nonzero_c(void* array1_in, void* result1_out, size_t size) size, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -539,6 +541,7 @@ void dpnp_max_c(void* array1_in, naxis, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -636,6 +639,7 @@ void dpnp_mean_c(void* array1_in, naxis, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -721,6 +725,7 @@ void dpnp_median_c(void* array1_in, naxis, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -963,6 +968,7 @@ void dpnp_min_c(void* array1_in, naxis, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -1044,6 +1050,7 @@ void dpnp_nanvar_c(void* array1_in, void* mask_arr1, void* result1, const size_t arr_size, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -1100,17 +1107,20 @@ DPCTLSyclEventRef dpnp_std_c(DPCTLSyclQueueRef q_ref, q)); *var_strides = 1; - dpnp_sqrt_c<_ResultType, _ResultType>(result1, - result1_size, - result1_ndim, - result1_shape, - result1_strides, - var, - var_size, - var_ndim, - var_shape, - var_strides, - NULL); + DPCTLSyclEventRef e_sqrt_ref = + dpnp_sqrt_c<_ResultType, _ResultType>(q_ref, result1, + result1_size, + result1_ndim, + result1_shape, + result1_strides, + var, + var_size, + var_ndim, + var_shape, + var_strides, + NULL, NULL); + DPCTLEvent_WaitAndThrow(e_sqrt_ref); + DPCTLEvent_Delete(e_sqrt_ref); sycl::free(var, q); sycl::free(result1_shape, q); @@ -1142,6 +1152,7 @@ void dpnp_std_c(void* array1_in, ddof, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -1253,6 +1264,7 @@ void dpnp_var_c(void* array1_in, ddof, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } 
template From 16a7632b87faba7257d6cb1b2e7bca2df6c4196b Mon Sep 17 00:00:00 2001 From: Anton <100830759+antonwolfy@users.noreply.github.com> Date: Wed, 17 Aug 2022 13:04:52 +0200 Subject: [PATCH 04/19] Get rid of "Improper Null Termination" issue Add a null-terminated symbol at the end of char array to avoid "Improper Null Termination" issue reported by Checkmarx scan. --- dpnp/backend/kernels/dpnp_krnl_random.cpp | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/dpnp/backend/kernels/dpnp_krnl_random.cpp b/dpnp/backend/kernels/dpnp_krnl_random.cpp index afc5df8187d..47d10467747 100644 --- a/dpnp/backend/kernels/dpnp_krnl_random.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_random.cpp @@ -1674,14 +1674,17 @@ DPCTLSyclEventRef dpnp_rng_shuffle_c(DPCTLSyclQueueRef q_ref, // Fast, statically typed path: shuffle the underlying buffer. // Only for non-empty, 1d objects of class ndarray (subclasses such // as MaskedArrays may not support this approach). - char* buf = reinterpret_cast(sycl::malloc_shared(itemsize * sizeof(char), q)); + char* buf = reinterpret_cast(sycl::malloc_shared((itemsize + 1) * sizeof(char), q)); for (size_t i = uvec_size; i > 0; i--) { size_t j = (size_t)(floor((i + 1) * Uvec[i - 1])); if (i != j) { - auto memcpy1 = - q.submit([&](sycl::handler& h) { h.memcpy(buf, result1 + j * itemsize, itemsize); }); + auto memcpy1 = q.submit([&](sycl::handler& h) { + h.memcpy(buf, result1 + j * itemsize, itemsize); + // Make as null-terminated buffer to resolve CheckMarx's false positive issue + buf[itemsize] = '\0'; + }); auto memcpy2 = q.submit([&](sycl::handler& h) { h.depends_on({memcpy1}); h.memcpy(result1 + j * itemsize, result1 + i * itemsize, itemsize); @@ -1699,14 +1702,17 @@ DPCTLSyclEventRef dpnp_rng_shuffle_c(DPCTLSyclQueueRef q_ref, { // Multidimensional ndarrays require a bounce buffer. 
size_t step_size = (size / high_dim_size) * itemsize; // size in bytes for x[i] element - char* buf = reinterpret_cast(sycl::malloc_shared(step_size * sizeof(char), q)); + char* buf = reinterpret_cast(sycl::malloc_shared((step_size + 1) * sizeof(char), q)); for (size_t i = uvec_size; i > 0; i--) { size_t j = (size_t)(floor((i + 1) * Uvec[i - 1])); if (j < i) { - auto memcpy1 = - q.submit([&](sycl::handler& h) { h.memcpy(buf, result1 + j * step_size, step_size); }); + auto memcpy1 = q.submit([&](sycl::handler& h) { + h.memcpy(buf, result1 + j * step_size, step_size); + // Make as null-terminated buffer to resolve CheckMarx's false positive issue + buf[step_size] = '\0'; + }); auto memcpy2 = q.submit([&](sycl::handler& h) { h.depends_on({memcpy1}); h.memcpy(result1 + j * step_size, result1 + i * step_size, step_size); From d839ea1f9378229e6564a9ff6cc19017221be725 Mon Sep 17 00:00:00 2001 From: Oleksandr Pavlyk Date: Wed, 17 Aug 2022 13:32:08 -0500 Subject: [PATCH 05/19] implemented PR feedback --- dpnp/backend/src/dpnpc_memory_adapter.hpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dpnp/backend/src/dpnpc_memory_adapter.hpp b/dpnp/backend/src/dpnpc_memory_adapter.hpp index 7b04b42a756..3b07795ed5f 100644 --- a/dpnp/backend/src/dpnpc_memory_adapter.hpp +++ b/dpnp/backend/src/dpnpc_memory_adapter.hpp @@ -52,7 +52,7 @@ class DPNPC_ptr_adapter final bool target_no_queue = false; /**< Indicates that original memory will be accessed from non SYCL environment */ bool copy_back = false; /**< If the memory is 'result' it needs to be copied back to original */ const bool verbose = false; - std::vector deps{}; + std::vector deps; public: DPNPC_ptr_adapter() = delete; @@ -164,10 +164,12 @@ class DPNPC_ptr_adapter final } void depends_on(const std::vector &new_deps) { + assert(allocated); deps.insert(std::end(deps), std::begin(new_deps), std::end(new_deps)); } void depends_on(const sycl::event &new_dep) { + assert(allocated); deps.push_back(new_dep); } 
From 818dc82991642a621d10213ee573dde8a35310d4 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Thu, 18 Aug 2022 14:48:14 -0500 Subject: [PATCH 06/19] Reworked solution with a pointer on void --- dpnp/backend/kernels/dpnp_krnl_random.cpp | 38 +++++------------------ 1 file changed, 8 insertions(+), 30 deletions(-) diff --git a/dpnp/backend/kernels/dpnp_krnl_random.cpp b/dpnp/backend/kernels/dpnp_krnl_random.cpp index 47d10467747..5e3f9531b6c 100644 --- a/dpnp/backend/kernels/dpnp_krnl_random.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_random.cpp @@ -1674,26 +1674,15 @@ DPCTLSyclEventRef dpnp_rng_shuffle_c(DPCTLSyclQueueRef q_ref, // Fast, statically typed path: shuffle the underlying buffer. // Only for non-empty, 1d objects of class ndarray (subclasses such // as MaskedArrays may not support this approach). - char* buf = reinterpret_cast(sycl::malloc_shared((itemsize + 1) * sizeof(char), q)); + void* buf = sycl::malloc_device(itemsize, q); for (size_t i = uvec_size; i > 0; i--) { size_t j = (size_t)(floor((i + 1) * Uvec[i - 1])); if (i != j) { - auto memcpy1 = q.submit([&](sycl::handler& h) { - h.memcpy(buf, result1 + j * itemsize, itemsize); - // Make as null-terminated buffer to resolve CheckMarx's false positive issue - buf[itemsize] = '\0'; - }); - auto memcpy2 = q.submit([&](sycl::handler& h) { - h.depends_on({memcpy1}); - h.memcpy(result1 + j * itemsize, result1 + i * itemsize, itemsize); - }); - auto memcpy3 = q.submit([&](sycl::handler& h) { - h.depends_on({memcpy2}); - h.memcpy(result1 + i * itemsize, buf, itemsize); - }); - memcpy3.wait(); + auto memcpy1 = q.memcpy(buf, result1 + j * itemsize, itemsize); + auto memcpy2 = q.memcpy(result1 + j * itemsize, result1 + i * itemsize, itemsize, memcpy1); + q.memcpy(result1 + i * itemsize, buf, itemsize, memcpy2).wait(); } } sycl::free(buf, q); @@ -1702,26 +1691,15 @@ DPCTLSyclEventRef dpnp_rng_shuffle_c(DPCTLSyclQueueRef q_ref, { // Multidimensional ndarrays require a bounce buffer. 
size_t step_size = (size / high_dim_size) * itemsize; // size in bytes for x[i] element - char* buf = reinterpret_cast(sycl::malloc_shared((step_size + 1) * sizeof(char), q)); + void* buf = sycl::malloc_device(step_size, q); for (size_t i = uvec_size; i > 0; i--) { size_t j = (size_t)(floor((i + 1) * Uvec[i - 1])); if (j < i) { - auto memcpy1 = q.submit([&](sycl::handler& h) { - h.memcpy(buf, result1 + j * step_size, step_size); - // Make as null-terminated buffer to resolve CheckMarx's false positive issue - buf[step_size] = '\0'; - }); - auto memcpy2 = q.submit([&](sycl::handler& h) { - h.depends_on({memcpy1}); - h.memcpy(result1 + j * step_size, result1 + i * step_size, step_size); - }); - auto memcpy3 = q.submit([&](sycl::handler& h) { - h.depends_on({memcpy2}); - h.memcpy(result1 + i * step_size, buf, step_size); - }); - memcpy3.wait(); + auto memcpy1 = q.memcpy(buf, result1 + j * step_size, step_size); + auto memcpy2 = q.memcpy(result1 + j * step_size, result1 + i * step_size, step_size, memcpy1); + q.memcpy(result1 + i * step_size, buf, step_size, memcpy2).wait(); } } sycl::free(buf, q); From b29d95782706d9056b2962de9e104046dc19721a Mon Sep 17 00:00:00 2001 From: Anton <100830759+antonwolfy@users.noreply.github.com> Date: Thu, 18 Aug 2022 22:06:54 +0200 Subject: [PATCH 07/19] Update dpnp/backend/kernels/dpnp_krnl_random.cpp Co-authored-by: Oleksandr Pavlyk --- dpnp/backend/kernels/dpnp_krnl_random.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dpnp/backend/kernels/dpnp_krnl_random.cpp b/dpnp/backend/kernels/dpnp_krnl_random.cpp index 5e3f9531b6c..7b115351c14 100644 --- a/dpnp/backend/kernels/dpnp_krnl_random.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_random.cpp @@ -1674,7 +1674,7 @@ DPCTLSyclEventRef dpnp_rng_shuffle_c(DPCTLSyclQueueRef q_ref, // Fast, statically typed path: shuffle the underlying buffer. // Only for non-empty, 1d objects of class ndarray (subclasses such // as MaskedArrays may not support this approach). 
- void* buf = sycl::malloc_device(itemsize, q); + void* buf = sycl::malloc_device(itemsize, q); for (size_t i = uvec_size; i > 0; i--) { size_t j = (size_t)(floor((i + 1) * Uvec[i - 1])); From 629d0e0367bc4851a292021eae7a115c46223b06 Mon Sep 17 00:00:00 2001 From: Anton <100830759+antonwolfy@users.noreply.github.com> Date: Thu, 18 Aug 2022 22:07:07 +0200 Subject: [PATCH 08/19] Update dpnp/backend/kernels/dpnp_krnl_random.cpp Co-authored-by: Oleksandr Pavlyk --- dpnp/backend/kernels/dpnp_krnl_random.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dpnp/backend/kernels/dpnp_krnl_random.cpp b/dpnp/backend/kernels/dpnp_krnl_random.cpp index 7b115351c14..53207e67ff3 100644 --- a/dpnp/backend/kernels/dpnp_krnl_random.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_random.cpp @@ -1691,7 +1691,7 @@ DPCTLSyclEventRef dpnp_rng_shuffle_c(DPCTLSyclQueueRef q_ref, { // Multidimensional ndarrays require a bounce buffer. size_t step_size = (size / high_dim_size) * itemsize; // size in bytes for x[i] element - void* buf = sycl::malloc_device(step_size, q); + void* buf = sycl::malloc_device(step_size, q); for (size_t i = uvec_size; i > 0; i--) { size_t j = (size_t)(floor((i + 1) * Uvec[i - 1])); From 2fed06a9210c9251fe3e04a424d3f217cf9e0d86 Mon Sep 17 00:00:00 2001 From: Alexander Rybkin Date: Wed, 24 Aug 2022 15:14:31 +0200 Subject: [PATCH 09/19] Skip for two more tests till waiting fix (#1171) * Skip for two more tests till waiting fix tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_ptp_all_nan tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_ptp_nan Need to skip them because CI does not work due to this. 
* The same tests skip for gpu --- tests/skipped_tests.tbl | 2 ++ tests/skipped_tests_gpu.tbl | 2 ++ 2 files changed, 4 insertions(+) diff --git a/tests/skipped_tests.tbl b/tests/skipped_tests.tbl index f9863951f5b..bbf3c1c3b53 100644 --- a/tests/skipped_tests.tbl +++ b/tests/skipped_tests.tbl @@ -1377,3 +1377,5 @@ tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentil tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_scalar_q tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_tuple_axis tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_uxpected_interpolation +tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_ptp_all_nan +tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_ptp_nan diff --git a/tests/skipped_tests_gpu.tbl b/tests/skipped_tests_gpu.tbl index 5c823c299d5..6a4bd6e6bb8 100644 --- a/tests/skipped_tests_gpu.tbl +++ b/tests/skipped_tests_gpu.tbl @@ -1804,3 +1804,5 @@ tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentil tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_scalar_q tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_tuple_axis tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_uxpected_interpolation +tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_ptp_all_nan +tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_ptp_nan From 9b14f0ca76a9e0c309bb97b4d5caa0870eecd6bb Mon Sep 17 00:00:00 2001 From: Anton <100830759+antonwolfy@users.noreply.github.com> Date: Tue, 30 Aug 2022 14:35:28 +0200 Subject: [PATCH 10/19] dpnp_take failed on Windows due to memory corruption (#1172) * dpnp_take failed on Windows due to memory corruption * Add more tests * Integer indexes types with different types of input data * Add trailing empty line to 
.gitignore --- .gitignore | 10 ++++- dpnp/backend/kernels/dpnp_krnl_indexing.cpp | 49 +++++++++++++-------- tests/skipped_tests_gpu.tbl | 13 ------ tests/test_indexing.py | 12 +++-- 4 files changed, 49 insertions(+), 35 deletions(-) diff --git a/.gitignore b/.gitignore index 2ac17b1752b..ea56758f290 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,15 @@ +# CMake build and local install directory build build_cython + +# Byte-compiled / optimized / DLL files __pycache__/ + +# Code project files +.vscode + *dpnp_backend* dpnp/**/*.cpython*.so dpnp/**/*.pyd -*~ \ No newline at end of file +*~ +core diff --git a/dpnp/backend/kernels/dpnp_krnl_indexing.cpp b/dpnp/backend/kernels/dpnp_krnl_indexing.cpp index d37e319b7e3..5cde013b69f 100644 --- a/dpnp/backend/kernels/dpnp_krnl_indexing.cpp +++ b/dpnp/backend/kernels/dpnp_krnl_indexing.cpp @@ -901,10 +901,8 @@ DPCTLSyclEventRef dpnp_take_c(DPCTLSyclQueueRef q_ref, DPCTLSyclEventRef event_ref = nullptr; sycl::queue q = *(reinterpret_cast(q_ref)); - DPNPC_ptr_adapter<_DataType> input1_ptr(q_ref, array1_in, array1_size); - DPNPC_ptr_adapter<_IndecesType> input2_ptr(q_ref, indices1, size); - _DataType* array_1 = input1_ptr.get_ptr(); - _IndecesType* indices = input2_ptr.get_ptr(); + _DataType* array_1 = reinterpret_cast<_DataType*>(array1_in); + _IndecesType* indices = reinterpret_cast<_IndecesType*>(indices1); _DataType* result = reinterpret_cast<_DataType*>(result1); sycl::range<1> gws(size); @@ -920,7 +918,6 @@ DPCTLSyclEventRef dpnp_take_c(DPCTLSyclQueueRef q_ref, sycl::event event = q.submit(kernel_func); event_ref = reinterpret_cast(&event); - return DPCTLEvent_Copy(event_ref); } @@ -937,6 +934,7 @@ void dpnp_take_c(void* array1_in, const size_t array1_size, void* indices1, void size, dep_event_vec_ref); DPCTLEvent_WaitAndThrow(event_ref); + DPCTLEvent_Delete(event_ref); } template @@ -1073,21 +1071,36 @@ void func_map_init_indexing_func(func_map_t& fmap) fmap[DPNPFuncName::DPNP_FN_PUT_ALONG_AXIS_EXT][eft_DBL][eft_DBL] 
= {eft_DBL, (void*)dpnp_put_along_axis_ext_c}; - fmap[DPNPFuncName::DPNP_FN_TAKE][eft_BLN][eft_BLN] = {eft_BLN, (void*)dpnp_take_default_c}; - fmap[DPNPFuncName::DPNP_FN_TAKE][eft_INT][eft_INT] = {eft_INT, (void*)dpnp_take_default_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_BLN][eft_INT] = {eft_BLN, (void*)dpnp_take_default_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_INT][eft_INT] = {eft_INT, (void*)dpnp_take_default_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_LNG][eft_INT] = {eft_LNG, (void*)dpnp_take_default_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_FLT][eft_INT] = {eft_FLT, (void*)dpnp_take_default_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_DBL][eft_INT] = {eft_DBL, (void*)dpnp_take_default_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_C128][eft_INT] = {eft_C128, + (void*)dpnp_take_default_c, int32_t>}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_BLN][eft_LNG] = {eft_BLN, (void*)dpnp_take_default_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_INT][eft_LNG] = {eft_INT, (void*)dpnp_take_default_c}; fmap[DPNPFuncName::DPNP_FN_TAKE][eft_LNG][eft_LNG] = {eft_LNG, (void*)dpnp_take_default_c}; - fmap[DPNPFuncName::DPNP_FN_TAKE][eft_FLT][eft_FLT] = {eft_FLT, (void*)dpnp_take_default_c}; - fmap[DPNPFuncName::DPNP_FN_TAKE][eft_DBL][eft_DBL] = {eft_DBL, (void*)dpnp_take_default_c}; - fmap[DPNPFuncName::DPNP_FN_TAKE][eft_C128][eft_C128] = {eft_C128, - (void*)dpnp_take_default_c, int64_t>}; - - fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_BLN][eft_BLN] = {eft_BLN, (void*)dpnp_take_ext_c}; - fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_INT][eft_INT] = {eft_INT, (void*)dpnp_take_ext_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_FLT][eft_LNG] = {eft_FLT, (void*)dpnp_take_default_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_DBL][eft_LNG] = {eft_DBL, (void*)dpnp_take_default_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE][eft_C128][eft_LNG] = {eft_C128, + (void*)dpnp_take_default_c, int64_t>}; + + // TODO: add a handling of other indexes types once DPCtl implementation of data copy is ready + 
fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_BLN][eft_INT] = {eft_BLN, (void*)dpnp_take_ext_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_INT][eft_INT] = {eft_INT, (void*)dpnp_take_ext_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_LNG][eft_INT] = {eft_LNG, (void*)dpnp_take_ext_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_FLT][eft_INT] = {eft_FLT, (void*)dpnp_take_ext_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_DBL][eft_INT] = {eft_DBL, (void*)dpnp_take_ext_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_C128][eft_INT] = {eft_C128, + (void*)dpnp_take_ext_c, int32_t>}; + fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_BLN][eft_LNG] = {eft_BLN, (void*)dpnp_take_ext_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_INT][eft_LNG] = {eft_INT, (void*)dpnp_take_ext_c}; fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_LNG][eft_LNG] = {eft_LNG, (void*)dpnp_take_ext_c}; - fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_FLT][eft_FLT] = {eft_FLT, (void*)dpnp_take_ext_c}; - fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_DBL][eft_DBL] = {eft_DBL, (void*)dpnp_take_ext_c}; - fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_C128][eft_C128] = {eft_C128, - (void*)dpnp_take_ext_c, int64_t>}; + fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_FLT][eft_LNG] = {eft_FLT, (void*)dpnp_take_ext_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_DBL][eft_LNG] = {eft_DBL, (void*)dpnp_take_ext_c}; + fmap[DPNPFuncName::DPNP_FN_TAKE_EXT][eft_C128][eft_LNG] = {eft_C128, + (void*)dpnp_take_ext_c, int64_t>}; return; } diff --git a/tests/skipped_tests_gpu.tbl b/tests/skipped_tests_gpu.tbl index 6a4bd6e6bb8..d41fe24c3c7 100644 --- a/tests/skipped_tests_gpu.tbl +++ b/tests/skipped_tests_gpu.tbl @@ -138,19 +138,6 @@ tests/test_indexing.py::test_nonzero[[[0, 1, 2], [3, 0, 5], [6, 7, 0]]] tests/test_indexing.py::test_nonzero[[[0, 1, 0, 3, 0], [5, 0, 7, 0, 9]]] tests/test_indexing.py::test_nonzero[[[[1, 2], [0, 4]], [[0, 2], [0, 1]], [[0, 0], [3, 1]]]] tests/test_indexing.py::test_nonzero[[[[[1, 2, 3], [3, 4, 5]], [[1, 2, 3], [2, 1, 0]]], [[[1, 3, 5], [3, 
1, 0]], [[0, 1, 2], [1, 3, 4]]]]] -tests/test_indexing.py::test_take[[[0, 1, 2], [3, 4, 5], [6, 7, 8]]-[[0, 0], [0, 0]]] -tests/test_indexing.py::test_take[[[0, 1, 2], [3, 4, 5], [6, 7, 8]]-[[1, 2], [1, 2]]] -tests/test_indexing.py::test_take[[[0, 1, 2], [3, 4, 5], [6, 7, 8]]-[[1, 2], [3, 4]]] -tests/test_indexing.py::test_take[[[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]-[[1, 2], [1, 2]]] -tests/test_indexing.py::test_take[[[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]-[[1, 2], [3, 4]]] -tests/test_indexing.py::test_take[[[[1, 2], [3, 4]], [[1, 2], [2, 1]], [[1, 3], [3, 1]]]-[[1, 2], [1, 2]]] -tests/test_indexing.py::test_take[[[[1, 2], [3, 4]], [[1, 2], [2, 1]], [[1, 3], [3, 1]]]-[[1, 2], [3, 4]]] -tests/test_indexing.py::test_take[[[[[1, 2], [3, 4]], [[1, 2], [2, 1]]], [[[1, 3], [3, 1]], [[0, 1], [1, 3]]]]-[[1, 2], [1, 2]]] -tests/test_indexing.py::test_take[[[[[1, 2], [3, 4]], [[1, 2], [2, 1]]], [[[1, 3], [3, 1]], [[0, 1], [1, 3]]]]-[[1, 2], [3, 4]]] -tests/test_indexing.py::test_take[[[[[1, 2, 3], [3, 4, 5]], [[1, 2, 3], [2, 1, 0]]], [[[1, 3, 5], [3, 1, 0]], [[0, 1, 2], [1, 3, 4]]]]-[[1, 2], [1, 2]]] -tests/test_indexing.py::test_take[[[[[1, 2, 3], [3, 4, 5]], [[1, 2, 3], [2, 1, 0]]], [[[1, 3, 5], [3, 1, 0]], [[0, 1, 2], [1, 3, 4]]]]-[[1, 2], [3, 4]]] -tests/test_indexing.py::test_take[[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]], [[[13, 14, 15], [16, 17, 18]], [[19, 20, 21], [22, 23, 24]]]]-[[1, 2], [1, 2]]] -tests/test_indexing.py::test_take[[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]], [[[13, 14, 15], [16, 17, 18]], [[19, 20, 21], [22, 23, 24]]]]-[[1, 2], [3, 4]]] tests/third_party/cupy/creation_tests/test_ranges.py::TestRanges::test_arange_no_dtype_int tests/third_party/cupy/indexing_tests/test_indexing.py::TestIndexing::test_take_no_axis tests/third_party/cupy/indexing_tests/test_insert.py::TestPlace_param_3_{n_vals=1, shape=(7,)}::test_place diff --git a/tests/test_indexing.py b/tests/test_indexing.py index c07beee0262..6519576171d 100644 --- 
a/tests/test_indexing.py +++ b/tests/test_indexing.py @@ -374,6 +374,12 @@ def test_select(): numpy.testing.assert_array_equal(expected, result) +@pytest.mark.parametrize("array_type", + [numpy.bool8, numpy.int32, numpy.int64, numpy.float32, numpy.float64, numpy.complex128], + ids=['bool8', 'int32', 'int64', 'float32', 'float64', 'complex128']) +@pytest.mark.parametrize("indices_type", + [numpy.int32, numpy.int64], + ids=['int32', 'int64']) @pytest.mark.parametrize("indices", [[[0, 0], [0, 0]], [[1, 2], [1, 2]], @@ -395,9 +401,9 @@ def test_select(): '[[[[1, 2], [3, 4]], [[1, 2], [2, 1]]], [[[1, 3], [3, 1]], [[0, 1], [1, 3]]]]', '[[[[1, 2, 3], [3, 4, 5]], [[1, 2, 3], [2, 1, 0]]], [[[1, 3, 5], [3, 1, 0]], [[0, 1, 2], [1, 3, 4]]]]', '[[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]], [[[13, 14, 15], [16, 17, 18]], [[19, 20, 21], [22, 23, 24]]]]']) -def test_take(array, indices): - a = numpy.array(array) - ind = numpy.array(indices) +def test_take(array, indices, array_type, indices_type): + a = numpy.array(array, dtype=array_type) + ind = numpy.array(indices, dtype=indices_type) ia = dpnp.array(a) iind = dpnp.array(ind) expected = numpy.take(a, ind) From c91f91282c6b0e0528445fd5b0cc4ce8e16946d3 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Thu, 8 Sep 2022 07:15:32 -0500 Subject: [PATCH 11/19] Add workflow for Win Fix typo Relax a strict pinning for numpy & cmake Update run command for conda build on Win Fix declaring DPLROOT env Fix DPLROOT source Fix DPLROOT for Win Add missing double quotes Try conda-incubator for Linux Setup conda-incubator for Linux Update caching Exclude python 3.8 Strickly pin on 3.8.13 Change channel order Fix artifcat uploading Replace to single quotes Add missing backslash Corect backslash --- .github/workflows/conda-package.yml | 178 ++++++++++++++++++++++++---- conda-recipe/bld.bat | 1 + conda-recipe/meta.yaml | 8 +- scripts/build_conda_package.sh | 2 +- utils/dpnp_build_utils.py | 2 +- 5 files changed, 164 insertions(+), 27 
deletions(-) diff --git a/.github/workflows/conda-package.yml b/.github/workflows/conda-package.yml index e401cf78c83..00e05f687ea 100644 --- a/.github/workflows/conda-package.yml +++ b/.github/workflows/conda-package.yml @@ -1,20 +1,35 @@ name: Conda package -on: push +on: + push: + branches: + - master + pull_request: env: - PACKAGE_NAME: dpctl + PACKAGE_NAME: dpnp + MODULE_NAME: dpnp + VER_SCRIPT1: "import json; f = open('ver.json', 'r'); j = json.load(f); f.close(); " + VER_SCRIPT2: "d = j['dpnp'][0]; print('='.join((d[s] for s in ('version', 'build'))))" jobs: - build: - runs-on: ubuntu-20.04 + build_linux: + runs-on: ubuntu-latest + + defaults: + run: + shell: bash -l {0} strategy: matrix: - python: [3.8, 3.9] + python: ['3.8', '3.9'] + + env: + conda-pkgs: '/home/runner/conda_pkgs_dir/' + conda-bld: '/usr/share/miniconda3/envs/build/conda-bld/linux-64/' steps: - - name: Checkout repo + - name: Checkout DPNP repo uses: actions/checkout@v3 with: fetch-depth: 0 @@ -24,44 +39,165 @@ jobs: with: repository: oneapi-src/oneDPL path: oneDPL - ref: oneDPL-2021.6.1-release + ref: oneDPL-2021.7.0-release + + - name: Setup miniconda + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: ${{ matrix.python }} + miniconda-version: 'latest' + activate-environment: 'build' + use-only-tar-bz2: true + + - name: Cache conda packages + uses: actions/cache@v3 + env: + CACHE_NUMBER: 1 # Increase to reset cache + with: + path: ${{ env.conda-pkgs }} + key: + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }} + restore-keys: | + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- - - name: Add conda to system path - run: echo $CONDA/bin >> $GITHUB_PATH - name: Install conda-build run: conda install conda-build + - name: Build conda package run: ./scripts/build_conda_package.sh ${{ matrix.python }} 
$GITHUB_WORKSPACE/oneDPL - name: Upload artifact uses: actions/upload-artifact@v2 with: - name: dpnp ${{ runner.os }} ${{ matrix.python }} - path: /usr/share/miniconda/conda-bld/linux-64/dpnp-*.tar.bz2 + name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} + path: ${{ env.conda-bld }}${{ env.PACKAGE_NAME }}-*.tar.bz2 + + build_windows: + runs-on: windows-latest + + defaults: + run: + shell: cmd /C CALL {0} + + strategy: + matrix: + python: ['3.8', '3.9'] + + env: + conda-pkgs: 'C:\Users\runneradmin\conda_pkgs_dir\' + conda-bld: 'C:\Miniconda3\envs\build\conda-bld\win-64\' + + steps: + - name: Checkout DPNP repo + uses: actions/checkout@v3 + with: + fetch-depth: 0 + + - name: Checkout oneDPL + uses: actions/checkout@v3 + with: + repository: oneapi-src/oneDPL + path: oneDPL + ref: oneDPL-2021.7.0-release + + - name: Setup miniconda + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: ${{ matrix.python }} + miniconda-version: 'latest' + activate-environment: 'build' + use-only-tar-bz2: true + + - name: Cache conda packages + uses: actions/cache@v3 + env: + CACHE_NUMBER: 1 # Increase to reset cache + with: + path: ${{ env.conda-pkgs }} + key: + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('**/meta.yaml') }} + restore-keys: | + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- + + - name: Install conda-build + run: conda install conda-build + + - name: Build conda package + run: conda build --no-test --python ${{ matrix.python }} -c dppy/label/dev -c intel -c defaults --override-channels conda-recipe + env: + DPLROOT: '%GITHUB_WORKSPACE%\oneDPL' + + - name: Upload artifact + uses: actions/upload-artifact@v2 + with: + name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} + path: ${{ env.conda-bld }}${{ env.PACKAGE_NAME }}-*.tar.bz2 + + upload_linux: + needs: build_linux 
+ + if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}} - upload: - needs: build - if: ${{ github.ref == 'refs/heads/master' }} runs-on: ubuntu-latest strategy: matrix: - python: [3.8, 3.9] + python: ['3.8', '3.9'] + + steps: + - name: Download artifact + uses: actions/download-artifact@v2 + with: + name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} + - name: Setup miniconda + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: ${{ matrix.python }} + miniconda-version: 'latest' + activate-environment: 'upload' + + - name: Install anaconda-client + run: conda install anaconda-client + + - name: Upload + env: + ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} + run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 + + upload_windows: + needs: build_windows + + if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}} + + runs-on: windows-latest + + strategy: + matrix: + python: ['3.8', '3.9'] steps: - name: Download artifact uses: actions/download-artifact@v2 with: - name: dpnp ${{ runner.os }} ${{ matrix.python }} + name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} + + - name: Setup miniconda + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: ${{ matrix.python }} + miniconda-version: 'latest' + activate-environment: 'upload' - name: Install anaconda-client run: conda install anaconda-client - - name: Add conda to system path - run: echo $CONDA/bin >> $GITHUB_PATH - name: Upload env: ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} - run: | - conda install anaconda-client - anaconda --token $ANACONDA_TOKEN upload --user dppy --label dev dpnp-*.tar.bz2 + run: 
anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 diff --git a/conda-recipe/bld.bat b/conda-recipe/bld.bat index 1695b2e74c9..6d4389b93dd 100644 --- a/conda-recipe/bld.bat +++ b/conda-recipe/bld.bat @@ -1,4 +1,5 @@ REM A workaround for activate-dpcpp.bat issue to be addressed in 2021.4 +set "LIB=%BUILD_PREFIX%\Library\lib;%BUILD_PREFIX%\compiler\lib;%LIB%" SET "INCLUDE=%BUILD_PREFIX%\include;%INCLUDE%" IF DEFINED DPLROOT ( diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml index bc705e2715d..c24d86e5473 100644 --- a/conda-recipe/meta.yaml +++ b/conda-recipe/meta.yaml @@ -8,16 +8,16 @@ requirements: host: - python - setuptools - - numpy 1.19 + - numpy >=1.19 - cython - - cmake 3.19 + - cmake >=3.19 - dpctl >=0.13 - mkl-devel-dpcpp {{ environ.get('MKL_VER', '>=2021.1.1') }} - tbb-devel - wheel build: - - {{ compiler('dpcpp') }} - - dpcpp-cpp-rt {{ environ.get('DPCPP_VER', '>=2021.1.1') }} + - {{ compiler('cxx') }} + - {{ compiler('dpcpp') }} >=2022.1 # [not osx] run: - python - dpctl >=0.13 diff --git a/scripts/build_conda_package.sh b/scripts/build_conda_package.sh index c9ad065b150..ae9474e1f77 100755 --- a/scripts/build_conda_package.sh +++ b/scripts/build_conda_package.sh @@ -5,7 +5,7 @@ DPLROOT=$2 export DPLROOT -CHANNELS="-c dppy/label/dev -c intel -c defaults --override-channels" +CHANNELS="-c dppy/label/dev -c defaults -c intel --override-channels" VERSIONS="--python $PYTHON_VERSION" TEST="--no-test" diff --git a/utils/dpnp_build_utils.py b/utils/dpnp_build_utils.py index d06096c4b08..2ccf211587d 100644 --- a/utils/dpnp_build_utils.py +++ b/utils/dpnp_build_utils.py @@ -126,7 +126,7 @@ def find_cmplr(verbose=False): verbose=verbose) # try to find in Python environment - if not cmplr_include or not mathlib_path: + if not cmplr_include or not cmplr_libpath: if sys.platform in ['linux']: rel_include_path = os.path.join('include') rel_libdir_path = os.path.join('lib') From 
31144c604277c6e98146bd8579f31c7e2a9d5535 Mon Sep 17 00:00:00 2001 From: Oleksandr Pavlyk Date: Thu, 8 Sep 2022 15:57:06 -0500 Subject: [PATCH 12/19] Attempt to fix workflow --- .github/workflows/conda-package.yml | 18 ++++++++---------- 1 file changed, 8 insertions(+), 10 deletions(-) diff --git a/.github/workflows/conda-package.yml b/.github/workflows/conda-package.yml index 00e05f687ea..a2321b268e9 100644 --- a/.github/workflows/conda-package.yml +++ b/.github/workflows/conda-package.yml @@ -162,13 +162,12 @@ jobs: miniconda-version: 'latest' activate-environment: 'upload' - - name: Install anaconda-client - run: conda install anaconda-client - - - name: Upload + - name: Install client and Upload env: ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} - run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 + run: | + conda install anaconda-client + anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 upload_windows: needs: build_windows @@ -194,10 +193,9 @@ jobs: miniconda-version: 'latest' activate-environment: 'upload' - - name: Install anaconda-client - run: conda install anaconda-client - - - name: Upload + - name: Install client and Upload env: ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} - run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 + run: | + conda install anaconda-client + anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 From ee65713c94c047731369e0051be3cb227b3b195d Mon Sep 17 00:00:00 2001 From: Oleksandr Pavlyk Date: Thu, 8 Sep 2022 16:52:28 -0500 Subject: [PATCH 13/19] attempt to fix upload steps of the workflow on Linux --- .github/workflows/conda-package.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/conda-package.yml b/.github/workflows/conda-package.yml index a2321b268e9..23c36f29c4e 
100644 --- a/.github/workflows/conda-package.yml +++ b/.github/workflows/conda-package.yml @@ -45,6 +45,7 @@ jobs: uses: conda-incubator/setup-miniconda@v2 with: auto-update-conda: true + auto-activate-base: true python-version: ${{ matrix.python }} miniconda-version: 'latest' activate-environment: 'build' @@ -106,6 +107,7 @@ jobs: uses: conda-incubator/setup-miniconda@v2 with: auto-update-conda: true + auto-activate-base: true python-version: ${{ matrix.python }} miniconda-version: 'latest' activate-environment: 'build' @@ -158,6 +160,7 @@ jobs: uses: conda-incubator/setup-miniconda@v2 with: auto-update-conda: true + auto-activate-base: true python-version: ${{ matrix.python }} miniconda-version: 'latest' activate-environment: 'upload' @@ -166,6 +169,7 @@ jobs: env: ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} run: | + conda activate upload conda install anaconda-client anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 @@ -189,6 +193,7 @@ jobs: uses: conda-incubator/setup-miniconda@v2 with: auto-update-conda: true + auto-activate-base: true python-version: ${{ matrix.python }} miniconda-version: 'latest' activate-environment: 'upload' @@ -197,5 +202,6 @@ jobs: env: ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} run: | + conda activate upload conda install anaconda-client anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 From 2ea4aac5377d381228a8e59da1876c16cc551035 Mon Sep 17 00:00:00 2001 From: Oleksandr Pavlyk Date: Thu, 8 Sep 2022 19:52:57 -0500 Subject: [PATCH 14/19] Another attempt to fix upload step of conda-package workflow --- .github/workflows/conda-package.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/conda-package.yml b/.github/workflows/conda-package.yml index 23c36f29c4e..c87084600eb 100644 --- a/.github/workflows/conda-package.yml +++ b/.github/workflows/conda-package.yml @@ -169,7 +169,10 @@ jobs: env: 
ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} run: | + source $CONDA/etc/profile.d/conda.sh + conda info conda activate upload + ls -lF $CONDA_PREFIX/bin conda install anaconda-client anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 From ac79e9fb7b8f405a2d26b943ca4bef0e318095ec Mon Sep 17 00:00:00 2001 From: Anton <100830759+antonwolfy@users.noreply.github.com> Date: Fri, 9 Sep 2022 13:52:56 +0200 Subject: [PATCH 15/19] Set default shell in upload actions (#1180) --- .github/workflows/conda-package.yml | 35 +++++++++++++++-------------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/.github/workflows/conda-package.yml b/.github/workflows/conda-package.yml index c87084600eb..50b573a3e36 100644 --- a/.github/workflows/conda-package.yml +++ b/.github/workflows/conda-package.yml @@ -45,7 +45,6 @@ jobs: uses: conda-incubator/setup-miniconda@v2 with: auto-update-conda: true - auto-activate-base: true python-version: ${{ matrix.python }} miniconda-version: 'latest' activate-environment: 'build' @@ -107,7 +106,6 @@ jobs: uses: conda-incubator/setup-miniconda@v2 with: auto-update-conda: true - auto-activate-base: true python-version: ${{ matrix.python }} miniconda-version: 'latest' activate-environment: 'build' @@ -146,6 +144,10 @@ jobs: runs-on: ubuntu-latest + defaults: + run: + shell: bash -l {0} + strategy: matrix: python: ['3.8', '3.9'] @@ -160,21 +162,17 @@ jobs: uses: conda-incubator/setup-miniconda@v2 with: auto-update-conda: true - auto-activate-base: true python-version: ${{ matrix.python }} miniconda-version: 'latest' activate-environment: 'upload' - - name: Install client and Upload + - name: Install anaconda-client + run: conda install anaconda-client + + - name: Upload env: ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} - run: | - source $CONDA/etc/profile.d/conda.sh - conda info - conda activate upload - ls -lF $CONDA_PREFIX/bin - conda install anaconda-client - anaconda --token ${{ 
env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 + run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 upload_windows: needs: build_windows @@ -183,6 +181,10 @@ jobs: runs-on: windows-latest + defaults: + run: + shell: cmd /C CALL {0} + strategy: matrix: python: ['3.8', '3.9'] @@ -196,15 +198,14 @@ jobs: uses: conda-incubator/setup-miniconda@v2 with: auto-update-conda: true - auto-activate-base: true python-version: ${{ matrix.python }} miniconda-version: 'latest' activate-environment: 'upload' - - name: Install client and Upload + - name: Install anaconda-client + run: conda install anaconda-client + + - name: Upload env: ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} - run: | - conda activate upload - conda install anaconda-client - anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 + run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 From 64478ae66190eb0f6b1df5221edd919dfb1eb868 Mon Sep 17 00:00:00 2001 From: Oleksandr Pavlyk Date: Fri, 9 Sep 2022 07:14:39 -0500 Subject: [PATCH 16/19] Use pin_compatible for run-time dependency generation on numpy, restrict numpy version bracket for host section --- conda-recipe/meta.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml index c24d86e5473..dccf855c184 100644 --- a/conda-recipe/meta.yaml +++ b/conda-recipe/meta.yaml @@ -8,7 +8,7 @@ requirements: host: - python - setuptools - - numpy >=1.19 + - numpy >=1.19,<1.22a0 - cython - cmake >=3.19 - dpctl >=0.13 @@ -23,7 +23,7 @@ requirements: - dpctl >=0.13 - {{ pin_compatible('dpcpp-cpp-rt', min_pin='x.x', max_pin='x') }} - {{ pin_compatible('mkl-dpcpp', min_pin='x.x', max_pin='x') }} - - numpy >=1.15 + - {{ pin_compatible('numpy', min_pin='x.x', max_pin='x') }} build: number: {{ 
GIT_DESCRIBE_NUMBER }} From bd1a414803a787f6368374fd72d9d074c447f269 Mon Sep 17 00:00:00 2001 From: Anton <100830759+antonwolfy@users.noreply.github.com> Date: Sat, 10 Sep 2022 15:51:29 +0200 Subject: [PATCH 17/19] Reorder channels in conda-build (#1182) * Reorder channels in conda-build * Remove conda-build script for Linux --- .github/workflows/conda-package.yml | 9 +++++---- scripts/build_conda_package.sh | 16 ---------------- 2 files changed, 5 insertions(+), 20 deletions(-) delete mode 100755 scripts/build_conda_package.sh diff --git a/.github/workflows/conda-package.yml b/.github/workflows/conda-package.yml index 50b573a3e36..756c5f8709d 100644 --- a/.github/workflows/conda-package.yml +++ b/.github/workflows/conda-package.yml @@ -9,8 +9,7 @@ on: env: PACKAGE_NAME: dpnp MODULE_NAME: dpnp - VER_SCRIPT1: "import json; f = open('ver.json', 'r'); j = json.load(f); f.close(); " - VER_SCRIPT2: "d = j['dpnp'][0]; print('='.join((d[s] for s in ('version', 'build'))))" + CHANNELS: '-c dppy/label/dev -c intel -c defaults --override-channels' jobs: build_linux: @@ -66,7 +65,9 @@ jobs: run: conda install conda-build - name: Build conda package - run: ./scripts/build_conda_package.sh ${{ matrix.python }} $GITHUB_WORKSPACE/oneDPL + run: conda build --no-test --python ${{ matrix.python }} ${{ env.CHANNELS }} conda-recipe + env: + DPLROOT: '${{ github.workspace }}/oneDPL' - name: Upload artifact uses: actions/upload-artifact@v2 @@ -127,7 +128,7 @@ jobs: run: conda install conda-build - name: Build conda package - run: conda build --no-test --python ${{ matrix.python }} -c dppy/label/dev -c intel -c defaults --override-channels conda-recipe + run: conda build --no-test --python ${{ matrix.python }} ${{ env.CHANNELS }} conda-recipe env: DPLROOT: '%GITHUB_WORKSPACE%\oneDPL' diff --git a/scripts/build_conda_package.sh b/scripts/build_conda_package.sh deleted file mode 100755 index ae9474e1f77..00000000000 --- a/scripts/build_conda_package.sh +++ /dev/null @@ -1,16 +0,0 @@ 
-#!/bin/bash - -PYTHON_VERSION=$1 -DPLROOT=$2 - -export DPLROOT - -CHANNELS="-c dppy/label/dev -c defaults -c intel --override-channels" -VERSIONS="--python $PYTHON_VERSION" -TEST="--no-test" - -conda build \ - $TEST \ - $VERSIONS \ - $CHANNELS \ - conda-recipe From 3be4b2e1d0ce7399761f2f4d0af6cb92ce2271fe Mon Sep 17 00:00:00 2001 From: Anton <100830759+antonwolfy@users.noreply.github.com> Date: Tue, 20 Sep 2022 20:56:43 +0200 Subject: [PATCH 18/19] Add tests running as a part of github actions (#1184) --- .github/workflows/conda-package.yml | 285 +++++++++++++++++++++++++++- tests/skipped_tests.tbl | 1 + 2 files changed, 281 insertions(+), 5 deletions(-) diff --git a/.github/workflows/conda-package.yml b/.github/workflows/conda-package.yml index 756c5f8709d..83045bcd8f8 100644 --- a/.github/workflows/conda-package.yml +++ b/.github/workflows/conda-package.yml @@ -10,6 +10,9 @@ env: PACKAGE_NAME: dpnp MODULE_NAME: dpnp CHANNELS: '-c dppy/label/dev -c intel -c defaults --override-channels' + VER_JSON_NAME: 'version.json' + VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); " + VER_SCRIPT2: "d = j['dpnp'][0]; print('='.join((d[s] for s in ('version', 'build'))))" jobs: build_linux: @@ -49,6 +52,9 @@ jobs: activate-environment: 'build' use-only-tar-bz2: true + - name: Install conda-build + run: conda install conda-build + - name: Cache conda packages uses: actions/cache@v3 env: @@ -61,9 +67,6 @@ jobs: ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- - - name: Install conda-build - run: conda install conda-build - - name: Build conda package run: conda build --no-test --python ${{ matrix.python }} ${{ env.CHANNELS }} conda-recipe env: @@ -138,9 +141,281 @@ jobs: name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} path: ${{ env.conda-bld }}${{ env.PACKAGE_NAME }}-*.tar.bz2 - upload_linux: + test_linux: needs: build_linux + runs-on: 
ubuntu-latest + + defaults: + run: + shell: bash -l {0} + + strategy: + matrix: + python: ['3.8', '3.9'] + dpctl: ['0.13.0'] + experimental: [false] + + continue-on-error: ${{ matrix.experimental }} + + env: + conda-pkgs: '/home/runner/conda_pkgs_dir/' + channel-path: '${{ github.workspace }}/channel/' + pkg-path-in-channel: '${{ github.workspace }}/channel/linux-64/' + extracted-pkg-path: '${{ github.workspace }}/pkg/' + tests-path: '${{ github.workspace }}/pkg/info/test/' + ver-json-path: '${{ github.workspace }}/version.json' + + steps: + - name: Download artifact + uses: actions/download-artifact@v2 + with: + name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} + path: ${{ env.pkg-path-in-channel }} + + - name: Extract package archive + run: | + mkdir -p ${{ env.extracted-pkg-path }} + tar -xvf ${{ env.pkg-path-in-channel }}/${{ env.PACKAGE_NAME }}-*.tar.bz2 -C ${{ env.extracted-pkg-path }} + + - name: Setup miniconda + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: ${{ matrix.python }} + miniconda-version: 'latest' + activate-environment: 'test' + + # Needed to be able to run conda index + - name: Install conda-build + run: conda install conda-build + + - name: Create conda channel + run: conda index ${{ env.channel-path }} + + - name: Test conda channel + run: | + conda search ${{ env.PACKAGE_NAME }} -c ${{ env.channel-path }} --override-channels --info --json > ${{ env.ver-json-path }} + cat ${{ env.ver-json-path }} + + - name: Collect dependencies + run: | + export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}") + echo PACKAGE_VERSION=${PACKAGE_VERSION} + + conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile + cat lockfile + env: + TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}' + + - name: Cache conda packages + uses: actions/cache@v3 + env: + 
CACHE_NUMBER: 1 # Increase to reset cache + with: + path: ${{ env.conda-pkgs }} + key: + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }} + restore-keys: | + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- + + - name: Install dpnp + run: | + export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}") + echo PACKAGE_VERSION=${PACKAGE_VERSION} + + conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} dpctl=${{ matrix.dpctl }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} + env: + TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}' + + - name: List installed packages + run: conda list + + - name: Smoke test + run: python -c "import dpnp, dpctl; dpctl.lsplatform()" + + # TODO: run the whole scope once the issues on CPU are resolved + - name: Run tests + run: python -m pytest -q -ra --disable-warnings -vv tests/test_arraycreation.py tests/test_dparray.py tests/test_mathematical.py + env: + SYCL_ENABLE_HOST_DEVICE: '1' + working-directory: ${{ env.tests-path }} + + test_windows: + needs: build_windows + + runs-on: windows-latest + + defaults: + run: + shell: cmd /C CALL {0} + + strategy: + matrix: + python: ['3.8', '3.9'] + dpctl: ['0.13.0'] + experimental: [false] + + continue-on-error: ${{ matrix.experimental }} + + env: + conda-pkgs: 'C:\Users\runneradmin\conda_pkgs_dir\' + channel-path: '${{ github.workspace }}\channel\' + pkg-path-in-channel: '${{ github.workspace }}\channel\win-64\' + extracted-pkg-path: '${{ github.workspace }}\pkg' + tests-path: '${{ github.workspace }}\pkg\info\test\' + ver-json-path: '${{ github.workspace }}\version.json' + active-env-name: 'test' + miniconda-lib-path: 'C:\Miniconda3\envs\test\Library\lib\' + miniconda-bin-path: 'C:\Miniconda3\envs\test\Library\bin\' + + steps: + - name: Download artifact + uses: actions/download-artifact@v2 + with: + name: ${{ 
env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }} + path: ${{ env.pkg-path-in-channel }} + + - name: Extract package archive + run: | + @echo on + mkdir -p ${{ env.extracted-pkg-path }} + + set SEARCH_SCRIPT="DIR ${{ env.pkg-path-in-channel }} /s/b | FINDSTR /r "dpnp-.*\.tar\.bz2"" + FOR /F "tokens=* USEBACKQ" %%F IN (`%SEARCH_SCRIPT%`) DO ( + SET FULL_PACKAGE_PATH=%%F + ) + echo FULL_PACKAGE_PATH: %FULL_PACKAGE_PATH% + + python -c "import shutil; shutil.unpack_archive(r\"%FULL_PACKAGE_PATH%\", extract_dir=r\"${{ env.extracted-pkg-path }}\")" + dir ${{ env.extracted-pkg-path }} + + - name: Setup miniconda + uses: conda-incubator/setup-miniconda@v2 + with: + auto-update-conda: true + python-version: ${{ matrix.python }} + miniconda-version: 'latest' + activate-environment: ${{ env.active-env-name }} + + # Needed to be able to run conda index + - name: Install conda-build + run: conda install conda-build + + - name: Create conda channel + run: conda index ${{ env.channel-path }} + + - name: Test conda channel + run: | + @echo on + conda search ${{ env.PACKAGE_NAME }} -c ${{ env.channel-path }} --override-channels --info --json > ${{ env.ver-json-path }} + + - name: Dump version.json + run: more ${{ env.ver-json-path }} + + - name: Collect dependencies + run: | + @echo on + set "SCRIPT=${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}" + FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO ( + SET PACKAGE_VERSION=%%F + ) + echo PACKAGE_VERSION: %PACKAGE_VERSION% + + conda install ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% dpctl=${{ matrix.dpctl }} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile + env: + TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}' + + - name: Dump lockfile + run: more lockfile + + - name: Cache conda packages + uses: actions/cache@v3 + env: + CACHE_NUMBER: 1 # Increase to reset cache + with: + path: ${{ env.conda-pkgs }} + key: + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER 
}}-python-${{ matrix.python }}-${{hashFiles('lockfile') }} + restore-keys: | + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}- + ${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}- + + - name: Install opencl_rt + run: conda install opencl_rt -c intel --override-channels + + - name: Install dpnp + run: | + @echo on + set "SCRIPT=${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}" + FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO ( + SET PACKAGE_VERSION=%%F + ) + echo PACKAGE_VERSION: %PACKAGE_VERSION% + + conda install ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% dpctl=${{ matrix.dpctl }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} + env: + TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}' + + - name: List installed packages + run: conda list + + - name: Add library + shell: pwsh + run: | + # Make sure the below libraries exist + Get-Item -Path ${{ env.miniconda-bin-path }}\OpenCL.dll + Get-Item -Path ${{ env.miniconda-lib-path }}\intelocl64.dll + + echo "OCL_ICD_FILENAMES=${{ env.miniconda-lib-path }}\intelocl64.dll" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append + try {$list = Get-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors | Select-Object -ExpandProperty Property } catch {$list=@()} + + if ($list.count -eq 0) { + if (-not (Test-Path -Path HKLM:\SOFTWARE\Khronos)) { + New-Item -Path HKLM:\SOFTWARE\Khronos + } + + if (-not (Test-Path -Path HKLM:\SOFTWARE\Khronos\OpenCL)) { + New-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL + } + + if (-not (Test-Path -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors)) { + New-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors + } + + New-ItemProperty -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors -Name ${{ env.miniconda-lib-path }}\intelocl64.dll -Value 0 + try {$list = Get-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors | Select-Object -ExpandProperty Property } catch {$list=@()} + Write-Output $(Get-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors) + + # Now copy 
OpenCL.dll into system folder + $system_ocl_icd_loader="C:\Windows\System32\OpenCL.dll" + $python_ocl_icd_loader="${{ env.miniconda-bin-path }}\OpenCL.dll" + Copy-Item -Path $python_ocl_icd_loader -Destination $system_ocl_icd_loader + + if (Test-Path -Path $system_ocl_icd_loader) { + Write-Output "$system_ocl_icd_loader has been copied" + $acl = Get-Acl $system_ocl_icd_loader + Write-Output $acl + } else { + Write-Output "OCL-ICD-Loader was not copied" + } + + # Variable assisting OpenCL CPU driver to find TBB DLLs which are not located where it expects them by default + echo "TBB_DLL_PATH=${{ env.miniconda-bin-path }}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append + } + + - name: Smoke test + run: python -c "import dpnp, dpctl; dpctl.lsplatform()" + + # TODO: run the whole scope once the issues on CPU are resolved + - name: Run tests + run: python -m pytest -q -ra --disable-warnings -vv tests\test_arraycreation.py tests\test_dparray.py tests\test_mathematical.py + working-directory: ${{ env.tests-path }} + + upload_linux: + needs: test_linux + if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}} runs-on: ubuntu-latest @@ -176,7 +451,7 @@ jobs: run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2 upload_windows: - needs: build_windows + needs: test_windows if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}} diff --git a/tests/skipped_tests.tbl b/tests/skipped_tests.tbl index bbf3c1c3b53..b781e377202 100644 --- a/tests/skipped_tests.tbl +++ b/tests/skipped_tests.tbl @@ -129,6 +129,7 @@ tests/test_linalg.py::test_svd[(2,2)-complex128] tests/test_linalg.py::test_svd[(3,4)-complex128] tests/test_linalg.py::test_svd[(5,3)-complex128] 
tests/test_linalg.py::test_svd[(16,16)-complex128] +tests/test_mathematical.py::TestGradient::test_gradient_y1_dx[3.5-array1] tests/test_random.py::TestPermutationsTestShuffle::test_shuffle1[lambda x: (dpnp.asarray([(i, i) for i in x], [("a", int), ("b", int)]).view(dpnp.recarray))] tests/test_random.py::TestPermutationsTestShuffle::test_shuffle1[lambda x: dpnp.asarray([(i, i) for i in x], [("a", object), ("b", dpnp.int32)])]] tests/test_random.py::TestPermutationsTestShuffle::test_shuffle1[lambda x: dpnp.asarray(x).astype(dpnp.int8)] From 614c829d8fdd1311fee35464c869f113b2c8dd29 Mon Sep 17 00:00:00 2001 From: Anton <100830759+antonwolfy@users.noreply.github.com> Date: Wed, 21 Sep 2022 11:23:02 +0200 Subject: [PATCH 19/19] [Build] setuptools 63.4.1 breaks build for Windows (#1185) * [SAT-5366] setuptools 63.4.1 breaks build for Windows * Add TODO note as suggested in review comment --- conda-recipe/bld.bat | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/conda-recipe/bld.bat b/conda-recipe/bld.bat index 6d4389b93dd..8ec6c1fb158 100644 --- a/conda-recipe/bld.bat +++ b/conda-recipe/bld.bat @@ -2,6 +2,13 @@ REM A workaround for activate-dpcpp.bat issue to be addressed in 2021.4 set "LIB=%BUILD_PREFIX%\Library\lib;%BUILD_PREFIX%\compiler\lib;%LIB%" SET "INCLUDE=%BUILD_PREFIX%\include;%INCLUDE%" +REM Since the 60.0.0 release, setuptools includes a local, vendored copy +REM of distutils (from late copies of CPython) that is enabled by default. +REM It breaks build for Windows, so use distutils from "stdlib" as before. +REM @TODO: remove the setting, once transition to build backend on Windows +REM to cmake is complete. +SET "SETUPTOOLS_USE_DISTUTILS=stdlib" + IF DEFINED DPLROOT ( ECHO "Sourcing DPLROOT" SET "INCLUDE=%DPLROOT%\include;%INCLUDE%"