Add tests running as part of GitHub Actions #1184

Merged: 1 commit, Sep 20, 2022

285 changes: 280 additions & 5 deletions .github/workflows/conda-package.yml
@@ -10,6 +10,9 @@ env:
PACKAGE_NAME: dpnp
MODULE_NAME: dpnp
CHANNELS: '-c dppy/label/dev -c intel -c defaults --override-channels'
VER_JSON_NAME: 'version.json'
VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); "
VER_SCRIPT2: "d = j['dpnp'][0]; print('='.join((d[s] for s in ('version', 'build'))))"
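
The two VER_SCRIPT fragments are concatenated and handed to `python -c` in the "Collect dependencies" and "Install dpnp" steps below: they read the version.json written by `conda search --json`, take the first dpnp record, and print `<version>=<build>` so the result can be passed directly to `conda install`. A standalone sketch of the same parsing logic (the sample JSON is illustrative, not real search output):

```python
import json

# Illustrative stand-in for the file written by `conda search --json`
# in the "Test conda channel" step; the values are made up.
with open("version.json", "w") as f:
    f.write('{"dpnp": [{"version": "0.11.0", "build": "py39h1234567_0"}]}')

# Equivalent of VER_SCRIPT1 + VER_SCRIPT2:
with open("version.json", "r") as f:
    j = json.load(f)
d = j["dpnp"][0]
print("=".join(d[s] for s in ("version", "build")))  # -> 0.11.0=py39h1234567_0
```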

jobs:
build_linux:
@@ -49,6 +52,9 @@ jobs:
activate-environment: 'build'
use-only-tar-bz2: true

- name: Install conda-build
run: conda install conda-build

- name: Cache conda packages
uses: actions/cache@v3
env:
@@ -61,9 +67,6 @@
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-

- name: Install conda-build
run: conda install conda-build

- name: Build conda package
run: conda build --no-test --python ${{ matrix.python }} ${{ env.CHANNELS }} conda-recipe
env:
@@ -138,9 +141,281 @@ jobs:
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
path: ${{ env.conda-bld }}${{ env.PACKAGE_NAME }}-*.tar.bz2

upload_linux:
test_linux:
needs: build_linux

runs-on: ubuntu-latest

defaults:
run:
shell: bash -l {0}

strategy:
matrix:
python: ['3.8', '3.9']
dpctl: ['0.13.0']
experimental: [false]

continue-on-error: ${{ matrix.experimental }}

env:
conda-pkgs: '/home/runner/conda_pkgs_dir/'
channel-path: '${{ github.workspace }}/channel/'
pkg-path-in-channel: '${{ github.workspace }}/channel/linux-64/'
extracted-pkg-path: '${{ github.workspace }}/pkg/'
tests-path: '${{ github.workspace }}/pkg/info/test/'
ver-json-path: '${{ github.workspace }}/version.json'

steps:
- name: Download artifact
uses: actions/download-artifact@v2
with:
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
path: ${{ env.pkg-path-in-channel }}

- name: Extract package archive
run: |
mkdir -p ${{ env.extracted-pkg-path }}
tar -xvf ${{ env.pkg-path-in-channel }}/${{ env.PACKAGE_NAME }}-*.tar.bz2 -C ${{ env.extracted-pkg-path }}

- name: Setup miniconda
uses: conda-incubator/setup-miniconda@v2
with:
auto-update-conda: true
python-version: ${{ matrix.python }}
miniconda-version: 'latest'
activate-environment: 'test'

# Needed to be able to run conda index
- name: Install conda-build
run: conda install conda-build

- name: Create conda channel
run: conda index ${{ env.channel-path }}
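
`conda index` scans the downloaded artifact directory and writes repodata.json for each platform subdirectory, which is what lets the plain workspace folder be used with `-c ${{ env.channel-path }} --override-channels` in the following steps. A small inspection sketch, assuming the linux-64 layout used here (the path is a placeholder for the workflow variable):

```python
import json
from pathlib import Path

# Placeholder for ${{ env.channel-path }} on the runner.
channel = Path("channel")

repodata = json.loads((channel / "linux-64" / "repodata.json").read_text())
for filename, meta in repodata.get("packages", {}).items():
    # Entries are keyed by archive name, e.g. dpnp-<version>-<build>.tar.bz2
    print(filename, meta["version"], meta["build"])
```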

- name: Test conda channel
run: |
conda search ${{ env.PACKAGE_NAME }} -c ${{ env.channel-path }} --override-channels --info --json > ${{ env.ver-json-path }}
cat ${{ env.ver-json-path }}

- name: Collect dependencies
run: |
export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}")
echo PACKAGE_VERSION=${PACKAGE_VERSION}

conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
cat lockfile
env:
TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'

- name: Cache conda packages
uses: actions/cache@v3
env:
CACHE_NUMBER: 1 # Increase to reset cache
with:
path: ${{ env.conda-pkgs }}
key:
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
restore-keys: |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-
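
The cache key folds in `hashFiles('lockfile')`, so the conda package cache is reused only while the dry-run resolution from "Collect dependencies" stays the same; if the resolved dependency set changes, the key changes and the restore-keys fall back to a partial match. `hashFiles` produces a SHA-256 based digest; a rough Python analogue of this keying scheme (illustrative only, not the Actions implementation):

```python
import hashlib

def conda_cache_key(runner_os, cache_number, python, lockfile_path="lockfile"):
    """Rough analogue of the workflow cache key: OS, cache number, python, lockfile digest."""
    digest = hashlib.sha256()
    with open(lockfile_path, "rb") as f:
        digest.update(f.read())
    return f"{runner_os}-conda-{cache_number}-python-{python}-{digest.hexdigest()}"

# Example, assuming the dry-run step has produced a 'lockfile':
# print(conda_cache_key("Linux", 1, "3.9"))
```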

- name: Install dpnp
run: |
export PACKAGE_VERSION=$(python -c "${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}")
echo PACKAGE_VERSION=${PACKAGE_VERSION}

conda install ${{ env.PACKAGE_NAME }}=${PACKAGE_VERSION} dpctl=${{ matrix.dpctl }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
env:
TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'

- name: List installed packages
run: conda list

- name: Smoke test
run: python -c "import dpnp, dpctl; dpctl.lsplatform()"

# TODO: run the whole scope once the issues on CPU are resolved
- name: Run tests
run: python -m pytest -q -ra --disable-warnings -vv tests/test_arraycreation.py tests/test_dparray.py tests/test_mathematical.py
env:
SYCL_ENABLE_HOST_DEVICE: '1'
working-directory: ${{ env.tests-path }}

test_windows:
needs: build_windows

runs-on: windows-latest

defaults:
run:
shell: cmd /C CALL {0}

strategy:
matrix:
python: ['3.8', '3.9']
dpctl: ['0.13.0']
experimental: [false]

continue-on-error: ${{ matrix.experimental }}

env:
conda-pkgs: 'C:\Users\runneradmin\conda_pkgs_dir\'
channel-path: '${{ github.workspace }}\channel\'
pkg-path-in-channel: '${{ github.workspace }}\channel\win-64\'
extracted-pkg-path: '${{ github.workspace }}\pkg'
tests-path: '${{ github.workspace }}\pkg\info\test\'
ver-json-path: '${{ github.workspace }}\version.json'
active-env-name: 'test'
miniconda-lib-path: 'C:\Miniconda3\envs\test\Library\lib\'
miniconda-bin-path: 'C:\Miniconda3\envs\test\Library\bin\'

steps:
- name: Download artifact
uses: actions/download-artifact@v2
with:
name: ${{ env.PACKAGE_NAME }} ${{ runner.os }} Python ${{ matrix.python }}
path: ${{ env.pkg-path-in-channel }}

- name: Extract package archive
run: |
@echo on
mkdir -p ${{ env.extracted-pkg-path }}

set SEARCH_SCRIPT="DIR ${{ env.pkg-path-in-channel }} /s/b | FINDSTR /r "dpnp-.*\.tar\.bz2""
FOR /F "tokens=* USEBACKQ" %%F IN (`%SEARCH_SCRIPT%`) DO (
SET FULL_PACKAGE_PATH=%%F
)
echo FULL_PACKAGE_PATH: %FULL_PACKAGE_PATH%

python -c "import shutil; shutil.unpack_archive(r\"%FULL_PACKAGE_PATH%\", extract_dir=r\"${{ env.extracted-pkg-path }}\")"
dir ${{ env.extracted-pkg-path }}
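
Unlike the Linux job, which calls `tar` directly, the Windows job unpacks the archive with `shutil.unpack_archive`, which selects the bztar handler from the `.tar.bz2` suffix. A self-contained sketch of the same call (the package file name and paths are hypothetical stand-ins for %FULL_PACKAGE_PATH% and the workflow variables):

```python
import shutil
from pathlib import Path

# Hypothetical stand-ins for %FULL_PACKAGE_PATH% and ${{ env.extracted-pkg-path }}.
package_path = Path(r"C:\channel\win-64\dpnp-0.0.0-py39_0.tar.bz2")
extract_dir = Path(r"C:\pkg")

extract_dir.mkdir(parents=True, exist_ok=True)
# unpack_archive infers the bztar format from the .tar.bz2 extension.
shutil.unpack_archive(str(package_path), extract_dir=str(extract_dir))
print(sorted(p.name for p in extract_dir.iterdir()))
```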

- name: Setup miniconda
uses: conda-incubator/setup-miniconda@v2
with:
auto-update-conda: true
python-version: ${{ matrix.python }}
miniconda-version: 'latest'
activate-environment: ${{ env.active-env-name }}

# Needed to be able to run conda index
- name: Install conda-build
run: conda install conda-build

- name: Create conda channel
run: conda index ${{ env.channel-path }}

- name: Test conda channel
run: |
@echo on
conda search ${{ env.PACKAGE_NAME }} -c ${{ env.channel-path }} --override-channels --info --json > ${{ env.ver-json-path }}

- name: Dump version.json
run: more ${{ env.ver-json-path }}

- name: Collect dependencies
run: |
@echo on
set "SCRIPT=${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}"
FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO (
SET PACKAGE_VERSION=%%F
)
echo PACKAGE_VERSION: %PACKAGE_VERSION%

conda install ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% dpctl=${{ matrix.dpctl }} python=${{ matrix.python }} ${{ env.TEST_CHANNELS }} --only-deps --dry-run > lockfile
env:
TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'

- name: Dump lockfile
run: more lockfile

- name: Cache conda packages
uses: actions/cache@v3
env:
CACHE_NUMBER: 1 # Increase to reset cache
with:
path: ${{ env.conda-pkgs }}
key:
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-${{hashFiles('lockfile') }}
restore-keys: |
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-python-${{ matrix.python }}-
${{ runner.os }}-conda-${{ env.CACHE_NUMBER }}-

- name: Install opencl_rt
run: conda install opencl_rt -c intel --override-channels

- name: Install dpnp
run: |
@echo on
set "SCRIPT=${{ env.VER_SCRIPT1 }} ${{ env.VER_SCRIPT2 }}"
FOR /F "tokens=* USEBACKQ" %%F IN (`python -c "%SCRIPT%"`) DO (
SET PACKAGE_VERSION=%%F
)
echo PACKAGE_VERSION: %PACKAGE_VERSION%

conda install ${{ env.PACKAGE_NAME }}=%PACKAGE_VERSION% dpctl=${{ matrix.dpctl }} pytest python=${{ matrix.python }} ${{ env.TEST_CHANNELS }}
env:
TEST_CHANNELS: '-c ${{ env.channel-path }} ${{ env.CHANNELS }}'

- name: List installed packages
run: conda list

- name: Add library
shell: pwsh
run: |
# Make sure the below libraries exist
Get-Item -Path ${{ env.miniconda-bin-path }}\OpenCL.dll
Get-Item -Path ${{ env.miniconda-lib-path }}\intelocl64.dll

echo "OCL_ICD_FILENAMES=${{ env.miniconda-lib-path }}\intelocl64.dll" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
try {$list = Get-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors | Select-Object -ExpandProperty Property } catch {$list=@()}

if ($list.count -eq 0) {
if (-not (Test-Path -Path HKLM:\SOFTWARE\Khronos)) {
New-Item -Path HKLM:\SOFTWARE\Khronos
}

if (-not (Test-Path -Path HKLM:\SOFTWARE\Khronos\OpenCL)) {
New-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL
}

if (-not (Test-Path -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors)) {
New-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors
}

New-ItemProperty -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors -Name ${{ env.miniconda-lib-path }}\intelocl64.dll -Value 0
try {$list = Get-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors | Select-Object -ExpandProperty Property } catch {$list=@()}
Write-Output $(Get-Item -Path HKLM:\SOFTWARE\Khronos\OpenCL\Vendors)

# Now copy OpenCL.dll into system folder
$system_ocl_icd_loader="C:\Windows\System32\OpenCL.dll"
$python_ocl_icd_loader="${{ env.miniconda-bin-path }}\OpenCL.dll"
Copy-Item -Path $python_ocl_icd_loader -Destination $system_ocl_icd_loader

if (Test-Path -Path $system_ocl_icd_loader) {
Write-Output "$system_ocl_icd_loader has been copied"
$acl = Get-Acl $system_ocl_icd_loader
Write-Output $acl
} else {
Write-Output "OCL-ICD-Loader was not copied"
}

# Variable assisting OpenCL CPU driver to find TBB DLLs which are not located where it expects them by default
echo "TBB_DLL_PATH=${{ env.miniconda-bin-path }}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
}
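
The step above makes the Intel OpenCL CPU runtime discoverable on the runner: it points OCL_ICD_FILENAMES at intelocl64.dll, registers that DLL under HKLM\SOFTWARE\Khronos\OpenCL\Vendors when no vendor is present, copies OpenCL.dll into System32, and exports TBB_DLL_PATH so the CPU driver can locate the TBB DLLs. For illustration only (not part of the workflow), the same vendor-key check can be done from Python with the standard-library winreg module on Windows:

```python
import winreg  # Windows-only standard library module

VENDORS_KEY = r"SOFTWARE\Khronos\OpenCL\Vendors"

def list_opencl_vendors():
    """Return the ICD DLL names registered under HKLM, or [] if the key is missing or empty."""
    try:
        with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, VENDORS_KEY) as key:
            value_count = winreg.QueryInfoKey(key)[1]  # number of values under the key
            return [winreg.EnumValue(key, i)[0] for i in range(value_count)]
    except OSError:
        return []

if __name__ == "__main__":
    print(list_opencl_vendors() or "no OpenCL vendors registered")
```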

- name: Smoke test
run: python -c "import dpnp, dpctl; dpctl.lsplatform()"

# TODO: run the whole scope once the issues on CPU are resolved
- name: Run tests
run: python -m pytest -q -ra --disable-warnings -vv tests\test_arraycreation.py tests\test_dparray.py tests\test_mathematical.py
working-directory: ${{ env.tests-path }}

upload_linux:
needs: test_linux

if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}}

runs-on: ubuntu-latest
@@ -176,7 +451,7 @@ jobs:
run: anaconda --token ${{ env.ANACONDA_TOKEN }} upload --user dppy --label dev ${{ env.PACKAGE_NAME }}-*.tar.bz2

upload_windows:
needs: build_windows
needs: test_windows

if: ${{github.ref == 'refs/heads/master' || (startsWith(github.ref, 'refs/heads/release') == true) || github.event_name == 'push' && contains(github.ref, 'refs/tags/')}}

1 change: 1 addition & 0 deletions tests/skipped_tests.tbl
@@ -129,6 +129,7 @@ tests/test_linalg.py::test_svd[(2,2)-complex128]
tests/test_linalg.py::test_svd[(3,4)-complex128]
tests/test_linalg.py::test_svd[(5,3)-complex128]
tests/test_linalg.py::test_svd[(16,16)-complex128]
tests/test_mathematical.py::TestGradient::test_gradient_y1_dx[3.5-array1]
tests/test_random.py::TestPermutationsTestShuffle::test_shuffle1[lambda x: (dpnp.asarray([(i, i) for i in x], [("a", int), ("b", int)]).view(dpnp.recarray))]
tests/test_random.py::TestPermutationsTestShuffle::test_shuffle1[lambda x: dpnp.asarray([(i, i) for i in x], [("a", object), ("b", dpnp.int32)])]]
tests/test_random.py::TestPermutationsTestShuffle::test_shuffle1[lambda x: dpnp.asarray(x).astype(dpnp.int8)]