@@ -36,11 +36,13 @@ jobs:
       pull-requests: write
 
     env:
-      python-ver: '3.12'
-      CHANNELS: '-c dppy/label/dev -c intel -c conda-forge --override-channels'
-      NO_INTEL_CHANNELS: '-c dppy/label/dev -c conda-forge --override-channels'
-      # Install the latest oneAPI compiler to work around an issue
-      INSTALL_ONE_API: 'yes'
+      environment-file: 'environments/environment.yml'
+      build-with-oneapi-env: 'environments/build_with_oneapi.yml'
+      building-docs-env: 'environments/building_docs.yml'
+      oneapi-pkgs-env: ''
+      # Enable this env when only conda packages have to be used, without the OneAPI installation
+      # oneapi-pkgs-env: '${{ github.workspace }}/environments/oneapi_pkgs.yml'
+      dpctl-pkg-txt: 'environments/dpctl_pkg.txt'
 
     steps:
       - name: Cancel Previous Runs
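The empty `oneapi-pkgs-env` variable is the switch for the rest of the workflow: every step guarded by `if: env.oneapi-pkgs-env == ''` (the Intel apt repository setup, the OneAPI apt install, and the pip install of dpctl further down) runs only while it stays empty. A minimal sketch of the alternative configuration, assuming the commented-out `environments/oneapi_pkgs.yml` file exists in the repository:

```yaml
env:
  # Pointing the variable at the OneAPI conda packages file disables the apt-based
  # installation steps; the file is instead merged into environment.yml by conda-merge,
  # so the DPC++ runtime and related packages come from conda channels.
  oneapi-pkgs-env: '${{ github.workspace }}/environments/oneapi_pkgs.yml'
```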
@@ -60,6 +62,7 @@ jobs:
           docker-images: false
 
       - name: Add Intel repository
+        if: env.oneapi-pkgs-env == ''
         run: |
           wget https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB
           cat GPG-PUB-KEY-INTEL-SW-PRODUCTS.PUB | gpg --dearmor | sudo tee /usr/share/keyrings/oneapi-archive-keyring.gpg > /dev/null
@@ -75,6 +78,7 @@ jobs:
           sudo apt install --reinstall -y gcc-9 g++-9 libstdc++-9-dev
 
       - name: Install Intel OneAPI
+        if: env.oneapi-pkgs-env == ''
         run: |
           sudo apt install hwloc \
               intel-oneapi-mkl \
@@ -101,16 +105,25 @@ jobs:
         with:
           fetch-depth: 0
 
+      - name: Install conda-merge tool
+        uses: BSFishy/pip-action@8f2d471d809dc20b6ada98c91910b6ae6243f318 # v1
+        with:
+          packages: conda-merge
+
+      - name: Merge conda env files
+        run: |
+          conda-merge ${{ env.build-with-oneapi-env }} ${{ env.building-docs-env }} ${{ env.oneapi-pkgs-env }} > ${{ env.environment-file }}
+          cat ${{ env.environment-file }}
+
       - name: Setup miniconda
         id: setup_miniconda
         continue-on-error: true
         uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1
         with:
           miniforge-version: latest
           use-mamba: 'true'
-          channels: conda-forge
           conda-remove-defaults: 'true'
-          python-version: ${{ env.python-ver }}
+          environment-file: ${{ env.environment-file }}
           activate-environment: 'docs'
 
       - name: ReSetup miniconda
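The "Merge conda env files" step uses conda-merge to combine the listed fragments into the single `environment.yml` that setup-miniconda consumes; with `oneapi-pkgs-env` empty by default, the third argument expands to nothing and only the first two files are merged. Roughly, the merge concatenates the dependency lists and de-duplicates channels; the fragment contents below are hypothetical and shown only to illustrate the behavior:

```yaml
# environments/build_with_oneapi.yml (hypothetical contents)
channels:
  - conda-forge
dependencies:
  - numpy
  - cython
---
# environments/building_docs.yml (hypothetical contents)
channels:
  - conda-forge
dependencies:
  - sphinx
---
# environments/environment.yml as written by the conda-merge call above
channels:
  - conda-forge
dependencies:
  - numpy
  - cython
  - sphinx
```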
@@ -119,40 +132,30 @@ jobs:
         with:
           miniforge-version: latest
           use-mamba: 'true'
-          channels: conda-forge
           conda-remove-defaults: 'true'
-          python-version: ${{ env.python-ver }}
+          environment-file: ${{ env.environment-file }}
           activate-environment: 'docs'
 
-      # Sometimes `mamba install ...` fails due to slow download speed rate, so disable the check in mamba
-      - name: Disable speed limit check in mamba
-        run: echo "MAMBA_NO_LOW_SPEED_LIMIT=1" >> $GITHUB_ENV
-
-      - name: Install sphinx dependencies
-        run: |
-          mamba install sphinx sphinx_rtd_theme
-          pip install sphinxcontrib-googleanalytics==0.4 \
-              pyenchant sphinxcontrib-spelling
-
-      - name: Install dpnp dependencies
-        if: env.INSTALL_ONE_API == 'yes'
-        run: |
-          mamba install numpy dpctl">=0.18.0dev0" cmake cython pytest ninja scikit-build ${{ env.NO_INTEL_CHANNELS }}
-
-      - name: Install dpnp dependencies
-        if: env.INSTALL_ONE_API != 'yes'
+      # We can't install dpctl as a conda package when the environment is created by
+      # installing Intel OneAPI packages, because the dpctl conda package has a runtime
+      # dependency on the DPC++ RT one, whereas the DPC++ RT package has already been installed
+      # by the apt command above and its version has been matched with the DPC++ compiler.
+      # If we install the DPC++ compiler with apt (including DPC++ RT) and then
+      # install the DPC++ RT conda package while resolving dependencies, this can lead
+      # to a versioning error, i.e. a compatibility issue, since the DPC++ compiler only guarantees
+      # backwards compatibility, not forward compatibility (DPC++ RT may not run a binary built
+      # with a newer version of the DPC++ compiler).
+      # Installing dpctl via pip has no such limitation, as the package has no
+      # run dependency on the DPC++ RT pip package, which is why this step is necessary here.
+      - name: Install dpctl
+        if: env.oneapi-pkgs-env == ''
         run: |
-          mamba install numpy dpctl">=0.18.0dev0" mkl-devel-dpcpp onedpl-devel tbb-devel dpcpp_linux-64 \
-              cmake cython pytest ninja scikit-build ${{ env.CHANNELS }}
-
-      - name: Install cuPy dependencies
-        run: mamba install cupy
+          pip install -r ${{ env.dpctl-pkg-txt }}
 
       - name: Conda info
-        run: mamba info
-
-      - name: Conda list
-        run: mamba list
+        run: |
+          mamba info
+          mamba list
 
       - name: Build library
         run: |
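The packages that the removed steps installed inline hint at what the new environment fragments are expected to carry. The sketch below is only an assumption reconstructed from those removed `mamba install` lines, not the actual contents of the files added by this PR:

```yaml
# environments/building_docs.yml (assumed, mirroring the removed sphinx dependencies step)
dependencies:
  - sphinx
  - sphinx_rtd_theme
  - pip
  - pip:
      - sphinxcontrib-googleanalytics==0.4
      - pyenchant
      - sphinxcontrib-spelling
---
# environments/build_with_oneapi.yml (assumed, mirroring the removed dpnp dependencies step)
dependencies:
  - numpy
  - cmake
  - cython
  - pytest
  - ninja
  - scikit-build
---
# environments/oneapi_pkgs.yml (assumed, mirroring the packages previously pulled from
# the dppy/intel channels when the OneAPI compiler was not installed via apt)
dependencies:
  - dpctl>=0.18.0dev0
  - mkl-devel-dpcpp
  - onedpl-devel
  - tbb-devel
  - dpcpp_linux-64
```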
@@ -178,7 +181,6 @@ jobs:
           echo PROJECT_NUMBER=${PROJECT_NUMBER}
           echo "PROJECT_NUMBER=$PROJECT_NUMBER" >> $GITHUB_ENV
 
-      # https://github.com/marketplace/actions/doxygen-action
       - name: Build backend docs
         uses: mattnotmitt/doxygen-action@b84fe17600245bb5db3d6c247cc274ea98c15a3b # v1.12
         with:
@@ -187,7 +189,6 @@ jobs:
       - name: Copy backend docs
         run: cp -r dpnp/backend/doc/html ${{ env.PUBLISH_DIR }}/backend_doc
 
-      # https://github.com/marketplace/actions/github-pages-action
       # The step is only used to build docs while pushing a PR to "master"
       - name: Deploy docs
         if: env.GH_EVENT_PUSH_UPSTREAM == 'true'