 install_root="$(dirname $(which python))/../lib/python${py_dot}/site-packages/torch/"
 fi

-if [[ "$DESIRED_CUDA" != 'cpu' && "$DESIRED_CUDA" != 'cpu-cxx11-abi' && "$DESIRED_CUDA" != *"rocm"* ]]; then
-  # cu90, cu92, cu100, cu101
-  if [[ ${#DESIRED_CUDA} -eq 4 ]]; then
-    CUDA_VERSION="${DESIRED_CUDA:2:1}.${DESIRED_CUDA:3:1}"
-  elif [[ ${#DESIRED_CUDA} -eq 5 ]]; then
-    CUDA_VERSION="${DESIRED_CUDA:2:2}.${DESIRED_CUDA:4:1}"
-  fi
-  echo "Using CUDA $CUDA_VERSION as determined by DESIRED_CUDA"
-
-  # Switch `/usr/local/cuda` to the desired CUDA version
-  rm -rf /usr/local/cuda || true
-  ln -s "/usr/local/cuda-${CUDA_VERSION}" /usr/local/cuda
-  export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH
-fi
-
 ###############################################################################
 # Check GCC ABI
 ###############################################################################
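For reference, the deleted block mapped the `DESIRED_CUDA` tag to a dotted version string with bash substring expansion (cu90 -> 9.0, cu101 -> 10.1). Below is a minimal standalone sketch of that mapping, using the same expansions as the removed lines; the loop harness is illustrative only and not part of the script:

for DESIRED_CUDA in cu90 cu92 cu100 cu101; do
  if [[ ${#DESIRED_CUDA} -eq 4 ]]; then
    CUDA_VERSION="${DESIRED_CUDA:2:1}.${DESIRED_CUDA:3:1}"   # e.g. cu90 -> 9.0
  elif [[ ${#DESIRED_CUDA} -eq 5 ]]; then
    CUDA_VERSION="${DESIRED_CUDA:2:2}.${DESIRED_CUDA:4:1}"   # e.g. cu101 -> 10.1
  fi
  echo "$DESIRED_CUDA -> $CUDA_VERSION"
done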
@@ -311,6 +296,10 @@ build_example_cpp_with_incorrect_abi () {
 # Check simple Python/C++ calls
 ###############################################################################
 if [[ "$PACKAGE_TYPE" == 'libtorch' ]]; then
+  # NS: Set LD_LIBRARY_PATH for CUDA builds, but perhaps it should be removed
+  if [[ "$DESIRED_CUDA" == "cu"* ]]; then
+    export LD_LIBRARY_PATH=/usr/local/cuda/lib64
+  fi
   build_and_run_example_cpp simple-torch-test
   # `_GLIBCXX_USE_CXX11_ABI` is always ignored by gcc in devtoolset7, so we test
   # the expected failure case for Ubuntu 16.04 + gcc 5.4 only.
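Note one behavioral difference: the removed top-level block prepended to any inherited library path (`export LD_LIBRARY_PATH=/usr/local/cuda/lib64:$LD_LIBRARY_PATH`), while the new libtorch-only branch overwrites it. If preserving an inherited path were desired, a hedged alternative is sketched below; this is an assumption about intent, not what the commit does:

# Assumes an inherited LD_LIBRARY_PATH should be preserved; the commit itself overwrites it.
if [[ "$DESIRED_CUDA" == "cu"* ]]; then
  export LD_LIBRARY_PATH="/usr/local/cuda/lib64${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"
fi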