Skip to content

Commit 6d9eabf

Browse files
committed
feat(//circleci): Adding release automation
Signed-off-by: Naren Dasan <[email protected]> Signed-off-by: Naren Dasan <[email protected]>
1 parent 515b9b9 commit 6d9eabf

File tree

13 files changed

+508
-125
lines changed

13 files changed

+508
-125
lines changed

.circleci/config.yml

Lines changed: 240 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -155,18 +155,37 @@ commands:
155155
platform:
156156
type: string
157157
default: "x86_64"
158+
release:
159+
type: boolean
160+
default: false
158161
steps:
159162
- run:
160-
name: Build torch-tensorrt python release
163+
name: Build setup
161164
command: |
162165
mv toolchains/ci_workspaces/WORKSPACE.<< parameters.platform >> WORKSPACE
163166
cd py
164167
python3 -m pip install wheel setuptools
165168
python3 -m pip install pybind11==2.6.2
166-
python3 setup.py bdist_wheel --use-cxx11-abi
167-
python3 setup.py install --use-cxx11-abi
168-
mkdir -p /tmp/dist/builds
169-
cp dist/* /tmp/dist/builds
169+
- when:
170+
condition: << parameters.release >>
171+
steps:
172+
- run:
173+
name: Build torch-tensorrt python release package
174+
command: |
175+
python3 setup.py bdist_wheel --use-cxx11-abi --release
176+
python3 setup.py install --use-cxx11-abi --release
177+
mkdir -p /tmp/dist/builds
178+
cp dist/* /tmp/dist/builds
179+
- unless:
180+
condition: << parameters.release >>
181+
steps:
182+
- run:
183+
name: Build torch-tensorrt python package
184+
command: |
185+
python3 setup.py bdist_wheel --use-cxx11-abi
186+
python3 setup.py install --use-cxx11-abi
187+
mkdir -p /tmp/dist/builds
188+
cp dist/* /tmp/dist/builds
170189
171190
build-py-fx-only:
172191
description: "Build the torch-tensorrt python release with only the fx backend"
@@ -579,6 +598,143 @@ jobs:
579598
- dump-test-env
580599
- test-fx
581600

601+
package-x86_64:
602+
parameters:
603+
enabled:
604+
type: boolean
605+
default: false
606+
torch-build:
607+
type: string
608+
torch-build-index:
609+
type: string
610+
machine:
611+
image: ubuntu-2004-cuda-11.4:202110-01
612+
resource_class: xlarge
613+
steps:
614+
- when:
615+
condition: << parameters.enabled >>
616+
steps:
617+
- checkout
618+
- run:
619+
name: "Build packaging container"
620+
command: |
621+
cd ~/project/py/
622+
docker build -t torch_tensorrt_release_env --build-arg trt_version=<< pipeline.parameters.trt-release-version-short >> -f ci/Dockerfile.ci .
623+
- run:
624+
name: Build Python packages and pre-cxx11-abi tarball
625+
command: |
626+
cd ~/project/py/
627+
cp ~/project/toolchains/ci_workspaces/WORKSPACE.x86_64.release ~/project/WORKSPACE
628+
docker run -it --rm -v$(pwd)/..:/workspace/Torch-TensorRT torch_tensorrt_release_env /bin/bash /workspace/Torch-TensorRT/py/ci/build_whl.sh
629+
- create-env:
630+
os: "ubuntu2004"
631+
platform: "x86_64"
632+
cudnn-version: << pipeline.parameters.cudnn-release-version >>
633+
trt-version-short: << pipeline.parameters.trt-release-version-short >>
634+
bazel-version: "5.1.1"
635+
bazel-platform: "x86_64"
636+
- run:
637+
name: Build cxx11-abi tarball
638+
command: |
639+
set -e
640+
bazel build //:libtorchtrt -c opt
641+
CUDA_VERSION=$(cd ~/project/py/torch_tensorrt && python3 -c "from _version import __cuda_version__;print(__cuda_version__)")
642+
TORCHTRT_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __version__;print(__version__)")
643+
TRT_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __tensorrt_version__;print(__tensorrt_version__)")
644+
CUDNN_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __cudnn_version__;print(__cudnn_version__)")
645+
pip3 install -r ~/project/py/requirements.txt
646+
TORCH_VERSION=$(python3 -c "from torch import __version__;print(__version__.split('+')[0])")
647+
cp ~/project/bazel-bin/libtorchtrt.tar.gz ~/project/py/dist/libtorchtrt-${TORCHTRT_VERSION}-cudnn${CUDNN_VERSION}-tensorrt${TRT_VERSION}-cuda${CUDA_VERSION}-libtorch-${TORCH_VERSION}.tar.gz
648+
- run:
649+
name: Collect packages
650+
command: |
651+
mkdir -p /tmp/dist/release
652+
cp -r ~/project/py/dist/* /tmp/dist/release
653+
- store_artifacts:
654+
path: /tmp/dist/release
655+
destination: x86_64-release-pkgs
656+
- unless:
657+
condition: << parameters.enabled >>
658+
steps:
659+
- run:
660+
name: Skipped packaging
661+
command: echo -e "Packaging stage not enabled"
662+
663+
package-jetson:
664+
parameters:
665+
torch-build:
666+
type: string
667+
jetpack-version:
668+
type: string
669+
cxx11-abi:
670+
type: boolean
671+
default: true
672+
python-version:
673+
type: string
674+
default: 3.8.10
675+
machine:
676+
image: ubuntu-2004:202201-02
677+
resource_class: arm.xlarge
678+
steps:
679+
- checkout
680+
#- run:
681+
# name: Upgrade base
682+
# command: |
683+
# sudo apt clean
684+
# sudo apt update
685+
# sudo apt upgrade
686+
# sudo apt install software-properties-common
687+
- install-cuda:
688+
os: "ubuntu2004"
689+
platform: "sbsa"
690+
cuda-pkg-name: "cuda-toolkit-11-4"
691+
- run:
692+
name: Install openblas
693+
command: sudo apt install libopenblas-dev
694+
- create-env:
695+
os: "ubuntu2004"
696+
platform: "sbsa"
697+
cudnn-version: << pipeline.parameters.cudnn-jetson-version >>
698+
trt-version-short: << pipeline.parameters.trt-jetson-version-short >>
699+
bazel-version: "5.1.1"
700+
bazel-platform: "arm64"
701+
- run:
702+
name: Set python version
703+
command: |
704+
pyenv install << parameters.python-version >>
705+
pyenv global << parameters.python-version >>
706+
- run:
707+
name: Install NGC Torch
708+
environment:
709+
TORCH_INSTALL: https://developer.download.nvidia.com/compute/redist/jp/v<< parameters.jetpack-version >>/pytorch/<< parameters.torch-build >>
710+
command: |
711+
set -e
712+
python3 -m pip install --upgrade pip; python3 -m pip install setuptools wheel; python3 -m pip install expecttest xmlrunner hypothesis aiohttp numpy=='1.19.4' pyyaml scipy=='1.5.3' ninja cython typing_extensions protobuf; export "LD_LIBRARY_PATH=/usr/lib/llvm-8/lib:$LD_LIBRARY_PATH"; python3 -m pip install --upgrade protobuf; python3 -m pip install --no-cache $TORCH_INSTALL
713+
- build-py-cxx11-abi:
714+
platform: "sbsa"
715+
release: true
716+
- run:
717+
name: Build cxx11-abi tarball
718+
command: |
719+
set -e
720+
bazel build //:libtorchtrt -c opt
721+
CUDA_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __cuda_version__;print(__cuda_version__)")
722+
TORCHTRT_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __version__;print(__version__)")
723+
TRT_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __tensorrt_version__;print(__tensorrt_version__)")
724+
CUDNN_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __cudnn_version__;print(__cudnn_version__)")
725+
pip3 install -r ~/project/py/requirements.txt
726+
TORCH_VERSION=$(python3 -c "from torch import __version__;print(__version__.split('+')[0])")
727+
cp ~/project/bazel-bin/libtorchtrt.tar.gz ~/project/py/dist/libtorchtrt-${TORCHTRT_VERSION}-cudnn${CUDNN_VERSION}-tensorrt${TRT_VERSION}-cuda${CUDA_VERSION}-libtorch-${TORCH_VERSION}.tar.gz
728+
- run:
729+
name: Move to release dir
730+
command: |
731+
mkdir -p /tmp/dist/jetson
732+
cp -r ~/project/py/dist/* /tmp/dist/jetson
733+
- store_artifacts:
734+
path: /tmp/dist/jetson
735+
destination: aarch64-release-pkgs
736+
737+
582738
parameters:
583739
# Nightly platform config
584740
torch-nightly-build:
@@ -631,6 +787,10 @@ parameters:
631787
type: string
632788
default: "8.4.1.5"
633789

790+
packaging-enabled:
791+
type: boolean
792+
default: false
793+
634794
# Invoke jobs via workflows
635795
# See: https://circleci.com/docs/2.0/configuration-reference/#workflows
636796
workflows:
@@ -685,7 +845,6 @@ workflows:
685845

686846

687847

688-
689848
- build-x86_64-pyt-nightly:
690849
torch-build: << pipeline.parameters.torch-nightly-build >>
691850
torch-build-index: << pipeline.parameters.torch-nightly-build-index >>
@@ -718,6 +877,74 @@ workflows:
718877
trt-version-long: << pipeline.parameters.trt-nightly-version-long >>
719878
requires:
720879
- build-x86_64-pyt-nightly
880+
release:
881+
triggers:
882+
- schedule:
883+
cron: "0 0 * * *"
884+
filters:
885+
branches:
886+
only:
887+
- master
888+
- release/*
889+
890+
jobs:
891+
- build-aarch64-pyt-jetson:
892+
torch-build: << pipeline.parameters.torch-jetson-build >>
893+
jetpack-version: << pipeline.parameters.jetpack-version >>
894+
python-version: 3.8.10
895+
896+
- build-x86_64-pyt-release:
897+
torch-build: << pipeline.parameters.torch-release-build >>
898+
torch-build-index: << pipeline.parameters.torch-release-build-index >>
899+
900+
- test-core-cpp-x86_64:
901+
name: test-core-cpp-x86_64-pyt-release
902+
channel: "release"
903+
torch-build: << pipeline.parameters.torch-release-build >>
904+
torch-build-index: << pipeline.parameters.torch-release-build-index >>
905+
trt-version-short: << pipeline.parameters.trt-release-version-short >>
906+
trt-version-long: << pipeline.parameters.trt-release-version-long >>
907+
cudnn-version: << pipeline.parameters.cudnn-release-version >>
908+
requires:
909+
- build-x86_64-pyt-release
910+
911+
- test-py-ts-x86_64:
912+
name: test-py-ts-x86_64-pyt-release
913+
channel: "release"
914+
torch-build: << pipeline.parameters.torch-release-build >>
915+
torch-build-index: << pipeline.parameters.torch-release-build-index >>
916+
trt-version-long: << pipeline.parameters.trt-release-version-long >>
917+
requires:
918+
- build-x86_64-pyt-release
919+
920+
- test-py-ts-x86_64:
921+
name: test-py-fx-x86_64-pyt-release
922+
channel: "release"
923+
torch-build: << pipeline.parameters.torch-release-build >>
924+
torch-build-index: << pipeline.parameters.torch-release-build-index >>
925+
trt-version-long: << pipeline.parameters.trt-release-version-long >>
926+
requires:
927+
- build-x86_64-pyt-release
928+
929+
- package-x86_64:
930+
name: package-release-x86_64
931+
enabled: << pipeline.parameters.packaging-enabled >>
932+
torch-build: << pipeline.parameters.torch-release-build >>
933+
torch-build-index: << pipeline.parameters.torch-release-build-index >>
934+
requires:
935+
- test-core-cpp-x86_64-pyt-release
936+
- test-py-ts-x86_64-pyt-release
937+
- test-py-fx-x86_64-pyt-release
938+
939+
- package-jetson:
940+
name: package-release-aarch64-jetson
941+
enabled: << pipeline.parameters.packaging-enabled >>
942+
torch-build: << pipeline.parameters.torch-jetson-build >>
943+
jetpack-version: << pipeline.parameters.jetpack-version >>
944+
python-version: 3.8.10
945+
946+
requires:
947+
- build-aarch64-pyt-jetson
721948

722949
on-push:
723950
jobs:
@@ -761,6 +988,13 @@ workflows:
761988
requires:
762989
- build-x86_64-pyt-release
763990

991+
- deploy-py-x86_64:
992+
name: deploy-release-x86_64-py
993+
torch-build: << pipeline.parameters.torch-release-build >>
994+
torch-build-index: << pipeline.parameters.torch-release-build-index >>
995+
trt-version-short: << pipeline.parameters.trt-release-version-short >>
996+
trt-version-long: << pipeline.parameters.trt-release-version-long >>
997+
764998

765999

7661000
- build-x86_64-pyt-nightly:

core/conversion/converters/impl/max.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,8 +29,8 @@ auto max_registrations TORCHTRT_UNUSED = RegisterNodeConversionPatterns().patter
2929
TORCHTRT_CHECK(topk_layer, "Unable to create max layer from node: " << *n);
3030
auto topk_dims = util::toVec(topk_layer->getOutput(0)->getDimensions());
3131

32-
nvinfer1::ITensor* out0;
33-
nvinfer1::ITensor* out1;
32+
nvinfer1::ITensor* out0 = nullptr;
33+
nvinfer1::ITensor* out1 = nullptr;
3434
if (!keep_dims) {
3535
if (topk_dims[dim] == 1) {
3636
auto squeeze_layer = ctx->net->addShuffle(*topk_layer->getOutput(0));

py/ci/Dockerfile.ci

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
FROM pytorch/manylinux-builder:cuda11.3
2+
ARG trt_version
3+
4+
RUN echo -e "Installing with TensorRT ${trt_version}"
5+
6+
RUN yum install -y ninja-build tensorrt-${trt_version}.*
7+
8+
RUN wget https://github.com/bazelbuild/bazelisk/releases/download/v1.11.0/bazelisk-linux-amd64 \
9+
&& mv bazelisk-linux-amd64 /usr/bin/bazel \
10+
&& chmod +x /usr/bin/bazel
11+
12+
RUN mkdir /workspace
13+
14+
WORKDIR /workspace

py/ci/build_whl.sh

Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
#!/bin/bash
2+
3+
# Example usage: docker run -it -v$(pwd)/..:/workspace/TRTorch build_trtorch_wheel /bin/bash /workspace/TRTorch/py/build_whl.sh
4+
5+
cd /workspace/Torch-TensorRT/py
6+
7+
export CXX=g++
8+
export CUDA_HOME=/usr/local/cuda-11.3
9+
10+
build_py37() {
11+
/opt/python/cp37-cp37m/bin/python -m pip install -r requirements.txt
12+
/opt/python/cp37-cp37m/bin/python setup.py bdist_wheel --release --ci
13+
#auditwheel repair --plat manylinux2014_x86_64
14+
}
15+
16+
build_py38() {
17+
/opt/python/cp38-cp38/bin/python -m pip install -r requirements.txt
18+
/opt/python/cp38-cp38/bin/python setup.py bdist_wheel --release --ci
19+
#auditwheel repair --plat manylinux2014_x86_64
20+
}
21+
22+
build_py39() {
23+
/opt/python/cp39-cp39/bin/python -m pip install -r requirements.txt
24+
/opt/python/cp39-cp39/bin/python setup.py bdist_wheel --release --ci
25+
#auditwheel repair --plat manylinux2014_x86_64
26+
}
27+
28+
build_py310() {
29+
/opt/python/cp310-cp310/bin/python -m pip install -r requirements.txt
30+
/opt/python/cp310-cp310/bin/python setup.py bdist_wheel --release --ci
31+
#auditwheel repair --plat manylinux2014_x86_64
32+
}
33+
34+
build_libtorchtrt() {
35+
bazel clean
36+
bazel build //:libtorchtrt --platforms //toolchains:ci_rhel_x86_64_linux -c opt
37+
CUDA_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __cuda_version__;print(__cuda_version__)")
38+
TORCHTRT_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __version__;print(__version__)")
39+
TRT_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __tensorrt_version__;print(__tensorrt_version__)")
40+
CUDNN_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __cudnn_version__;print(__cudnn_version__)")
41+
TORCH_VERSION=$(/opt/python/cp310-cp310/bin/python -c "from torch import __version__;print(__version__.split('+')[0])")
42+
cp ../bazel-bin/libtorchtrt.tar.gz dist/libtorchtrt-${TORCHTRT_VERSION}-cudnn${CUDNN_VERSION}-tensorrt${TRT_VERSION}-cuda${CUDA_VERSION}-libtorch-${TORCH_VERSION}.tar.gz
43+
}
44+
45+
build_libtorchtrt_pre_cxx11_abi() {
46+
bazel build //:libtorchtrt --config pre_cxx11_abi --platforms //toolchains:ci_rhel_x86_64_linux -c opt
47+
CUDA_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __cuda_version__;print(__cuda_version__)")
48+
TORCHTRT_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __version__;print(__version__)")
49+
TRT_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __tensorrt_version__;print(__tensorrt_version__)")
50+
CUDNN_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __cudnn_version__;print(__cudnn_version__)")
51+
TORCH_VERSION=$(/opt/python/cp310-cp310/bin/python -c "from torch import __version__;print(__version__.split('+')[0])")
52+
cp ../bazel-bin/libtorchtrt.tar.gz dist/libtorchtrt-${TORCHTRT_VERSION}-pre-cxx11-abi-cudnn${CUDNN_VERSION}-tensorrt${TRT_VERSION}-cuda${CUDA_VERSION}-libtorch-${TORCH_VERSION}.tar.gz
53+
54+
}
55+
56+
build_py37
57+
build_py38
58+
build_py39
59+
build_py310
60+
build_libtorchtrt_pre_cxx11_abi
61+
build_libtorchtrt

py/requirements.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
11
--extra-index-url https://download.pytorch.org/whl/cu113
2-
torch==1.12.0+cu113
2+
torch==1.12.0
33
pybind11==2.6.2

0 commit comments

Comments
 (0)