Skip to content

feat(//circleci): Adding release automation #1215

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 7 commits into from
Aug 3, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
257 changes: 241 additions & 16 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -155,18 +155,38 @@ commands:
platform:
type: string
default: "x86_64"
release:
type: boolean
default: false
steps:
- run:
name: Build torch-tensorrt python release
name: Build setup
command: |
mv toolchains/ci_workspaces/WORKSPACE.<< parameters.platform >> WORKSPACE
cd py
mv ~/project/toolchains/ci_workspaces/WORKSPACE.<< parameters.platform >> ~/project/WORKSPACE
python3 -m pip install wheel setuptools
python3 -m pip install pybind11==2.6.2
python3 setup.py bdist_wheel --use-cxx11-abi
python3 setup.py install --use-cxx11-abi
mkdir -p /tmp/dist/builds
cp dist/* /tmp/dist/builds
- when:
condition: << parameters.release >>
steps:
- run:
name: Build torch-tensorrt python release package
command: |
cd ~/project/py
python3 setup.py bdist_wheel --use-cxx11-abi --release
python3 setup.py install --use-cxx11-abi --release
mkdir -p /tmp/dist/builds
cp dist/* /tmp/dist/builds
- unless:
condition: << parameters.release >>
steps:
- run:
name: Build torch-tensorrt python package
command: |
cd ~/project/py
python3 setup.py bdist_wheel --use-cxx11-abi
python3 setup.py install --use-cxx11-abi
mkdir -p /tmp/dist/builds
cp dist/* /tmp/dist/builds

build-py-fx-only:
description: "Build the torch-tensorrt python release with only the fx backend"
Expand Down Expand Up @@ -579,6 +599,150 @@ jobs:
- dump-test-env
- test-fx

# Release packaging job for x86_64: builds the Python wheels and both the
# pre-cxx11-abi and cxx11-abi libtorchtrt tarballs, then publishes them as
# CircleCI artifacts. The whole job is gated on the `enabled` parameter so it
# can always be present in a workflow but only do work when packaging is
# requested.
package-x86_64:
  parameters:
    # Master switch — when false the job runs a single no-op step.
    enabled:
      type: boolean
      default: false
    torch-build:
      type: string
    torch-build-index:
      type: string
  machine:
    image: ubuntu-2004-cuda-11.4:202110-01
  resource_class: xlarge
  steps:
    - when:
        condition: << parameters.enabled >>
        steps:
          - checkout
          - run:
              name: "Build packaging container"
              command: |
                cd ~/project/py/
                docker build -t torch_tensorrt_release_env --build-arg trt_version=<< pipeline.parameters.trt-release-version-short >> -f ci/Dockerfile.ci .
          # Wheels and the pre-cxx11-abi tarball are produced inside the
          # manylinux-style container built above.
          - run:
              name: Build Python packages and pre-cxx11-abi tarball
              command: |
                cd ~/project/py/
                cp ~/project/toolchains/ci_workspaces/WORKSPACE.x86_64.release.rhel ~/project/WORKSPACE
                docker run -it --rm -v ~/project:/workspace/project torch_tensorrt_release_env /bin/bash /workspace/project/py/ci/build_whl.sh
          - create-env:
              os: "ubuntu2004"
              platform: "x86_64"
              cudnn-version: << pipeline.parameters.cudnn-release-version >>
              trt-version-short: << pipeline.parameters.trt-release-version-short >>
              bazel-version: "5.1.1"
              bazel-platform: "x86_64"
          # The cxx11-abi tarball is built on the host VM with bazel; version
          # strings are read back out of the installed package metadata to
          # name the artifact.
          - run:
              name: Build cxx11-abi tarball
              command: |
                set -e
                cd ~/project/
                cp ~/project/toolchains/ci_workspaces/WORKSPACE.x86_64.release.ubuntu ~/project/WORKSPACE
                bazel build //:libtorchtrt -c opt --noshow_progress
                sudo chown -R $(whoami) ~/project/py
                CUDA_VERSION=$(cd ~/project/py/torch_tensorrt && python3 -c "from _version import __cuda_version__;print(__cuda_version__)")
                TORCHTRT_VERSION=$(cd ~/project/py/torch_tensorrt && python3 -c "from _version import __version__;print(__version__)")
                TRT_VERSION=$(cd ~/project/py/torch_tensorrt && python3 -c "from _version import __tensorrt_version__;print(__tensorrt_version__)")
                CUDNN_VERSION=$(cd ~/project/py/torch_tensorrt && python3 -c "from _version import __cudnn_version__;print(__cudnn_version__)")
                pip3 install -r ~/project/py/requirements.txt
                TORCH_VERSION=$(python3 -c "from torch import __version__;print(__version__.split('+')[0])")
                cp ~/project/bazel-bin/libtorchtrt.tar.gz ~/project/py/dist/libtorchtrt-${TORCHTRT_VERSION}-cudnn${CUDNN_VERSION}-tensorrt${TRT_VERSION}-cuda${CUDA_VERSION}-libtorch${TORCH_VERSION}-x86_64-linux.tar.gz
          - run:
              name: Collect packages
              command: |
                mkdir -p /tmp/dist/release
                cp -r ~/project/py/dist/* /tmp/dist/release
          - store_artifacts:
              path: /tmp/dist/release
              destination: x86_64-release-pkgs
    - unless:
        condition: << parameters.enabled >>
        steps:
          - run:
              name: Skipped packaging
              command: echo -e "Packaging stage not enabled"

# Release packaging job for Jetson (aarch64/sbsa): installs CUDA and the NGC
# PyTorch build for the given JetPack version, builds the Python package and
# the cxx11-abi libtorchtrt tarball, and stores the results as artifacts.
package-jetson:
  parameters:
    enabled:
      type: boolean
      default: true
    torch-build:
      type: string
    jetpack-version:
      type: string
    # NOTE(review): declared but not referenced by any step below — confirm
    # whether this parameter can be dropped or is consumed elsewhere.
    cxx11-abi:
      type: boolean
      default: true
    python-version:
      type: string
      default: "3.8.10"
  machine:
    image: ubuntu-2004:202201-02
  resource_class: arm.xlarge
  steps:
    - checkout
    #- run:
    #    name: Upgrade base
    #    command: |
    #      sudo apt clean
    #      sudo apt update
    #      sudo apt upgrade
    #      sudo apt install software-properties-common
    - install-cuda:
        os: "ubuntu2004"
        platform: "sbsa"
        cuda-pkg-name: "cuda-toolkit-11-4"
    - run:
        name: Install openblas
        command: sudo apt install libopenblas-dev
    - create-env:
        os: "ubuntu2004"
        platform: "sbsa"
        cudnn-version: << pipeline.parameters.cudnn-jetson-version >>
        trt-version-short: << pipeline.parameters.trt-jetson-version-short >>
        bazel-version: "5.1.1"
        bazel-platform: "arm64"
    - run:
        name: Set python version
        command: |
          pyenv install << parameters.python-version >>
          pyenv global << parameters.python-version >>
    # Torch for Jetson comes from NVIDIA's NGC redist wheel matching the
    # requested JetPack version, not from PyPI.
    - run:
        name: Install NGC Torch
        environment:
          TORCH_INSTALL: https://developer.download.nvidia.com/compute/redist/jp/v<< parameters.jetpack-version >>/pytorch/<< parameters.torch-build >>
        command: |
          set -e
          python3 -m pip install --upgrade pip; python3 -m pip install setuptools wheel; python3 -m pip install expecttest xmlrunner hypothesis aiohttp numpy=='1.19.4' pyyaml scipy=='1.5.3' ninja cython typing_extensions protobuf; export "LD_LIBRARY_PATH=/usr/lib/llvm-8/lib:$LD_LIBRARY_PATH"; python3 -m pip install --upgrade protobuf; python3 -m pip install --no-cache $TORCH_INSTALL
    - build-py-cxx11-abi:
        platform: "sbsa"
        release: true
    # Version strings are read from the installed package metadata to name
    # the tarball artifact (includes the JetPack version suffix).
    - run:
        name: Build cxx11-abi tarball
        command: |
          set -e
          cd ~/project/py/
          bazel build //:libtorchtrt -c opt --noshow_progress
          CUDA_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __cuda_version__;print(__cuda_version__)")
          TORCHTRT_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __version__;print(__version__)")
          TRT_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __tensorrt_version__;print(__tensorrt_version__)")
          CUDNN_VERSION=$(cd torch_tensorrt && python3 -c "from _version import __cudnn_version__;print(__cudnn_version__)")
          pip3 install -r ~/project/py/requirements.txt
          TORCH_VERSION=$(python3 -c "from torch import __version__;print(__version__.split('+')[0])")
          cp ~/project/bazel-bin/libtorchtrt.tar.gz ~/project/py/dist/libtorchtrt-${TORCHTRT_VERSION}-cudnn${CUDNN_VERSION}-tensorrt${TRT_VERSION}-cuda${CUDA_VERSION}-libtorch${TORCH_VERSION}-aarch64-linux-jp<< parameters.jetpack-version >>.tar.gz
    - run:
        name: Move to release dir
        command: |
          mkdir -p /tmp/dist/jetson
          cp -r ~/project/py/dist/* /tmp/dist/jetson
    - store_artifacts:
        path: /tmp/dist/jetson
        destination: aarch64-release-pkgs


parameters:
# Nightly platform config
torch-nightly-build:
Expand Down Expand Up @@ -631,6 +795,10 @@ parameters:
type: string
default: "8.4.1.5"

enable-packaging:
type: boolean
default: false

# Invoke jobs via workflows
# See: https://circleci.com/docs/2.0/configuration-reference/#workflows
workflows:
Expand All @@ -642,14 +810,14 @@ workflows:
branches:
only:
- master
- release/**/*
jobs:
- build-aarch64-pyt-jetson:
torch-build: << pipeline.parameters.torch-jetson-build >>
jetpack-version: << pipeline.parameters.jetpack-version >>
python-version: 3.8.10



- build-x86_64-pyt-release:
torch-build: << pipeline.parameters.torch-release-build >>
torch-build-index: << pipeline.parameters.torch-release-build-index >>
Expand All @@ -674,7 +842,7 @@ workflows:
requires:
- build-x86_64-pyt-release

- test-py-ts-x86_64:
- test-py-fx-x86_64:
name: test-py-fx-x86_64-pyt-release
channel: "release"
torch-build: << pipeline.parameters.torch-release-build >>
Expand All @@ -684,8 +852,6 @@ workflows:
- build-x86_64-pyt-release




- build-x86_64-pyt-nightly:
torch-build: << pipeline.parameters.torch-nightly-build >>
torch-build-index: << pipeline.parameters.torch-nightly-build-index >>
Expand Down Expand Up @@ -719,13 +885,74 @@ workflows:
requires:
- build-x86_64-pyt-nightly

on-push:
release:
when: << pipeline.parameters.enable-packaging >>
jobs:
- build-aarch64-pyt-jetson:
torch-build: << pipeline.parameters.torch-jetson-build >>
jetpack-version: << pipeline.parameters.jetpack-version >>
python-version: 3.8.10

- build-x86_64-pyt-release:
torch-build: << pipeline.parameters.torch-release-build >>
torch-build-index: << pipeline.parameters.torch-release-build-index >>


- test-core-cpp-x86_64:
name: test-core-cpp-x86_64-pyt-release
channel: "release"
torch-build: << pipeline.parameters.torch-release-build >>
torch-build-index: << pipeline.parameters.torch-release-build-index >>
trt-version-short: << pipeline.parameters.trt-release-version-short >>
trt-version-long: << pipeline.parameters.trt-release-version-long >>
cudnn-version: << pipeline.parameters.cudnn-release-version >>
requires:
- build-x86_64-pyt-release

- test-py-ts-x86_64:
name: test-py-ts-x86_64-pyt-release
channel: "release"
torch-build: << pipeline.parameters.torch-release-build >>
torch-build-index: << pipeline.parameters.torch-release-build-index >>
trt-version-long: << pipeline.parameters.trt-release-version-long >>
requires:
- build-x86_64-pyt-release

- test-py-fx-x86_64:
name: test-py-fx-x86_64-pyt-release
channel: "release"
torch-build: << pipeline.parameters.torch-release-build >>
torch-build-index: << pipeline.parameters.torch-release-build-index >>
trt-version-long: << pipeline.parameters.trt-release-version-long >>
requires:
- build-x86_64-pyt-release


- package-x86_64:
name: package-release-x86_64
enabled: << pipeline.parameters.enable-packaging >>
torch-build: << pipeline.parameters.torch-release-build >>
torch-build-index: << pipeline.parameters.torch-release-build-index >>
requires:
- test-core-cpp-x86_64-pyt-release
- test-py-ts-x86_64-pyt-release
- test-py-fx-x86_64-pyt-release

- package-jetson:
name: package-release-aarch64-jetson
enabled: << pipeline.parameters.enable-packaging >>
torch-build: << pipeline.parameters.torch-jetson-build >>
jetpack-version: << pipeline.parameters.jetpack-version >>
python-version: 3.8.10
requires:
- build-aarch64-pyt-jetson

on-push:
jobs:
- build-aarch64-pyt-jetson:
torch-build: << pipeline.parameters.torch-jetson-build >>
jetpack-version: << pipeline.parameters.jetpack-version >>
python-version: 3.8.10


- build-x86_64-pyt-release:
Expand All @@ -752,7 +979,7 @@ workflows:
requires:
- build-x86_64-pyt-release

- test-py-ts-x86_64:
- test-py-fx-x86_64:
name: test-py-fx-x86_64-pyt-release
channel: "release"
torch-build: << pipeline.parameters.torch-release-build >>
Expand All @@ -762,7 +989,6 @@ workflows:
- build-x86_64-pyt-release



- build-x86_64-pyt-nightly:
torch-build: << pipeline.parameters.torch-nightly-build >>
torch-build-index: << pipeline.parameters.torch-nightly-build-index >>
Expand Down Expand Up @@ -794,5 +1020,4 @@ workflows:
torch-build-index: << pipeline.parameters.torch-nightly-build-index >>
trt-version-long: << pipeline.parameters.trt-nightly-version-long >>
requires:
- build-x86_64-pyt-nightly

- build-x86_64-pyt-nightly
4 changes: 2 additions & 2 deletions core/conversion/converters/impl/max.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,8 @@ auto max_registrations TORCHTRT_UNUSED = RegisterNodeConversionPatterns().patter
TORCHTRT_CHECK(topk_layer, "Unable to create max layer from node: " << *n);
auto topk_dims = util::toVec(topk_layer->getOutput(0)->getDimensions());

nvinfer1::ITensor* out0;
nvinfer1::ITensor* out1;
nvinfer1::ITensor* out0 = nullptr;
nvinfer1::ITensor* out1 = nullptr;
if (!keep_dims) {
if (topk_dims[dim] == 1) {
auto squeeze_layer = ctx->net->addShuffle(*topk_layer->getOutput(0));
Expand Down
7 changes: 7 additions & 0 deletions py/build_whl.sh
Original file line number Diff line number Diff line change
Expand Up @@ -31,7 +31,14 @@ build_py310() {
#auditwheel repair --plat manylinux2014_x86_64
}

#build_py311() {
# /opt/python/cp311-cp311/bin/python -m pip install -r requirements.txt
# /opt/python/cp311-cp311/bin/python setup.py bdist_wheel --release
#auditwheel repair --plat manylinux2014_x86_64
#}

build_py37
build_py38
build_py39
build_py310
#build_py311
14 changes: 14 additions & 0 deletions py/ci/Dockerfile.ci
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# CI build container for producing Torch-TensorRT release wheels.
# Based on the PyTorch manylinux builder image (CUDA 11.3); the TensorRT
# version is injected at image-build time via the `trt_version` build-arg.
FROM pytorch/manylinux-builder:cuda11.3
ARG trt_version

# Log which TensorRT version this image is being built against.
RUN echo -e "Installing with TensorRT ${trt_version}"

# ninja for the build; pin the TensorRT package to the requested version
# (the trailing `.*` matches the full patch-level package name).
RUN yum install -y ninja-build tensorrt-${trt_version}.*

# Install bazelisk as `bazel` so the build resolves the bazel version itself.
RUN wget https://github.com/bazelbuild/bazelisk/releases/download/v1.11.0/bazelisk-linux-amd64 \
&& mv bazelisk-linux-amd64 /usr/bin/bazel \
&& chmod +x /usr/bin/bazel

# NOTE(review): WORKDIR creates the directory automatically, so this explicit
# mkdir is redundant (harmless — adds one extra layer).
RUN mkdir /workspace

WORKDIR /workspace
Loading