
Add .circleci/config.yml #1153

Merged · 4 commits merged on Jul 22, 2022

693 changes: 639 additions & 54 deletions .circleci/config.yml

Large diffs are not rendered by default.

3 changes: 3 additions & 0 deletions .gitignore
@@ -62,3 +62,6 @@ bazel-Torch-TensorRT-Preview
docsrc/src/
bazel-TensorRT
bazel-tensorrt
+.pytest_cache
+*.cache
+*cifar-10-batches-py*
5 changes: 4 additions & 1 deletion examples/int8/training/vgg16/requirements.txt
@@ -1,3 +1,6 @@
torch>=1.10.0
tensorboard>=1.14.0
-pytorch-quantization --extra-index-url https://pypi.ngc.nvidia.com
+nvidia-pyindex
+--extra-index-url https://pypi.ngc.nvidia.com
+pytorch-quantization>=2.1.2
+tqdm
50 changes: 32 additions & 18 deletions noxfile.py
@@ -5,16 +5,22 @@

# Use system installed Python packages
PYT_PATH='/opt/conda/lib/python3.8/site-packages' if not 'PYT_PATH' in os.environ else os.environ["PYT_PATH"]
print(f"Using python path {PYT_PATH}")

# Set the root directory to the directory of the noxfile unless the user wants to
# TOP_DIR
TOP_DIR=os.path.dirname(os.path.realpath(__file__)) if not 'TOP_DIR' in os.environ else os.environ["TOP_DIR"]
print(f"Test root directory {TOP_DIR}")

# Set the USE_CXX11=1 to use cxx11_abi
USE_CXX11=0 if not 'USE_CXX11' in os.environ else os.environ["USE_CXX11"]
if USE_CXX11:
    print("Using cxx11 abi")

# Set the USE_HOST_DEPS=1 to use host dependencies for tests
USE_HOST_DEPS=0 if not 'USE_HOST_DEPS' in os.environ else os.environ["USE_HOST_DEPS"]
if USE_HOST_DEPS:
    print("Using dependencies from host python")

SUPPORTED_PYTHON_VERSIONS=["3.7", "3.8", "3.9", "3.10"]

@@ -58,6 +64,12 @@ def download_datasets(session):

def train_model(session):
    session.chdir(os.path.join(TOP_DIR, 'examples/int8/training/vgg16'))
+    session.install("-r", "requirements.txt")
+    if os.path.exists('vgg16_ckpts/ckpt_epoch25.pth'):
+        session.run_always('python',
+                           'export_ckpt.py',
+                           'vgg16_ckpts/ckpt_epoch25.pth')
+        return
    if USE_HOST_DEPS:
        session.run_always('python',
                           'main.py',
@@ -140,14 +152,14 @@ def run_base_tests(session):
    print("Running basic tests")
    session.chdir(os.path.join(TOP_DIR, 'tests/py'))
    tests = [
-        "test_api.py",
-        "test_to_backend_api.py",
+        "api",
+        "integrations/test_to_backend_api.py",
    ]
    for test in tests:
        if USE_HOST_DEPS:
-            session.run_always('python', test, env={'PYTHONPATH': PYT_PATH})
+            session.run_always('pytest', test, env={'PYTHONPATH': PYT_PATH})
        else:
-            session.run_always("python", test)
+            session.run_always("pytest", test)

def run_accuracy_tests(session):
    print("Running accuracy tests")
@@ -169,23 +181,23 @@ def copy_model(session):
        session.run_always('cp',
                           '-rpf',
                           os.path.join(TOP_DIR, src_file),
-                          os.path.join(TOP_DIR, str('tests/py/') + file_name),
+                          os.path.join(TOP_DIR, str('tests/modules/') + file_name),
                           external=True)

def run_int8_accuracy_tests(session):
    print("Running accuracy tests")
    copy_model(session)
    session.chdir(os.path.join(TOP_DIR, 'tests/py'))
    tests = [
-        "test_ptq_dataloader_calibrator.py",
-        "test_ptq_to_backend.py",
-        "test_qat_trt_accuracy.py",
+        "ptq/test_ptq_to_backend.py",
+        "ptq/test_ptq_dataloader_calibrator.py",
+        "qat/",
    ]
    for test in tests:
        if USE_HOST_DEPS:
-            session.run_always('python', test, env={'PYTHONPATH': PYT_PATH})
+            session.run_always('pytest', test, env={'PYTHONPATH': PYT_PATH})
        else:
-            session.run_always("python", test)
+            session.run_always("pytest", test)

def run_trt_compatibility_tests(session):
    print("Running TensorRT compatibility tests")
@@ -197,9 +209,9 @@ def run_trt_compatibility_tests(session):
    ]
    for test in tests:
        if USE_HOST_DEPS:
-            session.run_always('python', test, env={'PYTHONPATH': PYT_PATH})
+            session.run_always('pytest', test, env={'PYTHONPATH': PYT_PATH})
        else:
-            session.run_always("python", test)
+            session.run_always("pytest", test)

def run_dla_tests(session):
    print("Running DLA tests")

@@ -209,9 +221,9 @@ def run_dla_tests(session):
    ]
    for test in tests:
        if USE_HOST_DEPS:
-            session.run_always('python', test, env={'PYTHONPATH': PYT_PATH})
+            session.run_always('pytest', test, env={'PYTHONPATH': PYT_PATH})
        else:
-            session.run_always("python", test)
+            session.run_always("pytest", test)

def run_multi_gpu_tests(session):
    print("Running multi GPU tests")

@@ -221,9 +233,9 @@ def run_multi_gpu_tests(session):
    ]
    for test in tests:
        if USE_HOST_DEPS:
-            session.run_always('python', test, env={'PYTHONPATH': PYT_PATH})
+            session.run_always('pytest', test, env={'PYTHONPATH': PYT_PATH})
        else:
-            session.run_always("python", test)
+            session.run_always("pytest", test)

def run_l0_api_tests(session):
    if not USE_HOST_DEPS:
@@ -245,7 +257,6 @@ def run_l1_accuracy_tests(session):
    if not USE_HOST_DEPS:
        install_deps(session)
        install_torch_trt(session)
-    download_models(session)
    download_datasets(session)
    train_model(session)
    run_accuracy_tests(session)

@@ -255,7 +266,6 @@ def run_l1_int8_accuracy_tests(session):
    if not USE_HOST_DEPS:
        install_deps(session)
        install_torch_trt(session)
-    download_models(session)
    download_datasets(session)
    train_model(session)
    finetune_model(session)
@@ -313,4 +323,8 @@ def l2_multi_gpu_tests(session):
@nox.session(python=SUPPORTED_PYTHON_VERSIONS, reuse_venv=True)
def download_test_models(session):
    """Grab all the models needed for testing"""
+    try:
+        import torch
+    except ModuleNotFoundError:
+        install_deps(session)
    download_models(session)
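A note on the test-runner switch above: the sessions now hand paths to pytest instead of executing each file with python, so list entries can be whole directories ("api", "qat/") and pytest collects everything underneath them. A minimal standalone sketch of what the updated sessions effectively run (illustrative only, outside of nox; the paths are the ones referenced in the diff):

# Sketch only, not part of the PR: roughly what the updated sessions invoke.
import os
import pytest

os.chdir("tests/py")  # the sessions chdir here before running the suites
# pytest accepts directories as well as single test files, which is what lets
# the lists above shrink to entries like "api", "ptq/..." and "qat/".
exit_code = pytest.main(["api", "integrations/test_to_backend_api.py"])
raise SystemExit(exit_code)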
4 changes: 1 addition & 3 deletions py/requirements.txt
@@ -1,5 +1,3 @@
--f https://download.pytorch.org/whl/torch_stable.html
--f https://download.pytorch.org/whl/torch/
--extra-index-url https://download.pytorch.org/whl/cu113
-torch==1.11.0+cu113
+torch==1.11.0
pybind11==2.6.2
6 changes: 5 additions & 1 deletion py/torch_tensorrt/_util.py
@@ -1,6 +1,8 @@
from torch_tensorrt import __version__
from torch_tensorrt import _C

+import torch
+

def dump_build_info():
    """Prints build information about the torch_tensorrt distribution to stdout
@@ -15,7 +17,9 @@ def get_build_info() -> str:
        str: String containing the build information for torch_tensorrt distribution
    """
    build_info = _C.get_build_info()
-    build_info = "Torch-TensorRT Version: " + str(__version__) + '\n' + build_info
+    build_info = "Torch-TensorRT Version: " + str(__version__) + '\n' \
+                 + "Using PyTorch Version: " + str(torch.__version__) + '\n' \
+                 + build_info
    return build_info
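With the _util.py change, the build report carries the PyTorch version the extension was imported against, alongside the Torch-TensorRT version. A small usage sketch, assuming get_build_info and dump_build_info are re-exported at the package level as in released wheels:

# Sketch only: checking the extended build report introduced by this PR.
import torch_tensorrt

info = torch_tensorrt.get_build_info()
assert info.startswith("Torch-TensorRT Version: ")
assert "Using PyTorch Version: " in info  # the line added in this change

torch_tensorrt.dump_build_info()  # prints the same report to stdout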
4 changes: 2 additions & 2 deletions tests/modules/hub.py
@@ -80,7 +80,7 @@
        "model": timm.create_model('vit_base_patch16_224', pretrained=True),
        "path": "script"
    },
-    "pool": {
+    "pooling": {
        "model": cm.Pool(),
        "path": "trace"
    },
@@ -104,7 +104,7 @@
        "model": cm.FallbackInplaceOPIf(),
        "path": "script"
    },
-    "bert-base-uncased": {
+    "bert_base_uncased": {
        "model": cm.BertModule(),
        "path": "trace"
    }
2 changes: 0 additions & 2 deletions tests/modules/requirements.txt
@@ -1,4 +1,2 @@
--f https://download.pytorch.org/whl/torch_stable.html
-#torch==1.11.0+cu113
timm==v0.4.12
transformers==4.17.0