# Azure DevOps pipeline: 3.x PyTorch FP8 unit tests on Gaudi (Habana) agents.
# Runs the UT suite twice (PR source and baseline) and then compares coverage.

# No CI trigger: this pipeline runs only for pull requests.
trigger: none

pr:
  autoCancel: true   # cancel in-progress runs when the PR is updated
  drafts: false      # do not run on draft PRs
  branches:
    include:
      - master
  paths:
    include:
      - neural_compressor/common
      - setup.py
      - requirements_pt.txt
      - .azure-pipelines/scripts/ut/3x/run_3x_pt_fp8.sh

# Self-hosted agent pool with Gaudi accelerators.
pool: GAUDI

variables:
  IMAGE_NAME: "neural-compressor"
  IMAGE_TAG: "py310"
  UPLOAD_PATH: $(Build.SourcesDirectory)/log_dir
  DOWNLOAD_PATH: $(Build.SourcesDirectory)/log_dir
  ARTIFACT_NAME: "UT_coverage_report_3x_pt_fp8"
  REPO: $(Build.Repository.Uri)

stages:
  # UT run against the PR source (pulled image, common docker config).
  - stage: Torch_habana
    displayName: Torch 3x Habana FP8
    dependsOn: []
    jobs:
      - job:
        displayName: Torch 3x Habana FP8
        steps:
          - template: template/ut-template.yml
            parameters:
              imageSource: "pull"
              dockerConfigName: "commonDockerConfig"
              utScriptFileName: "3x/run_3x_pt_fp8"
              uploadPath: $(UPLOAD_PATH)
              utArtifact: "ut_3x_pt_fp8"

  # Baseline UT run using a fresh git clone inside the container, so the
  # Coverage stage can diff PR coverage against the target branch.
  - stage: Torch_habana_baseline
    displayName: Torch 3x Habana FP8 baseline
    dependsOn: []
    jobs:
      - job:
        displayName: Torch 3x Habana FP8 baseline
        steps:
          - template: template/ut-template.yml
            parameters:
              imageSource: "pull"
              dockerConfigName: "gitCloneDockerConfig"
              utScriptFileName: "3x/run_3x_pt_fp8"
              uploadPath: $(UPLOAD_PATH)
              utArtifact: "ut_3x_pt_fp8"

  # Collect both coverage artifacts and publish the comparison report.
  - stage: Coverage
    displayName: "Coverage Compare"
    pool:
      vmImage: "ubuntu-latest"
    dependsOn: [Torch_habana, Torch_habana_baseline]
    jobs:
      - job: CollectDatafiles
        steps:
          - script: |
              if [[ ! $(docker images | grep -i ${IMAGE_NAME}:${IMAGE_TAG}) ]]; then
                docker build -f ${BUILD_SOURCESDIRECTORY}/.azure-pipelines/docker/Dockerfile.devel -t ${IMAGE_NAME}:${IMAGE_TAG} .
              fi
              docker images | grep -i ${IMAGE_NAME}
              if [[ $? -ne 0 ]]; then
                echo "NO Such Repo"
                exit 1
              fi
            displayName: "Build develop docker image"

          # Pull the .coverage files produced by the two UT stages above.
          - task: DownloadPipelineArtifact@2
            inputs:
              artifact:
              patterns: '*_coverage/.coverage'
              path: $(DOWNLOAD_PATH)

          - script: |
              echo "--- create container ---"
              docker run -d -it --name="collectLogs" -v ${BUILD_SOURCESDIRECTORY}:/neural-compressor ${IMAGE_NAME}:${IMAGE_TAG} /bin/bash
              echo "--- docker ps ---"
              docker ps
              echo "--- collect logs ---"
              docker exec collectLogs /bin/bash +x -c "cd /neural-compressor/.azure-pipelines/scripts \
              && bash install_nc.sh 3x_pt_fp8 \
              && bash ut/3x/collect_log_3x.sh 3x_pt_fp8"
            displayName: "Collect UT Coverage"

          # Publish the coverage report even if the compare/collect step failed,
          # so the report is available for debugging.
          - task: PublishPipelineArtifact@1
            condition: succeededOrFailed()
            inputs:
              targetPath: $(UPLOAD_PATH)
              artifact: $(ARTIFACT_NAME)
              publishLocation: "pipeline"

          # Always scrub the workspace mounted into the container.
          - task: Bash@3
            condition: always()
            inputs:
              targetType: "inline"
              script: |
                docker exec collectLogs bash -c "rm -fr /neural-compressor/* && rm -fr /neural-compressor/.* || true"
            displayName: "Docker clean up"