diff --git a/eng/performance/helix.proj b/eng/performance/helix.proj
index b86eb2a4a13..0d92455c587 100644
--- a/eng/performance/helix.proj
+++ b/eng/performance/helix.proj
@@ -11,56 +11,72 @@
15
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
$(WorkItemDirectory)
- $(WorkItemCommand) --partition=%(HelixWorkItem.Index) --bdn-arguments="$(BenchmarkDotNetArguments) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+ $(BaselineWorkItemCommand) --partition=%(HelixWorkItem.Index) --bdn-arguments="$(BaselineBenchmarkDotNetArguments) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+ $(WorkItemCommand) --partition=%(HelixWorkItem.Index) --bdn-arguments="$(BenchmarkDotNetArguments) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)"
+
+ if [ "x$PERF_PREREQS_INSTALL_FAILED" = "x1" ]; then
+ echo "\n\n** Error: Failed to install prerequisites **\n\n"; (exit 1);
+ else
+ $(WorkItemCommand) --partition=%(HelixWorkItem.Index) --bdn-arguments="$(BenchmarkDotNetArguments) --partition-count $(PartitionCount) --partition-index %(HelixWorkItem.Index)";
+ fi
+
+ $(CompareCommand)
$(WorkItemTimeout)
Partition%(HelixWorkItem.Index)-combined-perf-lab-report.json
-
+
$(WorkItemDirectory)
- $(WorkItemCommand) --bdn-arguments="$(BenchmarkDotNetArguments)"
+ $(BaselineWorkItemCommand) --bdn-arguments="$(BaselineBenchmarkDotNetArguments)"
+ $(WorkItemCommand) --bdn-arguments="$(BenchmarkDotNetArguments)"
+
+ if [ "x$PERF_PREREQS_INSTALL_FAILED" = "x1" ]; then
+ echo "\n\n** Error: Failed to install prerequisites **\n\n"; (exit 1);
+ else
+ $(WorkItemCommand) --bdn-arguments="$(BenchmarkDotNetArguments)";
+ fi
+
+ $(CompareCommand)
$(WorkItemTimeout)
combined-perf-lab-report.json
diff --git a/scripts/ci_setup.py b/scripts/ci_setup.py
index fb5b1f2b84b..c3376c769b2 100644
--- a/scripts/ci_setup.py
+++ b/scripts/ci_setup.py
@@ -440,8 +440,7 @@ def main(args: Any):
output_file += extension
dir_path = os.path.dirname(output_file)
- if not os.path.isdir(dir_path):
- os.mkdir(dir_path)
+ os.makedirs(dir_path, exist_ok=True)
if not framework.startswith('net4'):
target_framework_moniker = dotnet.FrameworkAction.get_target_framework_moniker(framework)
diff --git a/scripts/dotnet.py b/scripts/dotnet.py
index 9277cf89303..211ac85a497 100755
--- a/scripts/dotnet.py
+++ b/scripts/dotnet.py
@@ -822,8 +822,7 @@ def install(
if not install_dir:
install_dir = __get_directory(architecture)
- if not path.exists(install_dir):
- makedirs(install_dir)
+ makedirs(install_dir, exist_ok=True)
getLogger().info("DotNet Install Path: '%s'", install_dir)
diff --git a/scripts/performance/common.py b/scripts/performance/common.py
index 3e158d90d38..df58c05570b 100644
--- a/scripts/performance/common.py
+++ b/scripts/performance/common.py
@@ -66,8 +66,7 @@ def make_directory(path: str):
'''Creates a directory.'''
if not path:
raise TypeError('Undefined path.')
- if not os.path.isdir(path):
- os.makedirs(path)
+ os.makedirs(path, exist_ok=True)
def remove_directory(path: str) -> None:
diff --git a/scripts/performance/logger.py b/scripts/performance/logger.py
index 42602a5dcf3..dcb303c1c50 100644
--- a/scripts/performance/logger.py
+++ b/scripts/performance/logger.py
@@ -77,8 +77,7 @@ def __initialize(verbose: bool):
def __generate_log_file_name(launch_datetime: datetime) -> str:
'''Generates a unique log file name for the current script.'''
log_dir = path.join(get_repo_root_path(), 'logs')
- if not path.exists(log_dir):
- makedirs(log_dir)
+ makedirs(log_dir, exist_ok=True)
if not hasattr(__main__, '__file__'):
script_name = 'python_interactive_mode'
diff --git a/scripts/run_performance_job.py b/scripts/run_performance_job.py
index a8da016e86a..294ff80b6fd 100644
--- a/scripts/run_performance_job.py
+++ b/scripts/run_performance_job.py
@@ -1,3 +1,4 @@
+from logging import getLogger
import re
from dataclasses import dataclass, field
from datetime import timedelta
@@ -5,13 +6,17 @@
import json
import os
import shutil
+from subprocess import CalledProcessError
import sys
+import tempfile
+from traceback import format_exc
import urllib.request
import xml.etree.ElementTree as ET
from typing import Any, Dict, List, Optional
import ci_setup
from performance.common import RunCommand, set_environment_variable
+from performance.logger import setup_loggers
from send_to_helix import PerfSendToHelixArgs, perf_send_to_helix
def output_counters_for_crank(reports: List[Any]):
@@ -50,19 +55,21 @@ def output_counters_for_crank(reports: List[Any]):
@dataclass
class RunPerformanceJobArgs:
- queue: str
run_kind: str
architecture: str
os_group: str
+ logical_machine: Optional[str] = None
+ queue: Optional[str] = None
framework: Optional[str] = None
performance_repo_dir: str = "."
+ runtime_repo_dir: Optional[str] = None
core_root_dir: Optional[str] = None
baseline_core_root_dir: Optional[str] = None
mono_dotnet_dir: Optional[str] = None
libraries_download_dir: Optional[str] = None
versions_props_path: Optional[str] = None
- chrome_versions_props_path: Optional[str] = None
+ browser_versions_props_path: Optional[str] = None
built_app_dir: Optional[str] = None
extra_bdn_args: Optional[str] = None
run_categories: str = 'Libraries Runtime'
@@ -118,11 +125,6 @@ def get_pre_commands(args: RunPerformanceJobArgs, v8_version: str):
else:
helix_pre_commands += ["export ORIGPYPATH=$PYTHONPATH"]
- # Allow using OpenSSL 1.0.2: https://github.com/dotnet/runtime/pull/60728
- # TODO: Is this still needed?
- if args.os_group != "windows":
- helix_pre_commands += ["export CRYPTOGRAPHY_ALLOW_OPENSSL_102=true"]
-
# Create separate list of commands to handle the next part.
# On non-Windows, these commands are chained together with && so they will stop if any fail
install_prerequisites: list[str] = []
@@ -144,7 +146,12 @@ def get_pre_commands(args: RunPerformanceJobArgs, v8_version: str):
]
else:
if args.os_group != "osx" and args.os_sub_group != "_musl":
- install_prerequisites += ["sudo apt-get -y install python3-pip python3-venv"]
+ install_prerequisites += [
+ 'echo "** Waiting for dpkg to unlock (up to 2 minutes) **"',
+ 'timeout 2m bash -c \'while sudo fuser /var/lib/dpkg/lock-frontend >/dev/null 2>&1; do if [ -z "$printed" ]; then echo "Waiting for dpkg lock to be released... Lock is held by: $(ps -o cmd= -p $(sudo fuser /var/lib/dpkg/lock-frontend))"; printed=1; fi; echo "Waiting 5 seconds to check again"; sleep 5; done;\'',
+ "sudo apt-get remove -y lttng-modules-dkms", # https://github.com/dotnet/runtime/pull/101142
+ "sudo apt-get -y install python3-pip"
+ ]
install_prerequisites += [
"python3 -m venv $HELIX_WORKITEM_ROOT/.venv",
@@ -159,13 +166,13 @@ def get_pre_commands(args: RunPerformanceJobArgs, v8_version: str):
    # Install python packages needed to upload results to azure storage
install_prerequisites += [
- f"python -m pip install -U pip --force-reinstall",
- f"python -m pip install azure.storage.blob==12.13.0 --force-reinstall",
- f"python -m pip install azure.storage.queue==12.4.0 --force-reinstall",
- f"python -m pip install azure.identity==1.16.1 --force-reinstall",
- f"python -m pip install urllib3==1.26.19 --force-reinstall",
- f"python -m pip install opentelemetry-api==1.23.0 --force-reinstall",
- f"python -m pip install opentelemetry-sdk==1.23.0 --force-reinstall",
+ f"python -m pip install -U pip",
+ f"python -m pip install azure.storage.blob==12.13.0",
+ f"python -m pip install azure.storage.queue==12.4.0",
+ f"python -m pip install azure.identity==1.16.1",
+ f"python -m pip install urllib3==1.26.19",
+ f"python -m pip install opentelemetry-api==1.23.0",
+ f"python -m pip install opentelemetry-sdk==1.23.0",
]
# Install prereqs for NodeJS https://github.com/dotnet/runtime/pull/40667
@@ -190,6 +197,7 @@ def get_pre_commands(args: RunPerformanceJobArgs, v8_version: str):
"export NODE_MAJOR=18",
"echo \"deb [signed-by=/etc/apt/keyrings/nodesource.gpg] https://deb.nodesource.com/node_$NODE_MAJOR.x nodistro main\" | sudo tee /etc/apt/sources.list.d/nodesource.list",
"sudo apt-get update",
+ "sudo apt autoremove -y",
"sudo apt-get install nodejs -y",
f"test -n \"{v8_version}\"",
"npm install --prefix $HELIX_WORKITEM_ROOT jsvu -g",
@@ -251,10 +259,17 @@ def get_pre_commands(args: RunPerformanceJobArgs, v8_version: str):
]
# ensure that the PYTHONPATH is set to the scripts directory
+ # TODO: Run scripts out of work item directory instead of payload directory
if args.os_group == "windows":
- helix_pre_commands += ["set PYTHONPATH=%HELIX_WORKITEM_ROOT%\\scripts%3B%HELIX_WORKITEM_ROOT%"]
+ helix_pre_commands += ["set PYTHONPATH=%HELIX_CORRELATION_PAYLOAD%\\scripts%3B%HELIX_CORRELATION_PAYLOAD%"]
else:
- helix_pre_commands += ["export PYTHONPATH=$HELIX_WORKITEM_ROOT/scripts:$HELIX_WORKITEM_ROOT"]
+ helix_pre_commands += ["export PYTHONPATH=$HELIX_CORRELATION_PAYLOAD/scripts:$HELIX_CORRELATION_PAYLOAD"]
+
+ if args.runtime_type == "iOSMono":
+ if args.os_group == "windows":
+ helix_pre_commands += ["%HELIX_CORRELATION_PAYLOAD%\\monoaot\\mono-aot-cross --llvm --version"]
+ else:
+ helix_pre_commands += ["$HELIX_CORRELATION_PAYLOAD/monoaot/mono-aot-cross --llvm --version"]
return helix_pre_commands
@@ -264,11 +279,63 @@ def get_post_commands(args: RunPerformanceJobArgs):
else:
helix_post_commands = ["export PYTHONPATH=$ORIGPYPATH"]
- # TODO: Support custom helix log collection in post command
+ if args.runtime_type == "wasm" and args.os_group != "windows":
+ helix_post_commands += [
+ """test -d "$HELIX_WORKITEM_UPLOAD_ROOT" && (
+ export _PERF_DIR=$HELIX_WORKITEM_ROOT/performance;
+ mkdir -p $HELIX_WORKITEM_UPLOAD_ROOT/log;
+ find $_PERF_DIR -name '*.binlog' | xargs -I{} cp {} $HELIX_WORKITEM_UPLOAD_ROOT/log;
+ test "$_commandExitCode" -eq 0 || (
+ mkdir -p $HELIX_WORKITEM_UPLOAD_ROOT/log/MicroBenchmarks/obj;
+ mkdir -p $HELIX_WORKITEM_UPLOAD_ROOT/log/MicroBenchmarks/bin;
+ mkdir -p $HELIX_WORKITEM_UPLOAD_ROOT/log/BenchmarkDotNet.Autogenerated/obj;
+ mkdir -p $HELIX_WORKITEM_UPLOAD_ROOT/log/for-running;
+ cp -R $_PERF_DIR/artifacts/obj/MicroBenchmarks $HELIX_WORKITEM_UPLOAD_ROOT/log/MicroBenchmarks/obj;
+ cp -R $_PERF_DIR/artifacts/bin/MicroBenchmarks $HELIX_WORKITEM_UPLOAD_ROOT/log/MicroBenchmarks/bin;
+ cp -R $_PERF_DIR/artifacts/obj/BenchmarkDotNet.Autogenerated $HELIX_WORKITEM_UPLOAD_ROOT/log/BenchmarkDotNet.Autogenerated/obj;
+ cp -R $_PERF_DIR/artifacts/bin/for-running $HELIX_WORKITEM_UPLOAD_ROOT/log/for-running))"""]
+
return helix_post_commands
+def logical_machine_to_queue(logical_machine: str, internal: bool, os_group: str, architecture: str, alpine: bool):
+ if os_group == "windows":
+ if not internal:
+ return "Windows.10.Amd64.ClientRS4.DevEx.15.8.Open"
+ else:
+ queue_map = {
+ "perftiger": "Windows.11.Amd64.Tiger.Perf",
+ "perftiger_crossgen": "Windows.11.Amd64.Tiger.Perf",
+ "perfowl": "Windows.11.Amd64.Owl.Perf",
+ "perfsurf": "Windows.11.Arm64.Surf.Perf",
+ "perfpixel4a": "Windows.11.Amd64.Pixel.Perf",
+ "perfampere": "Windows.Server.Arm64.Perf",
+ "perfviper": "Windows.11.Amd64.Viper.Perf",
+ "cloudvm": "Windows.10.Amd64"
+ }
+ return queue_map.get(logical_machine, "Windows.11.Amd64.Tiger.Perf")
+ else:
+ if alpine:
+ # this is the same for both public and internal
+ return "alpine.amd64.tiger.perf"
+ elif not internal:
+ if architecture == "arm64":
+ return "ubuntu.1804.armarch.open"
+ else:
+ return "Ubuntu.2204.Amd64.Open"
+ else:
+ queue_map = {
+ "perfampere": "Ubuntu.2204.Arm64.Perf",
+ "perfiphone12mini": "OSX.13.Amd64.Iphone.Perf",
+ "perfowl": "Ubuntu.2204.Amd64.Owl.Perf",
+ "perftiger_crossgen": "Ubuntu.1804.Amd64.Tiger.Perf",
+ "perfviper": "Ubuntu.2204.Amd64.Viper.Perf",
+ "cloudvm": "Ubuntu.2204.Amd64"
+ }
+ return queue_map.get(logical_machine, "Ubuntu.2204.Amd64.Tiger.Perf")
def run_performance_job(args: RunPerformanceJobArgs):
+ setup_loggers(verbose=True)
+
helix_type_suffix = ""
if args.runtime_type == "wasm":
if args.codegen_type == "AOT":
@@ -276,6 +343,12 @@ def run_performance_job(args: RunPerformanceJobArgs):
else:
helix_type_suffix = "/wasm"
+ alpine = args.runtime_type == "coreclr" and args.os_sub_group == "_musl"
+ if args.queue is None:
+ if args.logical_machine is None:
+ raise Exception("Either queue or logical machine must be specified")
+ args.queue = logical_machine_to_queue(args.logical_machine, args.internal, args.os_group, args.architecture, alpine)
+
if args.performance_repo_ci:
# needs to be unique to avoid logs overwriting in mc.dot.net
build_config = f"{args.architecture}_{args.channel}_{args.run_kind}"
@@ -299,8 +372,8 @@ def run_performance_job(args: RunPerformanceJobArgs):
if args.perflab_upload_token is None:
env_var_name = "PerfCommandUploadToken" if args.os_group == "windows" else "PerfCommandUploadTokenLinux"
args.perflab_upload_token = os.environ.get(env_var_name)
- if args.perflab_upload_token is None:
- print(f"WARNING: {env_var_name} is not set. Results will not be uploaded.")
+ if args.perflab_upload_token is None and args.internal:
+ getLogger().info(f"{env_var_name} is not set. This may be needed for results to be uploaded.")
args.performance_repo_dir = os.path.abspath(args.performance_repo_dir)
@@ -314,23 +387,19 @@ def run_performance_job(args: RunPerformanceJobArgs):
elif args.os_group != "windows":
args.target_csproj = args.target_csproj.replace("\\", "/")
+ if args.libraries_download_dir is None and not args.performance_repo_ci and args.runtime_repo_dir is not None:
+ args.libraries_download_dir = os.path.join(args.runtime_repo_dir, "artifacts")
- llvm = False
- android_mono = False
- mono_dotnet = None
+ llvm = args.codegen_type == "AOT" and args.runtime_type != "wasm"
+ android_mono = args.runtime_type == "AndroidMono"
+ ios_mono = args.runtime_type == "iOSMono"
+ ios_nativeaot = args.runtime_type == "iOSNativeAOT"
mono_aot = False
- ios_mono = False
+ mono_aot_path = None
+ mono_dotnet = None
wasm_bundle_dir = None
wasm_aot = False
- ios_nativeaot = False
- mono_aot_path = None
- if args.runtime_type == "AndroidMono":
- android_mono = True
- elif args.runtime_type == "iOSMono":
- ios_mono = True
- elif args.runtime_type == "iOSNativeAOT":
- ios_nativeaot = True
- elif args.runtime_type == "mono":
+ if args.runtime_type == "mono":
if args.codegen_type == "AOT":
if args.libraries_download_dir is None:
raise Exception("Libraries not downloaded for MonoAOT")
@@ -339,6 +408,10 @@ def run_performance_job(args: RunPerformanceJobArgs):
mono_aot_path = os.path.join(args.libraries_download_dir, "bin", "aot")
else:
mono_dotnet = args.mono_dotnet_dir
+ if mono_dotnet is None:
+ if args.runtime_repo_dir is None:
+ raise Exception("Mono directory must be passed in for mono runs")
+ mono_dotnet = os.path.join(args.runtime_repo_dir, ".dotnet-mono")
elif args.runtime_type == "wasm":
if args.libraries_download_dir is None:
raise Exception("Libraries not downloaded for WASM")
@@ -346,26 +419,25 @@ def run_performance_job(args: RunPerformanceJobArgs):
wasm_bundle_dir = os.path.join(args.libraries_download_dir, "bin", "wasm")
if args.codegen_type == "AOT":
wasm_aot = True
- elif args.runtime_type == "coreclr":
- if args.os_sub_group == "_musl":
- alpine = True
working_dir = os.path.join(args.performance_repo_dir, "CorrelationStaging") # folder in which the payload and workitem directories will be made
work_item_dir = os.path.join(working_dir, "workitem", "") # Folder in which the work item commands will be run in
payload_dir = os.path.join(working_dir, "payload", "") # Uploaded folder containing everything needed to run the performance test
root_payload_dir = os.path.join(payload_dir, "root") # folder that will get copied into the root of the payload directory
- os.makedirs(root_payload_dir, exist_ok=True)
# clear payload directory
if os.path.exists(working_dir):
- print("Clearing existing payload directory")
+ getLogger().info("Clearing existing payload directory")
shutil.rmtree(working_dir)
+ # ensure directories exist
+ os.makedirs(work_item_dir, exist_ok=True)
+ os.makedirs(root_payload_dir, exist_ok=True)
+
# Include a copy of the whole performance in the payload directory
performance_payload_dir = os.path.join(payload_dir, "performance")
- print("Copying performance repository to payload directory")
- shutil.copytree(args.performance_repo_dir, performance_payload_dir, ignore=shutil.ignore_patterns("CorrelationStaging", ".git", "artifacts", ".dotnet", ".venv"))
- print("Finished copying performance repository to payload directory")
+ getLogger().info("Copying performance repository to payload directory")
+ shutil.copytree(args.performance_repo_dir, performance_payload_dir, ignore=shutil.ignore_patterns("CorrelationStaging", ".git", "artifacts", ".dotnet", ".venv", ".vs"))
bdn_arguments = ["--anyCategories", args.run_categories]
@@ -381,6 +453,7 @@ def run_performance_job(args: RunPerformanceJobArgs):
raise Exception("HelixAccessToken environment variable is not configured")
else:
args.helix_access_token = None
+ os.environ.pop("HelixAccessToken", None) # in case the environment variable is set on the system already
args.perflab_upload_token = ""
extra_bdn_arguments += [
"--iterationCount", "1",
@@ -487,9 +560,9 @@ def run_performance_job(args: RunPerformanceJobArgs):
ci_setup_arguments.commit_sha = args.perf_repo_hash
if args.use_local_commit_time:
- get_commit_time_command = RunCommand(["git", "show", "-s", "--format=%ci", args.perf_repo_hash])
- get_commit_time_command.run()
- ci_setup_arguments.commit_time = f"\"{get_commit_time_command.stdout}\""
+ get_commit_time_command = RunCommand(["git", "show", "-s", "--format=%ci", args.perf_repo_hash], verbose=True)
+ get_commit_time_command.run(args.runtime_repo_dir)
+ ci_setup_arguments.commit_time = f"{get_commit_time_command.stdout.strip()}"
# not_in_lab should stay False for internal dotnet performance CI runs
if not args.internal and not args.performance_repo_ci:
@@ -497,23 +570,26 @@ def run_performance_job(args: RunPerformanceJobArgs):
if mono_dotnet is not None:
mono_dotnet_path = os.path.join(payload_dir, "dotnet-mono")
+ getLogger().info("Copying mono dotnet directory to payload directory")
shutil.copytree(mono_dotnet, mono_dotnet_path)
v8_version = ""
if wasm_bundle_dir is not None:
- wasm_bundle_dir = payload_dir
- shutil.copytree(wasm_bundle_dir, wasm_bundle_dir)
+ wasm_bundle_dir_path = payload_dir
+ getLogger().info("Copying wasm bundle directory to payload directory")
+ shutil.copytree(wasm_bundle_dir, wasm_bundle_dir_path, dirs_exist_ok=True)
- # Ensure there is a space at the beginning, so BDN can correctly read them as arguments to `--wasmArgs`
- wasm_args = " --expose_wasm"
+ wasm_args = "--expose_wasm"
if args.javascript_engine == "v8":
- if args.chrome_versions_props_path is None:
- raise Exception("ChromeVersion.props must be present for wasm runs")
+ if args.browser_versions_props_path is None:
+ if args.runtime_repo_dir is None:
+ raise Exception("BrowserVersions.props must be present for wasm runs")
+ args.browser_versions_props_path = os.path.join(args.runtime_repo_dir, "eng", "testing", "BrowserVersions.props")
wasm_args += " --module"
- with open(args.chrome_versions_props_path) as f:
+ with open(args.browser_versions_props_path) as f:
for line in f:
match = re.search(r"linux_V8Version>([^<]*)<", line)
if match:
@@ -521,7 +597,7 @@ def run_performance_job(args: RunPerformanceJobArgs):
v8_version = ".".join(v8_version.split(".")[:3])
break
else:
- raise Exception("Unable to find v8 version in ChromeVersions.props")
+ raise Exception("Unable to find v8 version in BrowserVersions.props")
if args.javascript_engine_path is None:
args.javascript_engine_path = f"/home/helixbot/.jsvu/bin/v8-{v8_version}"
@@ -531,7 +607,7 @@ def run_performance_job(args: RunPerformanceJobArgs):
extra_bdn_arguments += [
"--wasmEngine", args.javascript_engine_path,
- f"\"--wasmArgs={wasm_args}\""
+ f"\\\"--wasmArgs={wasm_args}\\\"",
"--cli", "$HELIX_CORRELATION_PAYLOAD/dotnet/dotnet",
"--wasmDataDir", "$HELIX_CORRELATION_PAYLOAD/wasm-data"
]
@@ -542,7 +618,7 @@ def run_performance_job(args: RunPerformanceJobArgs):
"--buildTimeout", "3600"
]
- ci_setup_arguments.dotnet_path = f"{wasm_bundle_dir}/dotnet"
+ ci_setup_arguments.dotnet_path = f"{wasm_bundle_dir_path}/dotnet"
if args.dotnet_version_link is not None:
if args.dotnet_version_link.startswith("https"): # Version link is a proper url
@@ -583,6 +659,7 @@ def run_performance_job(args: RunPerformanceJobArgs):
if mono_aot_path is None:
raise Exception("Mono AOT Path must be provided for MonoAOT runs")
monoaot_dotnet_path = os.path.join(payload_dir, "monoaot")
+ getLogger().info("Copying MonoAOT build to payload directory")
shutil.copytree(mono_aot_path, monoaot_dotnet_path)
extra_bdn_arguments += [
"--runtimes", "monoaotllvm",
@@ -593,45 +670,69 @@ def run_performance_job(args: RunPerformanceJobArgs):
extra_bdn_arguments += ["--logBuildOutput", "--generateBinLog"]
+ if args.only_sanity_check:
+ extra_bdn_arguments += ["--filter", "System.Tests.Perf_*"]
+
bdn_arguments += extra_bdn_arguments
+ baseline_bdn_arguments = bdn_arguments[:]
+
use_core_run = False
- if args.core_root_dir is not None:
+ use_baseline_core_run = False
+ if not args.performance_repo_ci and args.runtime_type == "coreclr":
use_core_run = True
+ if args.core_root_dir is None:
+ if args.runtime_repo_dir is None:
+ raise Exception("Core_Root directory must be specified for non-performance CI runs")
+ args.core_root_dir = os.path.join(args.runtime_repo_dir, "artifacts", "tests", "coreclr", f"{args.os_group}.{args.architecture}.Release", "Tests", "Core_Root")
coreroot_payload_dir = os.path.join(payload_dir, "Core_Root")
+ getLogger().info("Copying Core_Root directory to payload directory")
shutil.copytree(args.core_root_dir, coreroot_payload_dir, ignore=shutil.ignore_patterns("*.pdb"))
- use_baseline_core_run = False
- if args.baseline_core_root_dir is not None:
- use_baseline_core_run = True
- baseline_coreroot_payload_dir = os.path.join(payload_dir, "Baseline_Core_Root")
- shutil.copytree(args.baseline_core_root_dir, baseline_coreroot_payload_dir)
+ if args.baseline_core_root_dir is not None:
+ use_baseline_core_run = True
+ baseline_coreroot_payload_dir = os.path.join(payload_dir, "Baseline_Core_Root")
+ getLogger().info("Copying Baseline Core_Root directory to payload directory")
+ shutil.copytree(args.baseline_core_root_dir, baseline_coreroot_payload_dir)
if args.maui_version is not None:
ci_setup_arguments.maui_version = args.maui_version
+
+ if args.built_app_dir is None:
+ if args.runtime_repo_dir is not None:
+ args.built_app_dir = args.runtime_repo_dir
+
if android_mono:
if args.built_app_dir is None:
raise Exception("Built apps directory must be present for Android Mono benchmarks")
- os.makedirs(work_item_dir, exist_ok=True)
- shutil.copy(os.path.join(args.built_app_dir, "MonoBenchmarksDroid.apk"), root_payload_dir)
- shutil.copy(os.path.join(args.built_app_dir, "androidHelloWorld", "HelloAndroid.apk"), root_payload_dir)
+ getLogger().info("Copying Android apps to payload directory")
+ shutil.copy(os.path.join(args.built_app_dir, "MonoBenchmarksDroid.apk"), os.path.join(root_payload_dir, "MonoBenchmarksDroid.apk"))
+ shutil.copy(os.path.join(args.built_app_dir, "androidHelloWorld", "HelloAndroid.apk"), os.path.join(root_payload_dir, "HelloAndroid.apk"))
ci_setup_arguments.architecture = "arm64"
if ios_mono or ios_nativeaot:
if args.built_app_dir is None:
raise Exception("Built apps directory must be present for IOS Mono or IOS Native AOT benchmarks")
- dest_zip_folder = os.path.join(payload_dir, "iosHelloWorldZip")
- shutil.copy(os.path.join(args.built_app_dir, "iosHelloWorld"), os.path.join(payload_dir, "iosHelloWorld"))
- shutil.copy(os.path.join(args.built_app_dir, "iosHelloWorldZip"), dest_zip_folder)
+ getLogger().info("Copying IOS apps to payload directory")
+ ios_hello_world_dir = os.path.join(payload_dir, "iosHelloWorld")
+ os.makedirs(ios_hello_world_dir, exist_ok=True) # need to create the dir since the files actually get copied into a child dir
+ shutil.copytree(os.path.join(args.built_app_dir, "iosHelloWorld"), os.path.join(ios_hello_world_dir, "iosHelloWorld"))
- # rename all zips in the 2nd folder to iOSSampleApp.zip
- for file in glob(os.path.join(dest_zip_folder, "*.zip")):
- os.rename(file, os.path.join(dest_zip_folder, "iOSSampleApp.zip"))
+ ios_hello_world_zip_dir = os.path.join(payload_dir, "iosHelloWorldZip")
+ shutil.copytree(os.path.join(args.built_app_dir, "iosHelloWorldZip"), ios_hello_world_zip_dir)
+
+ # Find the zip file in the directory and move it to iOSSampleApp.zip
+ for file in glob(os.path.join(ios_hello_world_zip_dir, "**", "*.zip")):
+ dest = os.path.join(ios_hello_world_zip_dir, "iOSSampleApp.zip")
+ getLogger().info(f"Moving {file} to {dest}")
+ shutil.move(file, dest)
+ break
# ensure work item directory is not empty
- shutil.copytree(os.path.join(args.performance_repo_dir, "docs"), work_item_dir)
+ getLogger().info("Copying docs to work item directory so it isn't empty")
+ shutil.copytree(os.path.join(args.performance_repo_dir, "docs"), work_item_dir, dirs_exist_ok=True)
if args.os_group == "windows":
agent_python = "py -3"
@@ -661,22 +762,23 @@ def run_performance_job(args: RunPerformanceJobArgs):
if args.channel is not None:
ci_setup_arguments.channel = args.channel
- if args.perf_repo_hash is not None:
+ if args.perf_repo_hash is not None and args.performance_repo_ci:
ci_setup_arguments.perf_hash = args.perf_repo_hash
ci_setup.main(ci_setup_arguments)
# ci_setup may modify global.json, so we should copy it across to the payload directory if that happens
# TODO: Refactor this when we eventually remove the dependency on ci_setup.py directly from the runtime repository.
+ getLogger().info("Copying global.json to payload directory")
shutil.copy(os.path.join(args.performance_repo_dir, 'global.json'), os.path.join(performance_payload_dir, 'global.json'))
if args.is_scenario:
set_environment_variable("DOTNET_ROOT", ci_setup_arguments.install_dir, save_to_pipeline=True)
- print(f"Set DOTNET_ROOT to {ci_setup_arguments.install_dir}")
+ getLogger().info(f"Set DOTNET_ROOT to {ci_setup_arguments.install_dir}")
new_path = f"{ci_setup_arguments.install_dir}{os.pathsep}{os.environ['PATH']}"
set_environment_variable("PATH", new_path, save_to_pipeline=True)
- print(f"Set PATH to {new_path}")
+ getLogger().info(f"Set PATH to {new_path}")
framework = os.environ["PERFLAB_Framework"]
os.environ["PERFLAB_TARGET_FRAMEWORKS"] = framework
@@ -717,87 +819,90 @@ def run_performance_job(args: RunPerformanceJobArgs):
"-p:DisableTransitiveFrameworkReferenceDownloads=true"],
verbose=True).run()
- # build MemoryConsumption
- RunCommand([
- dotnet_executable_path, "publish",
- "-c", "Release",
- "-o", os.path.join(payload_dir, "MemoryConsumption"),
- "-f", framework,
- "-r", runtime_id,
- "--self-contained",
- os.path.join(args.performance_repo_dir, "src", "tools", "ScenarioMeasurement", "MemoryConsumption", "MemoryConsumption.csproj"),
- f"/bl:{os.path.join(args.performance_repo_dir, 'artifacts', 'log', build_config, 'MemoryConsumption.binlog')}",
- "-p:DisableTransitiveFrameworkReferenceDownloads=true"],
- verbose=True).run()
-
- # build PerfLabGenericEventSourceForwarder
- RunCommand([
- dotnet_executable_path, "publish",
- "-c", "Release",
- "-o", os.path.join(payload_dir, "PerfLabGenericEventSourceForwarder"),
- "-f", framework,
- "-r", runtime_id,
- os.path.join(args.performance_repo_dir, "src", "tools", "PerfLabGenericEventSourceForwarder", "PerfLabGenericEventSourceForwarder", "PerfLabGenericEventSourceForwarder.csproj"),
- f"/bl:{os.path.join(args.performance_repo_dir, 'artifacts', 'log', build_config, 'PerfLabGenericEventSourceForwarder.binlog')}",
- "-p:DisableTransitiveFrameworkReferenceDownloads=true"],
- verbose=True).run()
-
- # build PerfLabGenericEventSourceLTTngProvider
- if args.os_group != "windows" and args.os_group != "osx" and args.os_version == "2204":
+ if args.performance_repo_ci:
+ # build MemoryConsumption
RunCommand([
- os.path.join(args.performance_repo_dir, "src", "tools", "PerfLabGenericEventSourceLTTngProvider", "build.sh"),
- "-o", os.path.join(payload_dir, "PerfLabGenericEventSourceForwarder")],
+ dotnet_executable_path, "publish",
+ "-c", "Release",
+ "-o", os.path.join(payload_dir, "MemoryConsumption"),
+ "-f", framework,
+ "-r", runtime_id,
+ "--self-contained",
+ os.path.join(args.performance_repo_dir, "src", "tools", "ScenarioMeasurement", "MemoryConsumption", "MemoryConsumption.csproj"),
+ f"/bl:{os.path.join(args.performance_repo_dir, 'artifacts', 'log', build_config, 'MemoryConsumption.binlog')}",
+ "-p:DisableTransitiveFrameworkReferenceDownloads=true"],
verbose=True).run()
-
- # copy PDN
- if args.os_group == "windows" and args.architecture != "x86" and args.pdn_path is not None:
- print("Copying PDN")
- pdn_dest = os.path.join(payload_dir, "PDN")
- pdn_file_path = os.path.join(pdn_dest, "PDN.zip")
- os.makedirs(pdn_dest, exist_ok=True)
- shutil.copyfile(args.pdn_path, pdn_file_path)
- print(f"PDN copied to {pdn_file_path}")
-
- # create a copy of the environment since we want these to only be set during the following invocation
- environ_copy = os.environ.copy()
-
- os.environ["CorrelationPayloadDirectory"] = payload_dir
- os.environ["Architecture"] = args.architecture
- os.environ["TargetsWindows"] = "true" if args.os_group == "windows" else "false"
- os.environ["HelixTargetQueues"] = args.queue
- os.environ["Python"] = agent_python
- os.environ["RuntimeFlavor"] = args.runtime_flavor or ''
- os.environ["HybridGlobalization"] = str(args.hybrid_globalization)
-
- # TODO: See if these commands are needed for linux as they were being called before but were failing.
- if args.os_group == "windows" or args.os_group == "osx":
- RunCommand([*(agent_python.split(" ")), "-m", "pip", "install", "--user", "--upgrade", "pip"]).run()
- RunCommand([*(agent_python.split(" ")), "-m", "pip", "install", "--user", "urllib3==1.26.19"]).run()
- RunCommand([*(agent_python.split(" ")), "-m", "pip", "install", "--user", "requests"]).run()
-
- scenarios_path = os.path.join(args.performance_repo_dir, "src", "scenarios")
- script_path = os.path.join(args.performance_repo_dir, "scripts")
- os.environ["PYTHONPATH"] = f"{os.environ.get('PYTHONPATH', '')}{os.pathsep}{script_path}{os.pathsep}{scenarios_path}"
- print(f"PYTHONPATH={os.environ['PYTHONPATH']}")
-
- os.environ["DOTNET_CLI_TELEMETRY_OPTOUT"] = "1"
- os.environ["DOTNET_MULTILEVEL_LOOKUP"] = "0"
- os.environ["UseSharedCompilation"] = "false"
-
- print("Current dotnet directory:", ci_setup_arguments.install_dir)
- print("If more than one version exist in this directory, usually the latest runtime and sdk will be used.")
-
- RunCommand([
- "dotnet", "msbuild", args.project_file,
- "/restore",
- "/t:PreparePayloadWorkItems",
- f"/bl:{os.path.join(args.performance_repo_dir, 'artifacts', 'log', build_config, 'PrepareWorkItemPayloads.binlog')}"],
- verbose=True).run()
-
- # restore env vars
- os.environ.update(environ_copy)
-
- shutil.copy(os.path.join(performance_payload_dir, "NuGet.config"), root_payload_dir)
+
+ # build PerfLabGenericEventSourceForwarder
+ RunCommand([
+ dotnet_executable_path, "publish",
+ "-c", "Release",
+ "-o", os.path.join(payload_dir, "PerfLabGenericEventSourceForwarder"),
+ "-f", framework,
+ "-r", runtime_id,
+ os.path.join(args.performance_repo_dir, "src", "tools", "PerfLabGenericEventSourceForwarder", "PerfLabGenericEventSourceForwarder", "PerfLabGenericEventSourceForwarder.csproj"),
+ f"/bl:{os.path.join(args.performance_repo_dir, 'artifacts', 'log', build_config, 'PerfLabGenericEventSourceForwarder.binlog')}",
+ "-p:DisableTransitiveFrameworkReferenceDownloads=true"],
+ verbose=True).run()
+
+ # build PerfLabGenericEventSourceLTTngProvider
+ if args.os_group != "windows" and args.os_group != "osx" and args.os_version == "2204":
+ RunCommand([
+ os.path.join(args.performance_repo_dir, "src", "tools", "PerfLabGenericEventSourceLTTngProvider", "build.sh"),
+ "-o", os.path.join(payload_dir, "PerfLabGenericEventSourceForwarder")],
+ verbose=True).run()
+
+ # copy PDN
+ if args.os_group == "windows" and args.architecture != "x86" and args.pdn_path is not None:
+ pdn_dest = os.path.join(payload_dir, "PDN")
+ pdn_file_path = os.path.join(pdn_dest, "PDN.zip")
+ getLogger().info(f"Copying PDN from {args.pdn_path} to {pdn_file_path}")
+ os.makedirs(pdn_dest, exist_ok=True)
+ shutil.copyfile(args.pdn_path, pdn_file_path)
+
+ # create a copy of the environment since we want these to only be set during the following invocation
+ environ_copy = os.environ.copy()
+
+ os.environ["CorrelationPayloadDirectory"] = payload_dir
+ os.environ["Architecture"] = args.architecture
+ os.environ["TargetsWindows"] = "true" if args.os_group == "windows" else "false"
+ os.environ["HelixTargetQueues"] = args.queue
+ os.environ["Python"] = agent_python
+ os.environ["RuntimeFlavor"] = args.runtime_flavor or ''
+ os.environ["HybridGlobalization"] = str(args.hybrid_globalization)
+
+ # TODO: See if these commands are needed for linux as they were being called before but were failing.
+ if args.os_group == "windows" or args.os_group == "osx":
+ RunCommand([*(agent_python.split(" ")), "-m", "pip", "install", "--user", "--upgrade", "pip"]).run()
+ RunCommand([*(agent_python.split(" ")), "-m", "pip", "install", "--user", "urllib3==1.26.19"]).run()
+ RunCommand([*(agent_python.split(" ")), "-m", "pip", "install", "--user", "requests"]).run()
+
+ scenarios_path = os.path.join(args.performance_repo_dir, "src", "scenarios")
+ script_path = os.path.join(args.performance_repo_dir, "scripts")
+ os.environ["PYTHONPATH"] = f"{os.environ.get('PYTHONPATH', '')}{os.pathsep}{script_path}{os.pathsep}{scenarios_path}"
+ getLogger().info(f"PYTHONPATH={os.environ['PYTHONPATH']}")
+
+ os.environ["DOTNET_CLI_TELEMETRY_OPTOUT"] = "1"
+ os.environ["DOTNET_MULTILEVEL_LOOKUP"] = "0"
+ os.environ["UseSharedCompilation"] = "false"
+
+ getLogger().info("Current dotnet directory: %s", ci_setup_arguments.install_dir)
+ getLogger().info("If more than one version exist in this directory, usually the latest runtime and sdk will be used.")
+
+ # PreparePayloadWorkItems is only available for scenarios runs defined inside the performance repo
+ if args.performance_repo_ci:
+ RunCommand([
+ "dotnet", "msbuild", args.project_file,
+ "/restore",
+ "/t:PreparePayloadWorkItems",
+ f"/bl:{os.path.join(args.performance_repo_dir, 'artifacts', 'log', build_config, 'PrepareWorkItemPayloads.binlog')}"],
+ verbose=True).run()
+
+ # restore env vars
+ os.environ.update(environ_copy)
+
+ getLogger().info("Copying NuGet.config, shared, and staticdeps to payload directory")
+ shutil.copy(os.path.join(performance_payload_dir, "NuGet.config"), os.path.join(root_payload_dir, "NuGet.config"))
shutil.copytree(os.path.join(performance_payload_dir, "scripts"), os.path.join(payload_dir, "scripts"))
shutil.copytree(os.path.join(performance_payload_dir, "src", "scenarios", "shared"), os.path.join(payload_dir, "shared"))
shutil.copytree(os.path.join(performance_payload_dir, "src", "scenarios", "staticdeps"), os.path.join(payload_dir, "staticdeps"))
@@ -805,9 +910,17 @@ def run_performance_job(args: RunPerformanceJobArgs):
if args.architecture == "arm64":
dotnet_dir = os.path.join(ci_setup_arguments.install_dir, "")
arm64_dotnet_dir = os.path.join(args.performance_repo_dir, "tools", "dotnet", "arm64")
+        getLogger().info("Copying arm64 dotnet directory to payload dotnet directory")
shutil.rmtree(dotnet_dir)
shutil.copytree(arm64_dotnet_dir, dotnet_dir)
+ # Zip the workitem directory (for xharness (mobile) based workitems)
+ if args.run_kind == "ios_scenarios" or args.run_kind == "android_scenarios":
+ getLogger().info("Zipping workitem directory for app bundle")
+ with tempfile.TemporaryDirectory() as temp_dir:
+ archive_path = shutil.make_archive(os.path.join(temp_dir, 'workitem'), 'zip', work_item_dir)
+ shutil.move(archive_path, f"{work_item_dir}.zip")
+
if args.os_group == "windows":
cli_arguments = [
"--dotnet-versions", "%DOTNET_VERSION%",
@@ -832,7 +945,9 @@ def run_performance_job(args: RunPerformanceJobArgs):
if using_mono:
if args.versions_props_path is None:
- raise Exception("Version.props must be present for mono runs")
+ if args.runtime_repo_dir is None:
+ raise Exception("Version.props must be present for mono runs")
+ args.versions_props_path = os.path.join(args.runtime_repo_dir, "eng", "Versions.props")
with open(args.versions_props_path) as f:
for line in f:
@@ -854,10 +969,18 @@ def run_performance_job(args: RunPerformanceJobArgs):
else:
bdn_arguments += ["--corerun", "$HELIX_CORRELATION_PAYLOAD/Core_Root/corerun"]
+ if use_baseline_core_run:
+ if args.os_group == "windows":
+ baseline_bdn_arguments += ["--corerun", "%HELIX_CORRELATION_PAYLOAD%\\Baseline_Core_Root\\CoreRun.exe"]
+ else:
+ baseline_bdn_arguments += ["--corerun", "$HELIX_CORRELATION_PAYLOAD/Baseline_Core_Root/corerun"]
+
if args.os_group == "windows":
bdn_artifacts_directory = "%HELIX_WORKITEM_UPLOAD_ROOT%\\BenchmarkDotNet.Artifacts"
+ bdn_baseline_artifacts_dir = "%HELIX_WORKITEM_UPLOAD_ROOT%\\BenchmarkDotNet.Artifacts_Baseline"
else:
bdn_artifacts_directory = "$HELIX_WORKITEM_UPLOAD_ROOT/BenchmarkDotNet.Artifacts"
+ bdn_baseline_artifacts_dir = "$HELIX_WORKITEM_UPLOAD_ROOT/BenchmarkDotNet.Artifacts_Baseline"
if args.os_group == "windows":
work_item_command = [
@@ -876,11 +999,15 @@ def run_performance_job(args: RunPerformanceJobArgs):
"--incremental", "no",
"--architecture", args.architecture,
"-f", perf_lab_framework,
- *perf_lab_arguments,
- "--bdn-artifacts", bdn_artifacts_directory]
+ *perf_lab_arguments]
if perf_lab_framework != "net462":
work_item_command = work_item_command + cli_arguments
+
+ baseline_work_item_command = work_item_command[:]
+
+ work_item_command += ["--bdn-artifacts", bdn_artifacts_directory]
+ baseline_work_item_command += ["--bdn-artifacts", bdn_baseline_artifacts_dir]
work_item_timeout = timedelta(hours=6)
if args.only_sanity_check:
@@ -888,6 +1015,30 @@ def run_performance_job(args: RunPerformanceJobArgs):
helix_results_destination_dir=os.path.join(args.performance_repo_dir, "artifacts", "helix-results")
+ compare_command = None
+ fail_on_test_failure = True
+ if args.compare:
+ fail_on_test_failure = False
+ if args.os_group == "windows":
+ dotnet_exe = f"%HELIX_WORKITEM_ROOT%\\performance\\tools\\dotnet\\{args.architecture}\\dotnet.exe"
+ results_comparer = "%HELIX_WORKITEM_ROOT%\\performance\\src\\tools\\ResultsComparer\\ResultsComparer.csproj"
+ threshold = "2%%"
+ xml_results = "%HELIX_WORKITEM_ROOT%\\testResults.xml"
+ else:
+ dotnet_exe = f"$HELIX_WORKITEM_ROOT/performance/tools/dotnet/{args.architecture}/dotnet"
+ results_comparer = "$HELIX_WORKITEM_ROOT/performance/src/tools/ResultsComparer/ResultsComparer.csproj"
+ threshold = "2%"
+ xml_results = "$HELIX_WORKITEM_ROOT/testResults.xml"
+
+ compare_command = [
+ dotnet_exe, "run",
+ "-f", perf_lab_framework,
+ "-p", results_comparer,
+ "--base", bdn_baseline_artifacts_dir,
+ "--diff", bdn_artifacts_directory,
+ "--threshold", threshold,
+ "--xml", xml_results]
+
perf_send_to_helix_args = PerfSendToHelixArgs(
helix_source=f"{helix_source_prefix}/{args.build_repository_name}/{args.build_source_branch}",
helix_type=helix_type,
@@ -909,12 +1060,20 @@ def run_performance_job(args: RunPerformanceJobArgs):
hybrid_globalization=args.hybrid_globalization,
target_csproj=args.target_csproj,
work_item_command=work_item_command,
+ baseline_work_item_command=baseline_work_item_command,
bdn_arguments=bdn_arguments,
+ baseline_bdn_arguments=baseline_bdn_arguments,
download_files_from_helix=True,
targets_windows=args.os_group == "windows",
helix_results_destination_dir=helix_results_destination_dir,
python="python",
- affinity=args.affinity)
+ affinity=args.affinity,
+ compare=args.compare,
+ compare_command=compare_command,
+ only_sanity_check=args.only_sanity_check,
+ ios_strip_symbols=args.ios_strip_symbols,
+ ios_llvm_build=args.ios_llvm_build,
+ fail_on_test_failure=fail_on_test_failure)
if args.send_to_helix:
perf_send_to_helix(perf_send_to_helix_args)
@@ -933,91 +1092,108 @@ def run_performance_job(args: RunPerformanceJobArgs):
def main(argv: List[str]):
- args: dict[str, Any] = {}
-
- i = 1
- while i < len(argv):
- key = argv[i]
- bool_args = {
- "--internal": "internal",
- "--physical-promotion": "physical_promotion_run_type",
- "--is-scenario": "is_scenario",
- "--local-build": "local_build",
- "--compare": "compare",
- "--ios-llvm-build": "ios_llvm_build",
- "--ios-strip-symbols": "ios_strip_symbols",
- "--hybrid-globalization": "hybrid_globalization",
- "--send-to-helix": "send_to_helix",
- "--performance-repo-ci": "performance_repo_ci"
- }
-
- if key in bool_args:
- args[bool_args[key]] = True
- i += 1
- continue
-
- simple_arg_map = {
- "--queue": "queue",
- "--framework": "framework",
- "--run-kind": "run_kind",
- "--architecture": "architecture",
- "--core-root-dir": "core_root_dir",
- "--performance-repo-dir": "performance_repo_dir",
- "--mono-dotnet-dir": "mono_dotnet_dir",
- "--libraries-download-dir": "libraries_download_dir",
- "--versions-props-path": "versions_props_path",
- "--chrome-versions-props-path": "chrome_versions_props_path",
- "--built-app-dir": "built_app_dir",
- "--perflab-upload-token": "perflab_upload_token",
- "--helix-access-token": "helix_access_token",
- "--project-file": "project_file",
- "--build-repository-name": "build_repository_name",
- "--build-source-branch": "build_source_branch",
- "--build-number": "build_number",
- "--pgo-run-type": "pgo_run_type",
- "--r2r-run-type": "r2r_run_type",
- "--codegen-type": "codegen_type",
- "--runtime-type": "runtime_type",
- "--run-categories": "run_categories",
- "--extra-bdn-args": "extra_bdn_args",
- "--affinity": "affinity",
- "--os-group": "os_group",
- "--os-sub-group": "os_sub_group",
- "--runtime-flavor": "runtime_flavor",
- "--javascript-engine": "javascript_engine",
- "--experiment-name": "experiment_name",
- "--channel": "channel",
- "--perf-hash": "perf_hash",
- "--os-version": "os_version",
- "--dotnet-version-link": "dotnet_version_link",
- "--target-csproj": "target_csproj",
- "--pdn-path": "pdn_path",
- }
-
- if key in simple_arg_map:
- arg_name = simple_arg_map[key]
- val = argv[i + 1]
- elif key == "--partition-count":
- arg_name = "partition_count"
- val = int(argv[i + 1])
- elif key == "--run-env-vars":
- val = {}
- while i < len(argv):
+ setup_loggers(verbose=True)
+
+ try:
+ args: dict[str, Any] = {}
+
+ i = 1
+ while i < len(argv):
+ key = argv[i]
+ bool_args = {
+ "--internal": "internal",
+ "--physical-promotion": "physical_promotion_run_type",
+ "--is-scenario": "is_scenario",
+ "--local-build": "local_build",
+ "--compare": "compare",
+ "--ios-llvm-build": "ios_llvm_build",
+ "--ios-strip-symbols": "ios_strip_symbols",
+ "--hybrid-globalization": "hybrid_globalization",
+ "--send-to-helix": "send_to_helix",
+ "--performance-repo-ci": "performance_repo_ci",
+ "--only-sanity-check": "only_sanity_check",
+ "--use-local-commit-time": "use_local_commit_time",
+ }
+
+ if key in bool_args:
+ args[bool_args[key]] = True
i += 1
- arg = argv[i]
- if arg.startswith("--"):
- break
- k, v = arg.split("=")
- val[k] = v
- args["run_env_vars"] = val
- continue
- else:
- raise Exception(f"Invalid argument: {key}")
-
- args[arg_name] = val
- i += 2
-
- run_performance_job(RunPerformanceJobArgs(**args))
+ continue
+
+ simple_arg_map = {
+ "--queue": "queue",
+ "--framework": "framework",
+ "--run-kind": "run_kind",
+ "--architecture": "architecture",
+ "--core-root-dir": "core_root_dir",
+ "--baseline-core-root-dir": "baseline_core_root_dir",
+ "--performance-repo-dir": "performance_repo_dir",
+ "--mono-dotnet-dir": "mono_dotnet_dir",
+ "--libraries-download-dir": "libraries_download_dir",
+ "--versions-props-path": "versions_props_path",
+ "--browser-versions-props-path": "browser_versions_props_path",
+ "--built-app-dir": "built_app_dir",
+ "--perflab-upload-token": "perflab_upload_token",
+ "--helix-access-token": "helix_access_token",
+ "--project-file": "project_file",
+ "--build-repository-name": "build_repository_name",
+ "--build-source-branch": "build_source_branch",
+ "--build-number": "build_number",
+ "--pgo-run-type": "pgo_run_type",
+ "--r2r-run-type": "r2r_run_type",
+ "--codegen-type": "codegen_type",
+ "--runtime-type": "runtime_type",
+ "--run-categories": "run_categories",
+ "--extra-bdn-args": "extra_bdn_args",
+ "--affinity": "affinity",
+ "--os-group": "os_group",
+ "--os-sub-group": "os_sub_group",
+ "--runtime-flavor": "runtime_flavor",
+ "--javascript-engine": "javascript_engine",
+ "--experiment-name": "experiment_name",
+ "--channel": "channel",
+ "--perf-hash": "perf_hash",
+ "--os-version": "os_version",
+ "--dotnet-version-link": "dotnet_version_link",
+ "--target-csproj": "target_csproj",
+ "--pdn-path": "pdn_path",
+ "--runtime-repo-dir": "runtime_repo_dir",
+ "--logical-machine": "logical_machine"
+ }
+
+ if key in simple_arg_map:
+ arg_name = simple_arg_map[key]
+ val = argv[i + 1]
+ elif key == "--partition-count":
+ arg_name = "partition_count"
+ val = int(argv[i + 1])
+ elif key == "--run-env-vars":
+ val = {}
+ while i < len(argv):
+ i += 1
+ arg = argv[i]
+ if arg.startswith("--"):
+ break
+                k, v = arg.split("=", 1)
+ val[k] = v
+ args["run_env_vars"] = val
+ continue
+ else:
+ raise Exception(f"Invalid argument: {key}")
+
+ args[arg_name] = val
+ i += 2
+
+ run_performance_job(RunPerformanceJobArgs(**args))
+ return 0
+ except CalledProcessError as ex:
+ getLogger().error('Command: "%s", exited with status: %s', ex.cmd, ex.returncode)
+ except IOError as ex:
+ getLogger().error("I/O error (%s): %s: %s", ex.errno, ex.strerror, ex.filename)
+ except Exception:
+ getLogger().error('Unexpected error: %s', sys.exc_info()[0])
+ getLogger().error(format_exc())
+ return 1
if __name__ == "__main__":
main(sys.argv)
\ No newline at end of file
diff --git a/scripts/send_to_helix.py b/scripts/send_to_helix.py
index 93f2e42acc9..b46a7962340 100644
--- a/scripts/send_to_helix.py
+++ b/scripts/send_to_helix.py
@@ -44,6 +44,7 @@ class PerfSendToHelixArgs:
wait_for_work_item_completion: bool = True
creator: str = ""
helix_results_destination_dir : Optional[str] = None
+ fail_on_test_failure: bool = True
# Used by our custom .proj files
work_item_dir: str = ""
@@ -56,14 +57,21 @@ class PerfSendToHelixArgs:
# Used by BDN projects
work_item_command: Optional[List[str]] = None
+ baseline_work_item_command: Optional[List[str]] = None
partition_count: Optional[int] = None
bdn_arguments: Optional[List[str]] = None
+ baseline_bdn_arguments: Optional[List[str]] = None
+ compare: bool = False
+ compare_command: Optional[List[str]] = None
+ only_sanity_check: bool = False
# Used by scenarios projects
runtime_flavor: Optional[str] = None
hybrid_globalization: Optional[bool] = None
python: Optional[str] = None
affinity: Optional[str] = None
+ ios_strip_symbols: Optional[bool] = None
+ ios_llvm_build: Optional[bool] = None
def set_environment_variables(self, save_to_pipeline: bool = True):
def set_env_var(name: str, value: Union[str, bool, List[str], timedelta, int, None], sep = " ", save_to_pipeline=save_to_pipeline):
@@ -100,14 +108,22 @@ def set_env_var(name: str, value: Union[str, bool, List[str], timedelta, int, No
set_env_var("PartitionCount", self.partition_count)
set_env_var("RuntimeFlavor", self.runtime_flavor)
set_env_var("HybridGlobalization", self.hybrid_globalization)
+ set_env_var("iOSStripSymbols", self.ios_strip_symbols)
+ set_env_var("iOSLlvmBuild", self.ios_llvm_build)
set_env_var("TargetCsproj", self.target_csproj)
set_env_var("WorkItemCommand", self.work_item_command, sep=" ")
+ set_env_var("BaselineWorkItemCommand", self.baseline_work_item_command, sep=" ")
+ set_env_var("CompareCommand", self.compare_command, sep=" ")
set_env_var("BenchmarkDotNetArguments", self.bdn_arguments, sep=" ")
+ set_env_var("BaselineBenchmarkDotNetArguments", self.baseline_bdn_arguments, sep=" ")
set_env_var("DownloadFilesFromHelix", self.download_files_from_helix)
set_env_var("TargetsWindows", self.targets_windows)
set_env_var("HelixResultsDestinationDir", self.helix_results_destination_dir)
set_env_var("Python", self.python)
set_env_var("AffinityValue", self.affinity)
+ set_env_var("Compare", self.compare)
+ set_env_var("FailOnTestFailure", self.fail_on_test_failure)
+ set_env_var("OnlySanityCheck", self.only_sanity_check)
# The following will already be set in the CI pipeline, but are required to run Helix locally
set_env_var("BUILD_REASON", self.env_build_reason, save_to_pipeline=False)