From 1a6afc08fc57a12f327b6e634e0cb9a3c9452d1e Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Tue, 10 Sep 2019 15:50:04 +0200 Subject: [PATCH 01/18] bot: Split workflow in several hooks, fixes #14 --- bot/code_coverage_bot/cli.py | 26 +- bot/code_coverage_bot/codecov.py | 303 ------------------------ bot/code_coverage_bot/config.py | 3 + bot/code_coverage_bot/hooks/__init__.py | 0 bot/code_coverage_bot/hooks/base.py | 131 ++++++++++ bot/code_coverage_bot/hooks/cron.py | 64 +++++ bot/code_coverage_bot/hooks/mc.py | 103 ++++++++ bot/code_coverage_bot/hooks/try_repo.py | 48 ++++ 8 files changed, 372 insertions(+), 306 deletions(-) delete mode 100644 bot/code_coverage_bot/codecov.py create mode 100644 bot/code_coverage_bot/hooks/__init__.py create mode 100644 bot/code_coverage_bot/hooks/base.py create mode 100644 bot/code_coverage_bot/hooks/cron.py create mode 100644 bot/code_coverage_bot/hooks/mc.py create mode 100644 bot/code_coverage_bot/hooks/try_repo.py diff --git a/bot/code_coverage_bot/cli.py b/bot/code_coverage_bot/cli.py index b6601233b..de001d6ce 100644 --- a/bot/code_coverage_bot/cli.py +++ b/bot/code_coverage_bot/cli.py @@ -6,8 +6,12 @@ import argparse import os +import structlog + from code_coverage_bot import config -from code_coverage_bot.codecov import CodeCov +from code_coverage_bot.hooks.cron import CronHook +from code_coverage_bot.hooks.mc import MozillaCentralHook +from code_coverage_bot.hooks.try_repo import TryHook from code_coverage_bot.secrets import secrets from code_coverage_bot.taskcluster import taskcluster_config from code_coverage_tools.log import init_logger @@ -55,8 +59,24 @@ def main(): sentry_dsn=secrets.get("SENTRY_DSN"), ) - c = CodeCov(args.repository, args.revision, args.task_name_filter, args.cache_root) - c.go() + logger = structlog.get_logger(__name__) + + if args.revision is None: + logger.info("Running cron hook") + hook = CronHook(args.task_name_filter, args.cache_root) + + elif args.repository == config.MOZILLA_CENTRAL_REPOSITORY: + logger.info("Running Mozilla Central hook") + hook = MozillaCentralHook(args.task_name_filter, args.cache_root, args.revision) + + elif args.repository == config.TRY_REPOSITORY: + logger.info("Running Try hook") + hook = TryHook(args.task_name_filter, args.cache_root, args.revision) + + else: + raise Exception(f"Invalid configuration for {args.repository}/{args.revision}") + + hook.run() if __name__ == "__main__": diff --git a/bot/code_coverage_bot/codecov.py b/bot/code_coverage_bot/codecov.py deleted file mode 100644 index 2c1c3c617..000000000 --- a/bot/code_coverage_bot/codecov.py +++ /dev/null @@ -1,303 +0,0 @@ -# -*- coding: utf-8 -*- - -import json -import os -import tempfile -import zipfile -from datetime import datetime -from datetime import timedelta - -import hglib -import structlog - -from code_coverage_bot import chunk_mapping -from code_coverage_bot import grcov -from code_coverage_bot import hgmo -from code_coverage_bot import taskcluster -from code_coverage_bot import uploader -from code_coverage_bot.artifacts import ArtifactsHandler -from code_coverage_bot.notifier import notify_email -from code_coverage_bot.phabricator import PhabricatorUploader -from code_coverage_bot.phabricator import parse_revision_id -from code_coverage_bot.secrets import secrets -from code_coverage_bot.taskcluster import taskcluster_config -from code_coverage_bot.utils import ThreadPoolExecutorResult -from code_coverage_bot.zero_coverage import ZeroCov - -logger = structlog.get_logger(__name__) - - -HG_BASE = 
"https://hg.mozilla.org/" -MOZILLA_CENTRAL_REPOSITORY = "{}mozilla-central".format(HG_BASE) -TRY_REPOSITORY = "{}try".format(HG_BASE) - - -class CodeCov(object): - def __init__(self, repository, revision, task_name_filter, cache_root): - # List of test-suite, sorted alphabetically. - # This way, the index of a suite in the array should be stable enough. - self.suites = ["web-platform-tests"] - - self.cache_root = cache_root - - temp_dir = tempfile.mkdtemp() - self.artifacts_dir = os.path.join(temp_dir, "ccov-artifacts") - self.reports_dir = os.path.join(temp_dir, "ccov-reports") - - self.index_service = taskcluster_config.get_service("index") - - if revision is None: - # Retrieve latest ingested revision - self.repository = MOZILLA_CENTRAL_REPOSITORY - try: - self.revision = uploader.gcp_latest("mozilla-central")[0]["revision"] - except Exception as e: - logger.warn( - "Failed to retrieve the latest reports ingested: {}".format(e) - ) - raise - self.from_pulse = False - else: - self.repository = repository - self.revision = revision - self.from_pulse = True - - self.branch = self.repository[len(HG_BASE) :] - - assert os.path.isdir(cache_root), "Cache root {} is not a dir.".format( - cache_root - ) - self.repo_dir = os.path.join(cache_root, self.branch) - - logger.info("Mercurial revision", revision=self.revision) - - task_ids = {} - for platform in ["linux", "windows", "android-test", "android-emulator"]: - task = taskcluster.get_task(self.branch, self.revision, platform) - - # On try, developers might have requested to run only one platform, and we trust them. - # On mozilla-central, we want to assert that every platform was run (except for android platforms - # as they are unstable). - if task is not None: - task_ids[platform] = task - elif ( - self.repository == MOZILLA_CENTRAL_REPOSITORY - and not platform.startswith("android") - ): - raise Exception("Code coverage build failed and was not indexed.") - - self.artifactsHandler = ArtifactsHandler( - task_ids, self.artifacts_dir, task_name_filter - ) - - def clone_repository(self, repository, revision): - cmd = hglib.util.cmdbuilder( - "robustcheckout", - repository, - self.repo_dir, - purge=True, - sharebase="hg-shared", - upstream="https://hg.mozilla.org/mozilla-unified", - revision=revision, - networkattempts=7, - ) - - cmd.insert(0, hglib.HGPATH) - - proc = hglib.util.popen(cmd) - out, err = proc.communicate() - if proc.returncode: - raise hglib.error.CommandError(cmd, proc.returncode, out, err) - - logger.info("{} cloned".format(repository)) - - def retrieve_source_and_artifacts(self): - with ThreadPoolExecutorResult(max_workers=2) as executor: - # Thread 1 - Download coverage artifacts. - executor.submit(self.artifactsHandler.download_all) - - # Thread 2 - Clone repository. 
- executor.submit(self.clone_repository, self.repository, self.revision) - - def build_reports(self, only=None): - """ - Build all the possible covdir reports using current artifacts - """ - os.makedirs(self.reports_dir, exist_ok=True) - - reports = {} - for ( - (platform, suite), - artifacts, - ) in self.artifactsHandler.get_combinations().items(): - - if only is not None and (platform, suite) not in only: - continue - - # Generate covdir report for that suite & platform - logger.info( - "Building covdir suite report", - suite=suite, - platform=platform, - artifacts=len(artifacts), - ) - output = grcov.report( - artifacts, source_dir=self.repo_dir, out_format="covdir" - ) - - # Write output on FS - path = os.path.join(self.reports_dir, f"{platform}.{suite}.json") - with open(path, "wb") as f: - f.write(output) - - reports[(platform, suite)] = path - - return reports - - def upload_reports(self, reports): - """ - Upload all provided covdir reports on GCP - """ - for (platform, suite), path in reports.items(): - report = open(path, "rb").read() - uploader.gcp( - self.branch, self.revision, report, suite=suite, platform=platform - ) - - def check_javascript_files(self): - """ - Check that all JavaScript files present in the coverage artifacts actually exist. - If they don't, there might be a bug in the LCOV rewriter. - """ - for artifact in self.artifactsHandler.get(): - if "jsvm" not in artifact: - continue - - with zipfile.ZipFile(artifact, "r") as zf: - for file_name in zf.namelist(): - with zf.open(file_name, "r") as fl: - source_files = [ - line[3:].decode("utf-8").rstrip() - for line in fl - if line.startswith(b"SF:") - ] - missing_files = [ - f - for f in source_files - if not os.path.exists(os.path.join(self.repo_dir, f)) - ] - if len(missing_files) != 0: - logger.warn( - f"{missing_files} are present in coverage reports, but missing from the repository" - ) - - # This function is executed when the bot is triggered at the end of a mozilla-central build. - def go_from_trigger_mozilla_central(self): - # Check the covdir report does not already exists - if uploader.gcp_covdir_exists(self.branch, self.revision, "all", "all"): - logger.warn("Full covdir report already on GCP") - return - - self.retrieve_source_and_artifacts() - - self.check_javascript_files() - - reports = self.build_reports() - logger.info("Built all covdir reports", nb=len(reports)) - - # Retrieve the full report - full_path = reports.get(("all", "all")) - assert full_path is not None, "Missing full report (all:all)" - report = json.load(open(full_path)) - - paths = uploader.covdir_paths(report) - expected_extensions = [".js", ".cpp"] - for extension in expected_extensions: - assert any( - path.endswith(extension) for path in paths - ), "No {} file in the generated report".format(extension) - - self.upload_reports(reports) - logger.info("Uploaded all covdir reports", nb=len(reports)) - - # Get pushlog and ask the backend to generate the coverage by changeset - # data, which will be cached. - with hgmo.HGMO(self.repo_dir) as hgmo_server: - changesets = hgmo_server.get_automation_relevance_changesets(self.revision) - - logger.info("Upload changeset coverage data to Phabricator") - phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) - changesets_coverage = phabricatorUploader.upload(report, changesets) - - notify_email(self.revision, changesets, changesets_coverage) - - # This function is executed when the bot is triggered at the end of a try build. 
- def go_from_trigger_try(self): - phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) - - with hgmo.HGMO(server_address=TRY_REPOSITORY) as hgmo_server: - changesets = hgmo_server.get_automation_relevance_changesets(self.revision) - - if not any( - parse_revision_id(changeset["desc"]) is not None for changeset in changesets - ): - logger.info( - "None of the commits in the try push are linked to a Phabricator revision" - ) - return - - self.retrieve_source_and_artifacts() - - reports = self.build_reports(only=[("all", "all")]) - full_path = reports.get(("all", "all")) - assert full_path is not None, "Missing full report (all:all)" - report = json.load(open(full_path)) - - logger.info("Upload changeset coverage data to Phabricator") - phabricatorUploader.upload(report, changesets) - - # This function is executed when the bot is triggered via cron. - def go_from_cron(self): - self.retrieve_source_and_artifacts() - - logger.info("Generating zero coverage reports") - zc = ZeroCov(self.repo_dir) - zc.generate(self.artifactsHandler.get(), self.revision) - - logger.info("Generating chunk mapping") - chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler) - - # Index the task in the TaskCluster index at the given revision and as "latest". - # Given that all tasks have the same rank, the latest task that finishes will - # overwrite the "latest" entry. - namespaces = [ - "project.releng.services.project.{}.code_coverage_bot.{}".format( - secrets[secrets.APP_CHANNEL], self.revision - ), - "project.releng.services.project.{}.code_coverage_bot.latest".format( - secrets[secrets.APP_CHANNEL] - ), - ] - - for namespace in namespaces: - self.index_service.insertTask( - namespace, - { - "taskId": os.environ["TASK_ID"], - "rank": 0, - "data": {}, - "expires": (datetime.utcnow() + timedelta(180)).strftime( - "%Y-%m-%dT%H:%M:%S.%fZ" - ), - }, - ) - - def go(self): - if not self.from_pulse: - self.go_from_cron() - elif self.repository == TRY_REPOSITORY: - self.go_from_trigger_try() - elif self.repository == MOZILLA_CENTRAL_REPOSITORY: - self.go_from_trigger_mozilla_central() - else: - assert False, "We shouldn't be here!" diff --git a/bot/code_coverage_bot/config.py b/bot/code_coverage_bot/config.py index 1b08a10b1..05a67bab9 100644 --- a/bot/code_coverage_bot/config.py +++ b/bot/code_coverage_bot/config.py @@ -4,3 +4,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. PROJECT_NAME = "code-coverage-bot" +HG_BASE = "https://hg.mozilla.org/" +MOZILLA_CENTRAL_REPOSITORY = "{}mozilla-central".format(HG_BASE) +TRY_REPOSITORY = "{}try".format(HG_BASE) diff --git a/bot/code_coverage_bot/hooks/__init__.py b/bot/code_coverage_bot/hooks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/bot/code_coverage_bot/hooks/base.py b/bot/code_coverage_bot/hooks/base.py new file mode 100644 index 000000000..651bec974 --- /dev/null +++ b/bot/code_coverage_bot/hooks/base.py @@ -0,0 +1,131 @@ +# -*- coding: utf-8 -*- +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import os +import tempfile + +import hglib +import structlog + +from code_coverage_bot import config +from code_coverage_bot import grcov +from code_coverage_bot import taskcluster +from code_coverage_bot import uploader +from code_coverage_bot.artifacts import ArtifactsHandler +from code_coverage_bot.utils import ThreadPoolExecutorResult + +logger = structlog.get_logger(__name__) + + +class Hook(object): + def __init__(self, task_name_filter, cache_root, revision=None): + temp_dir = tempfile.mkdtemp() + self.artifacts_dir = os.path.join(temp_dir, "ccov-artifacts") + self.reports_dir = os.path.join(temp_dir, "ccov-reports") + + assert os.path.isdir(cache_root), f"Cache root {cache_root} is not a dir." + self.repo_dir = os.path.join(cache_root, self.branch) + + if revision is None: + # Retrieve latest ingested revision + try: + self.revision = uploader.gcp_latest(self.branch)[0]["revision"] + except Exception as e: + logger.warn( + "Failed to retrieve the latest reports ingested: {}".format(e) + ) + raise + else: + self.revision = revision + logger.info( + "Mercurial setup", repository=self.repository, revision=self.revision + ) + + task_ids = {} + for platform in ["linux", "windows", "android-test", "android-emulator"]: + task = taskcluster.get_task(self.branch, self.revision, platform) + + # On try, developers might have requested to run only one platform, and we trust them. + # On mozilla-central, we want to assert that every platform was run (except for android platforms + # as they are unstable). + if task is not None: + task_ids[platform] = task + elif ( + self.repository == config.MOZILLA_CENTRAL_REPOSITORY + and not platform.startswith("android") + ): + raise Exception("Code coverage build failed and was not indexed.") + + self.artifactsHandler = ArtifactsHandler( + task_ids, self.artifacts_dir, task_name_filter + ) + + @property + def branch(self): + return self.repository[len(config.HG_BASE) :] + + def clone_repository(self): + cmd = hglib.util.cmdbuilder( + "robustcheckout", + self.repository, + self.repo_dir, + purge=True, + sharebase="hg-shared", + upstream="https://hg.mozilla.org/mozilla-unified", + revision=self.revision, + networkattempts=7, + ) + + cmd.insert(0, hglib.HGPATH) + + proc = hglib.util.popen(cmd) + out, err = proc.communicate() + if proc.returncode: + raise hglib.error.CommandError(cmd, proc.returncode, out, err) + + logger.info("{} cloned".format(self.repository)) + + def retrieve_source_and_artifacts(self): + with ThreadPoolExecutorResult(max_workers=2) as executor: + # Thread 1 - Download coverage artifacts. + executor.submit(self.artifactsHandler.download_all) + + # Thread 2 - Clone repository. 
+ executor.submit(self.clone_repository) + + def build_reports(self, only=None): + """ + Build all the possible covdir reports using current artifacts + """ + os.makedirs(self.reports_dir, exist_ok=True) + + reports = {} + for ( + (platform, suite), + artifacts, + ) in self.artifactsHandler.get_combinations().items(): + + if only is not None and (platform, suite) not in only: + continue + + # Generate covdir report for that suite & platform + logger.info( + "Building covdir suite report", + suite=suite, + platform=platform, + artifacts=len(artifacts), + ) + output = grcov.report( + artifacts, source_dir=self.repo_dir, out_format="covdir" + ) + + # Write output on FS + path = os.path.join(self.reports_dir, f"{platform}.{suite}.json") + with open(path, "wb") as f: + f.write(output) + + reports[(platform, suite)] = path + + return reports diff --git a/bot/code_coverage_bot/hooks/cron.py b/bot/code_coverage_bot/hooks/cron.py new file mode 100644 index 000000000..cce3518eb --- /dev/null +++ b/bot/code_coverage_bot/hooks/cron.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +import os +from datetime import datetime +from datetime import timedelta + +import structlog + +from code_coverage_bot import chunk_mapping +from code_coverage_bot import config +from code_coverage_bot.hooks.base import Hook +from code_coverage_bot.secrets import secrets +from code_coverage_bot.taskcluster import taskcluster_config +from code_coverage_bot.zero_coverage import ZeroCov + +logger = structlog.get_logger(__name__) + + +class CronHook(Hook): + """ + This function is executed when the bot is triggered via cron. + """ + + repository = config.MOZILLA_CENTRAL_REPOSITORY + + def run(self): + self.retrieve_source_and_artifacts() + + logger.info("Generating zero coverage reports") + zc = ZeroCov(self.repo_dir) + zc.generate(self.artifactsHandler.get(), self.revision) + + logger.info("Generating chunk mapping") + chunk_mapping.generate(self.repo_dir, self.revision, self.artifactsHandler) + + # Index the task in the TaskCluster index at the given revision and as "latest". + # Given that all tasks have the same rank, the latest task that finishes will + # overwrite the "latest" entry. + namespaces = [ + "project.releng.services.project.{}.code_coverage_bot.{}".format( + secrets[secrets.APP_CHANNEL], self.revision + ), + "project.releng.services.project.{}.code_coverage_bot.latest".format( + secrets[secrets.APP_CHANNEL] + ), + ] + + index_service = taskcluster_config.get_service("index") + + for namespace in namespaces: + index_service.insertTask( + namespace, + { + "taskId": os.environ["TASK_ID"], + "rank": 0, + "data": {}, + "expires": (datetime.utcnow() + timedelta(180)).strftime( + "%Y-%m-%dT%H:%M:%S.%fZ" + ), + }, + ) diff --git a/bot/code_coverage_bot/hooks/mc.py b/bot/code_coverage_bot/hooks/mc.py new file mode 100644 index 000000000..76a825324 --- /dev/null +++ b/bot/code_coverage_bot/hooks/mc.py @@ -0,0 +1,103 @@ +# -*- coding: utf-8 -*- +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import json +import os +import zipfile + +import structlog + +from code_coverage_bot import config +from code_coverage_bot import hgmo +from code_coverage_bot import uploader +from code_coverage_bot.hooks.base import Hook +from code_coverage_bot.notifier import notify_email +from code_coverage_bot.phabricator import PhabricatorUploader + +logger = structlog.get_logger(__name__) + + +class MozillaCentralHook(Hook): + """ + This function is executed when the bot is triggered at the end of a mozilla-central build. + """ + + repository = config.MOZILLA_CENTRAL_REPOSITORY + + def run(self): + # Check the covdir report does not already exists + if uploader.gcp_covdir_exists(self.branch, self.revision, "all", "all"): + logger.warn("Full covdir report already on GCP") + return + + self.retrieve_source_and_artifacts() + + self.check_javascript_files() + + reports = self.build_reports() + logger.info("Built all covdir reports", nb=len(reports)) + + # Retrieve the full report + full_path = reports.get(("all", "all")) + assert full_path is not None, "Missing full report (all:all)" + report = json.load(open(full_path)) + + paths = uploader.covdir_paths(report) + expected_extensions = [".js", ".cpp"] + for extension in expected_extensions: + assert any( + path.endswith(extension) for path in paths + ), "No {} file in the generated report".format(extension) + + self.upload_reports(reports) + logger.info("Uploaded all covdir reports", nb=len(reports)) + + # Get pushlog and ask the backend to generate the coverage by changeset + # data, which will be cached. + with hgmo.HGMO(self.repo_dir) as hgmo_server: + changesets = hgmo_server.get_automation_relevance_changesets(self.revision) + + logger.info("Upload changeset coverage data to Phabricator") + phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) + changesets_coverage = phabricatorUploader.upload(report, changesets) + + notify_email(self.revision, changesets, changesets_coverage) + + def upload_reports(self, reports): + """ + Upload all provided covdir reports on GCP + """ + for (platform, suite), path in reports.items(): + report = open(path, "rb").read() + uploader.gcp( + self.branch, self.revision, report, suite=suite, platform=platform + ) + + def check_javascript_files(self): + """ + Check that all JavaScript files present in the coverage artifacts actually exist. + If they don't, there might be a bug in the LCOV rewriter. + """ + for artifact in self.artifactsHandler.get(): + if "jsvm" not in artifact: + continue + + with zipfile.ZipFile(artifact, "r") as zf: + for file_name in zf.namelist(): + with zf.open(file_name, "r") as fl: + source_files = [ + line[3:].decode("utf-8").rstrip() + for line in fl + if line.startswith(b"SF:") + ] + missing_files = [ + f + for f in source_files + if not os.path.exists(os.path.join(self.repo_dir, f)) + ] + if len(missing_files) != 0: + logger.warn( + f"{missing_files} are present in coverage reports, but missing from the repository" + ) diff --git a/bot/code_coverage_bot/hooks/try_repo.py b/bot/code_coverage_bot/hooks/try_repo.py new file mode 100644 index 000000000..3f6229cdb --- /dev/null +++ b/bot/code_coverage_bot/hooks/try_repo.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ +import json + +import structlog + +from code_coverage_bot import config +from code_coverage_bot import hgmo +from code_coverage_bot.hooks.base import Hook +from code_coverage_bot.phabricator import PhabricatorUploader +from code_coverage_bot.phabricator import parse_revision_id + +logger = structlog.get_logger(__name__) + + +class TryHook(Hook): + """ + This function is executed when the bot is triggered at the end of a try build. + """ + + repository = config.TRY_REPOSITORY + + def run(self): + phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) + + with hgmo.HGMO(server_address=config.TRY_REPOSITORY) as hgmo_server: + changesets = hgmo_server.get_automation_relevance_changesets(self.revision) + + if not any( + parse_revision_id(changeset["desc"]) is not None for changeset in changesets + ): + logger.info( + "None of the commits in the try push are linked to a Phabricator revision" + ) + return + + self.retrieve_source_and_artifacts() + + reports = self.build_reports(only=[("all", "all")]) + full_path = reports.get(("all", "all")) + assert full_path is not None, "Missing full report (all:all)" + report = json.load(open(full_path)) + + logger.info("Upload changeset coverage data to Phabricator") + phabricatorUploader.upload(report, changesets) From 3621220433f530d716c9e4f69dc435b6a185a783 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Tue, 10 Sep 2019 15:59:39 +0200 Subject: [PATCH 02/18] Remove useless codecov "test" --- bot/tests/test_codecov.py | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 bot/tests/test_codecov.py diff --git a/bot/tests/test_codecov.py b/bot/tests/test_codecov.py deleted file mode 100644 index bdd0662fb..000000000 --- a/bot/tests/test_codecov.py +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- - -from code_coverage_bot import codecov - - -def test_ok(): - assert codecov From ed0f9d1d5396625101b4ed665ff529609c9b13a3 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Wed, 11 Sep 2019 12:35:16 +0200 Subject: [PATCH 03/18] bot: Split in 2 entrypoints repo & cron --- bot/code_coverage_bot/cli.py | 30 +------ bot/code_coverage_bot/hooks/base.py | 17 +--- bot/code_coverage_bot/hooks/cron.py | 20 ++++- .../hooks/{mc.py => repo.py} | 84 +++++++++++++++---- bot/code_coverage_bot/hooks/try_repo.py | 48 ----------- bot/setup.py | 5 +- 6 files changed, 99 insertions(+), 105 deletions(-) rename bot/code_coverage_bot/hooks/{mc.py => repo.py} (58%) delete mode 100644 bot/code_coverage_bot/hooks/try_repo.py diff --git a/bot/code_coverage_bot/cli.py b/bot/code_coverage_bot/cli.py index de001d6ce..1805361b9 100644 --- a/bot/code_coverage_bot/cli.py +++ b/bot/code_coverage_bot/cli.py @@ -6,12 +6,7 @@ import argparse import os -import structlog - from code_coverage_bot import config -from code_coverage_bot.hooks.cron import CronHook -from code_coverage_bot.hooks.mc import MozillaCentralHook -from code_coverage_bot.hooks.try_repo import TryHook from code_coverage_bot.secrets import secrets from code_coverage_bot.taskcluster import taskcluster_config from code_coverage_tools.log import init_logger @@ -42,7 +37,7 @@ def parse_cli(): return parser.parse_args() -def main(): +def setup_cli(): args = parse_cli() # Auth on Taskcluster @@ -59,25 +54,4 @@ def main(): sentry_dsn=secrets.get("SENTRY_DSN"), ) - logger = structlog.get_logger(__name__) - - if args.revision is None: - logger.info("Running cron hook") - hook = CronHook(args.task_name_filter, args.cache_root) - - elif args.repository == config.MOZILLA_CENTRAL_REPOSITORY: - 
logger.info("Running Mozilla Central hook") - hook = MozillaCentralHook(args.task_name_filter, args.cache_root, args.revision) - - elif args.repository == config.TRY_REPOSITORY: - logger.info("Running Try hook") - hook = TryHook(args.task_name_filter, args.cache_root, args.revision) - - else: - raise Exception(f"Invalid configuration for {args.repository}/{args.revision}") - - hook.run() - - -if __name__ == "__main__": - main() + return args diff --git a/bot/code_coverage_bot/hooks/base.py b/bot/code_coverage_bot/hooks/base.py index 651bec974..d011aca34 100644 --- a/bot/code_coverage_bot/hooks/base.py +++ b/bot/code_coverage_bot/hooks/base.py @@ -12,7 +12,6 @@ from code_coverage_bot import config from code_coverage_bot import grcov from code_coverage_bot import taskcluster -from code_coverage_bot import uploader from code_coverage_bot.artifacts import ArtifactsHandler from code_coverage_bot.utils import ThreadPoolExecutorResult @@ -20,7 +19,7 @@ class Hook(object): - def __init__(self, task_name_filter, cache_root, revision=None): + def __init__(self, repository, revision, task_name_filter, cache_root): temp_dir = tempfile.mkdtemp() self.artifacts_dir = os.path.join(temp_dir, "ccov-artifacts") self.reports_dir = os.path.join(temp_dir, "ccov-reports") @@ -28,17 +27,9 @@ def __init__(self, task_name_filter, cache_root, revision=None): assert os.path.isdir(cache_root), f"Cache root {cache_root} is not a dir." self.repo_dir = os.path.join(cache_root, self.branch) - if revision is None: - # Retrieve latest ingested revision - try: - self.revision = uploader.gcp_latest(self.branch)[0]["revision"] - except Exception as e: - logger.warn( - "Failed to retrieve the latest reports ingested: {}".format(e) - ) - raise - else: - self.revision = revision + self.revision = revision + self.repository = repository + assert self.revision and self.repository, "Missing repo/revision" logger.info( "Mercurial setup", repository=self.repository, revision=self.revision ) diff --git a/bot/code_coverage_bot/hooks/cron.py b/bot/code_coverage_bot/hooks/cron.py index cce3518eb..e9836c34c 100644 --- a/bot/code_coverage_bot/hooks/cron.py +++ b/bot/code_coverage_bot/hooks/cron.py @@ -11,6 +11,8 @@ from code_coverage_bot import chunk_mapping from code_coverage_bot import config +from code_coverage_bot import uploader +from code_coverage_bot.cli import setup_cli from code_coverage_bot.hooks.base import Hook from code_coverage_bot.secrets import secrets from code_coverage_bot.taskcluster import taskcluster_config @@ -24,7 +26,16 @@ class CronHook(Hook): This function is executed when the bot is triggered via cron. 
""" - repository = config.MOZILLA_CENTRAL_REPOSITORY + def __init__(self, *args, **kwargs): + + # Retrieve latest ingested revision + try: + revision = uploader.gcp_latest("mozilla-central")[0]["revision"] + except Exception as e: + logger.warn("Failed to retrieve the latest reports ingested: {}".format(e)) + raise + + super().__init__(config.MOZILLA_CENTRAL_REPOSITORY, revision, *args, **kwargs) def run(self): self.retrieve_source_and_artifacts() @@ -62,3 +73,10 @@ def run(self): ), }, ) + + +def main(): + logger.info("Starting code coverage bot for cron") + args = setup_cli() + hook = CronHook(args.task_name_filter, args.cache_root) + hook.run() diff --git a/bot/code_coverage_bot/hooks/mc.py b/bot/code_coverage_bot/hooks/repo.py similarity index 58% rename from bot/code_coverage_bot/hooks/mc.py rename to bot/code_coverage_bot/hooks/repo.py index 76a825324..550383056 100644 --- a/bot/code_coverage_bot/hooks/mc.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -12,19 +12,44 @@ from code_coverage_bot import config from code_coverage_bot import hgmo from code_coverage_bot import uploader +from code_coverage_bot.cli import setup_cli from code_coverage_bot.hooks.base import Hook from code_coverage_bot.notifier import notify_email from code_coverage_bot.phabricator import PhabricatorUploader +from code_coverage_bot.phabricator import parse_revision_id logger = structlog.get_logger(__name__) -class MozillaCentralHook(Hook): +REPOSITORIES = { + config.MOZILLA_CENTRAL_REPOSITORY: { + # Will build all the reports possible + "build_reports": None, + "gcp_upload": True, + "send_low_coverage_email": True, + }, + config.TRY_REPOSITORY: { + # Only build the main report + "build_reports": [("all", "all")], + "gcp_upload": False, + "send_low_coverage_email": False, + }, +} + + +class RepositoryHook(Hook): """ This function is executed when the bot is triggered at the end of a mozilla-central build. """ - repository = config.MOZILLA_CENTRAL_REPOSITORY + def __init__(self, repository, *args, **kwargs): + assert repository in REPOSITORIES, f"Unsupported repository {repository}" + self.config = REPOSITORIES[repository] + + for key in ("build_reports", "gcp_upload", "send_low_coverage_email"): + assert key in self.config, f"Missing {key} in {repository} config" + + super().__init__(repository, *args, **kwargs) def run(self): # Check the covdir report does not already exists @@ -36,7 +61,7 @@ def run(self): self.check_javascript_files() - reports = self.build_reports() + reports = self.build_reports(only=self.config["build_reports"]) logger.info("Built all covdir reports", nb=len(reports)) # Retrieve the full report @@ -51,19 +76,22 @@ def run(self): path.endswith(extension) for path in paths ), "No {} file in the generated report".format(extension) - self.upload_reports(reports) - logger.info("Uploaded all covdir reports", nb=len(reports)) - - # Get pushlog and ask the backend to generate the coverage by changeset - # data, which will be cached. 
- with hgmo.HGMO(self.repo_dir) as hgmo_server: - changesets = hgmo_server.get_automation_relevance_changesets(self.revision) + # Upload reports on GCP + if self.config["gcp_upload"]: + self.upload_reports(reports) + logger.info("Uploaded all covdir reports", nb=len(reports)) + else: + logger.info("Skipping GCP upload") - logger.info("Upload changeset coverage data to Phabricator") - phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) - changesets_coverage = phabricatorUploader.upload(report, changesets) + # Upload coverage on phabricator + changesets, coverage = self.upload_phabricator(report) - notify_email(self.revision, changesets, changesets_coverage) + # Send an email on low coverage + if self.config["send_low_coverage_email"]: + notify_email(self.revision, changesets, coverage) + logger.info("Sent low coverage email notification") + else: + logger.info("Skipping low coverage email notification") def upload_reports(self, reports): """ @@ -101,3 +129,31 @@ def check_javascript_files(self): logger.warn( f"{missing_files} are present in coverage reports, but missing from the repository" ) + + def upload_phabricator(self, report): + phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) + + with hgmo.HGMO(server_address=config.TRY_REPOSITORY) as hgmo_server: + changesets = hgmo_server.get_automation_relevance_changesets(self.revision) + + if not any( + parse_revision_id(changeset["desc"]) is not None for changeset in changesets + ): + logger.info( + "None of the commits in the try push are linked to a Phabricator revision" + ) + return + + logger.info("Upload changeset coverage data to Phabricator") + coverage = phabricatorUploader.upload(report, changesets) + + return changesets, coverage + + +def main(): + logger.info("Starting code coverage bot for repository") + args = setup_cli() + hook = RepositoryHook( + args.repository, args.revision, args.task_name_filter, args.cache_root + ) + hook.run() diff --git a/bot/code_coverage_bot/hooks/try_repo.py b/bot/code_coverage_bot/hooks/try_repo.py deleted file mode 100644 index 3f6229cdb..000000000 --- a/bot/code_coverage_bot/hooks/try_repo.py +++ /dev/null @@ -1,48 +0,0 @@ -# -*- coding: utf-8 -*- -# This Source Code Form is subject to the terms of the Mozilla Public -# License, v. 2.0. If a copy of the MPL was not distributed with this -# file, You can obtain one at http://mozilla.org/MPL/2.0/. - -import json - -import structlog - -from code_coverage_bot import config -from code_coverage_bot import hgmo -from code_coverage_bot.hooks.base import Hook -from code_coverage_bot.phabricator import PhabricatorUploader -from code_coverage_bot.phabricator import parse_revision_id - -logger = structlog.get_logger(__name__) - - -class TryHook(Hook): - """ - This function is executed when the bot is triggered at the end of a try build. 
- """ - - repository = config.TRY_REPOSITORY - - def run(self): - phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) - - with hgmo.HGMO(server_address=config.TRY_REPOSITORY) as hgmo_server: - changesets = hgmo_server.get_automation_relevance_changesets(self.revision) - - if not any( - parse_revision_id(changeset["desc"]) is not None for changeset in changesets - ): - logger.info( - "None of the commits in the try push are linked to a Phabricator revision" - ) - return - - self.retrieve_source_and_artifacts() - - reports = self.build_reports(only=[("all", "all")]) - full_path = reports.get(("all", "all")) - assert full_path is not None, "Missing full report (all:all)" - report = json.load(open(full_path)) - - logger.info("Upload changeset coverage data to Phabricator") - phabricatorUploader.upload(report, changesets) diff --git a/bot/setup.py b/bot/setup.py index da0ed4315..62e9cf8d9 100644 --- a/bot/setup.py +++ b/bot/setup.py @@ -46,6 +46,9 @@ def read_requirements(file_): zip_safe=False, license="MPL2", entry_points={ - "console_scripts": ["code-coverage-bot = code_coverage_bot.cli:main"] + "console_scripts": [ + "code-coverage-cron = code_coverage_bot.hooks.cron:main" + "code-coverage-repo = code_coverage_bot.hooks.repo:main" + ] }, ) From 106915b79ad4840d5a5ed541f7b20d79b8a68763 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Wed, 11 Sep 2019 13:01:13 +0200 Subject: [PATCH 04/18] bot: Fix boot issues --- bot/code_coverage_bot/cli.py | 16 +++++++--------- bot/code_coverage_bot/hooks/base.py | 12 +++++++----- bot/code_coverage_bot/hooks/cron.py | 2 +- bot/setup.py | 4 ++-- 4 files changed, 17 insertions(+), 17 deletions(-) diff --git a/bot/code_coverage_bot/cli.py b/bot/code_coverage_bot/cli.py index 1805361b9..643131bd1 100644 --- a/bot/code_coverage_bot/cli.py +++ b/bot/code_coverage_bot/cli.py @@ -12,13 +12,15 @@ from code_coverage_tools.log import init_logger -def parse_cli(): +def setup_cli(ask_repository=True, ask_revision=True): """ - Setup CLI options parser + Setup CLI options parser and taskcluster bootstrap """ parser = argparse.ArgumentParser(description="Mozilla Code Coverage Bot") - parser.add_argument("--repository", default=os.environ.get("REPOSITORY")) - parser.add_argument("--revision", default=os.environ.get("REVISION")) + if ask_repository: + parser.add_argument("--repository", default=os.environ.get("REPOSITORY")) + if ask_revision: + parser.add_argument("--revision", default=os.environ.get("REVISION")) parser.add_argument( "--cache-root", required=True, help="Cache root, used to pull changesets" ) @@ -34,11 +36,7 @@ def parse_cli(): ) parser.add_argument("--taskcluster-client-id", help="Taskcluster Client ID") parser.add_argument("--taskcluster-access-token", help="Taskcluster Access token") - return parser.parse_args() - - -def setup_cli(): - args = parse_cli() + args = parser.parse_args() # Auth on Taskcluster taskcluster_config.auth(args.taskcluster_client_id, args.taskcluster_access_token) diff --git a/bot/code_coverage_bot/hooks/base.py b/bot/code_coverage_bot/hooks/base.py index d011aca34..daa0375a6 100644 --- a/bot/code_coverage_bot/hooks/base.py +++ b/bot/code_coverage_bot/hooks/base.py @@ -24,16 +24,18 @@ def __init__(self, repository, revision, task_name_filter, cache_root): self.artifacts_dir = os.path.join(temp_dir, "ccov-artifacts") self.reports_dir = os.path.join(temp_dir, "ccov-reports") - assert os.path.isdir(cache_root), f"Cache root {cache_root} is not a dir." 
- self.repo_dir = os.path.join(cache_root, self.branch) - - self.revision = revision self.repository = repository - assert self.revision and self.repository, "Missing repo/revision" + self.revision = revision + assert ( + self.revision is not None and self.repository is not None + ), "Missing repo/revision" logger.info( "Mercurial setup", repository=self.repository, revision=self.revision ) + assert os.path.isdir(cache_root), f"Cache root {cache_root} is not a dir." + self.repo_dir = os.path.join(cache_root, self.branch) + task_ids = {} for platform in ["linux", "windows", "android-test", "android-emulator"]: task = taskcluster.get_task(self.branch, self.revision, platform) diff --git a/bot/code_coverage_bot/hooks/cron.py b/bot/code_coverage_bot/hooks/cron.py index e9836c34c..4c2ea3b05 100644 --- a/bot/code_coverage_bot/hooks/cron.py +++ b/bot/code_coverage_bot/hooks/cron.py @@ -77,6 +77,6 @@ def run(self): def main(): logger.info("Starting code coverage bot for cron") - args = setup_cli() + args = setup_cli(ask_revision=False, ask_repository=False) hook = CronHook(args.task_name_filter, args.cache_root) hook.run() diff --git a/bot/setup.py b/bot/setup.py index 62e9cf8d9..c0b6491a3 100644 --- a/bot/setup.py +++ b/bot/setup.py @@ -47,8 +47,8 @@ def read_requirements(file_): license="MPL2", entry_points={ "console_scripts": [ - "code-coverage-cron = code_coverage_bot.hooks.cron:main" - "code-coverage-repo = code_coverage_bot.hooks.repo:main" + "code-coverage-cron = code_coverage_bot.hooks.cron:main", + "code-coverage-repo = code_coverage_bot.hooks.repo:main", ] }, ) From 4ae0b8a6037744793765650aaa49f6c039db1a63 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Wed, 11 Sep 2019 13:07:38 +0200 Subject: [PATCH 05/18] Ship hooks through Taskcluster CI --- .taskcluster.yml | 3 +- ...r-hook.json => taskcluster-hook-cron.json} | 7 +- bot/taskcluster-hook-repo.json | 95 +++++++++++++++++++ 3 files changed, 100 insertions(+), 5 deletions(-) rename bot/{taskcluster-hook.json => taskcluster-hook-cron.json} (97%) create mode 100644 bot/taskcluster-hook-repo.json diff --git a/.taskcluster.yml b/.taskcluster.yml index f2b4ff434..02cffe8b5 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -428,7 +428,8 @@ tasks: cd code-coverage && git checkout ${head_rev} && sed -i -e 's/CHANNEL/${channel}/g' -e 's/REVISION/${head_rev}/g' bot/taskcluster-hook.json && - taskboot --target . build-hook bot/taskcluster-hook.json project-relman code-coverage-${channel}" + taskboot --target . build-hook bot/taskcluster-hook-repo.json project-relman code-coverage-repo-${channel} && + taskboot --target . 
build-hook bot/taskcluster-hook-cron.json project-relman code-coverage-cron-${channel}" metadata: name: "Code Coverage Bot hook update (${channel})" description: Update Taskcluster hook triggering the code-coverage tasks diff --git a/bot/taskcluster-hook.json b/bot/taskcluster-hook-cron.json similarity index 97% rename from bot/taskcluster-hook.json rename to bot/taskcluster-hook-cron.json index d84fc182b..7a74a2dda 100644 --- a/bot/taskcluster-hook.json +++ b/bot/taskcluster-hook-cron.json @@ -39,10 +39,10 @@ "description": "", "name": { "$if": "firedBy == 'triggerHook'", - "else": "Code Coverage aggregation task (CHANNEL)", + "else": "Code Coverage aggregation task - cron (CHANNEL)", "then": { "$if": "'taskName' in payload", - "else": "Code Coverage aggregation task (CHANNEL)", + "else": "Code Coverage aggregation task - cron (CHANNEL)", "then": { "$eval": "payload.taskName" } @@ -71,7 +71,7 @@ }, "capabilities": {}, "command": [ - "code-coverage-bot", + "code-coverage-cron", "--taskcluster-secret", "project/relman/code-coverage/runtime-CHANNEL", "--cache-root", @@ -97,7 +97,6 @@ "schedulerId": "-", "scopes": [ "secrets:get:project/relman/code-coverage/runtime-CHANNEL", - "notify:email:*", "docker-worker:cache:code-coverage-bot-CHANNEL", "index:insert-task:project.releng.services.project.CHANNEL.code_coverage_bot.*" ], diff --git a/bot/taskcluster-hook-repo.json b/bot/taskcluster-hook-repo.json new file mode 100644 index 000000000..fca3b63b5 --- /dev/null +++ b/bot/taskcluster-hook-repo.json @@ -0,0 +1,95 @@ +{ + "bindings": [], + "metadata": { + "description": "Automatically build code coverage reports", + "emailOnError": true, + "name": "Code coverage hook (CHANNEL)", + "owner": "mcastelluccio@mozilla.com" + }, + "task": { + "$merge": [ + { + "$if": "firedBy == 'triggerHook'", + "else": {}, + "then": { + "$if": "'taskGroupId' in payload", + "else": {}, + "then": { + "taskGroupId": { + "$eval": "payload.taskGroupId" + } + } + } + }, + { + "created": { + "$fromNow": "0 seconds" + }, + "deadline": { + "$fromNow": "4 hours" + }, + "expires": { + "$fromNow": "1 month" + }, + "extra": {}, + "metadata": { + "description": "", + "name": { + "$if": "firedBy == 'triggerHook'", + "else": "Code Coverage aggregation task - repo (CHANNEL)", + "then": { + "$if": "'taskName' in payload", + "else": "Code Coverage aggregation task - repo (CHANNEL)", + "then": { + "$eval": "payload.taskName" + } + } + }, + "owner": "mcastelluccio@mozilla.com", + "source": "https://github.com/mozilla/code-coverage" + }, + "payload": { + "cache": { + "code-coverage-bot-CHANNEL": "/cache" + }, + "capabilities": {}, + "command": [ + "code-coverage-repo", + "--taskcluster-secret", + "project/relman/code-coverage/runtime-CHANNEL", + "--cache-root", + "/cache" + ], + "env": { + "$if": "firedBy == 'triggerHook'", + "else": {}, + "then": { + "$eval": "payload" + } + }, + "features": { + "taskclusterProxy": true + }, + "image": "mozilla/code-coverage:bot-REVISION", + "maxRunTime": 14400 + }, + "priority": "normal", + "provisionerId": "aws-provisioner-v1", + "retries": 5, + "routes": [], + "schedulerId": "-", + "scopes": [ + "secrets:get:project/relman/code-coverage/runtime-CHANNEL", + "notify:email:*", + "docker-worker:cache:code-coverage-bot-CHANNEL" + ], + "tags": {}, + "workerType": "releng-svc-memory" + } + ] + }, + "triggerSchema": { + "additionalProperties": true, + "type": "object" + } +} From 648f668aeef9b643bffdaf694b687abbf7aa00f4 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Wed, 11 Sep 2019 14:00:58 +0200 
Subject: [PATCH 06/18] Fix hook test --- bot/tests/test_hook.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/tests/test_hook.py b/bot/tests/test_hook.py index 3094ebf90..0f64abb72 100644 --- a/bot/tests/test_hook.py +++ b/bot/tests/test_hook.py @@ -6,7 +6,7 @@ import jsonschema import pytest -HOOK = os.path.join(os.path.dirname(__file__), "../taskcluster-hook.json") +HOOK = os.path.join(os.path.dirname(__file__), "../taskcluster-hook-repo.json") payloads = [ # Trigger by interface or API From be8b2db1ecd7e673e9228ef1170ca22a652e3d39 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Wed, 11 Sep 2019 14:05:04 +0200 Subject: [PATCH 07/18] Fix sed expression in TC hook build --- .taskcluster.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.taskcluster.yml b/.taskcluster.yml index 02cffe8b5..6f66e0f02 100644 --- a/.taskcluster.yml +++ b/.taskcluster.yml @@ -427,7 +427,7 @@ tasks: - "git clone --quiet ${repository} && cd code-coverage && git checkout ${head_rev} && - sed -i -e 's/CHANNEL/${channel}/g' -e 's/REVISION/${head_rev}/g' bot/taskcluster-hook.json && + sed -i -e 's/CHANNEL/${channel}/g' -e 's/REVISION/${head_rev}/g' bot/taskcluster-hook-*.json && taskboot --target . build-hook bot/taskcluster-hook-repo.json project-relman code-coverage-repo-${channel} && taskboot --target . build-hook bot/taskcluster-hook-cron.json project-relman code-coverage-cron-${channel}" metadata: From a5392dc1ffd4ae559950d4df9d52525c1b039fff Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Thu, 12 Sep 2019 15:57:09 +0200 Subject: [PATCH 08/18] Update bot/code_coverage_bot/hooks/repo.py Co-Authored-By: Marco --- bot/code_coverage_bot/hooks/repo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/code_coverage_bot/hooks/repo.py b/bot/code_coverage_bot/hooks/repo.py index 550383056..8f71e6a1f 100644 --- a/bot/code_coverage_bot/hooks/repo.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -39,7 +39,7 @@ class RepositoryHook(Hook): """ - This function is executed when the bot is triggered at the end of a mozilla-central build. + This function is executed when the bot is triggered at the end of a build and associated tests. 
""" def __init__(self, repository, *args, **kwargs): From ef77462d98a80ecb28d02d25d75c72e89b521316 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Thu, 12 Sep 2019 15:57:25 +0200 Subject: [PATCH 09/18] Update bot/code_coverage_bot/hooks/repo.py Co-Authored-By: Marco --- bot/code_coverage_bot/hooks/repo.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bot/code_coverage_bot/hooks/repo.py b/bot/code_coverage_bot/hooks/repo.py index 8f71e6a1f..54c5ab816 100644 --- a/bot/code_coverage_bot/hooks/repo.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -53,7 +53,7 @@ def __init__(self, repository, *args, **kwargs): def run(self): # Check the covdir report does not already exists - if uploader.gcp_covdir_exists(self.branch, self.revision, "all", "all"): + if self.config["gcp_upload"] and uploader.gcp_covdir_exists(self.branch, self.revision, "all", "all"): logger.warn("Full covdir report already on GCP") return From bce9023b0ef935e485f97938fe1de7cefbdfbd68 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Thu, 12 Sep 2019 16:02:53 +0200 Subject: [PATCH 10/18] Simplify hooks description --- bot/taskcluster-hook-cron.json | 155 +++++++++++++-------------------- bot/taskcluster-hook-repo.json | 24 ++--- 2 files changed, 65 insertions(+), 114 deletions(-) diff --git a/bot/taskcluster-hook-cron.json b/bot/taskcluster-hook-cron.json index 7a74a2dda..b75068d59 100644 --- a/bot/taskcluster-hook-cron.json +++ b/bot/taskcluster-hook-cron.json @@ -10,103 +10,66 @@ "0 0 0 * * *" ], "task": { - "$merge": [ - { - "$if": "firedBy == 'triggerHook'", - "else": {}, - "then": { - "$if": "'taskGroupId' in payload", - "else": {}, - "then": { - "taskGroupId": { - "$eval": "payload.taskGroupId" - } - } - } - }, - { - "created": { - "$fromNow": "0 seconds" - }, - "deadline": { - "$fromNow": "4 hours" - }, - "expires": { - "$fromNow": "1 month" - }, - "extra": {}, - "metadata": { - "description": "", - "name": { - "$if": "firedBy == 'triggerHook'", - "else": "Code Coverage aggregation task - cron (CHANNEL)", - "then": { - "$if": "'taskName' in payload", - "else": "Code Coverage aggregation task - cron (CHANNEL)", - "then": { - "$eval": "payload.taskName" - } - } - }, - "owner": "mcastelluccio@mozilla.com", - "source": "https://github.com/mozilla/code-coverage" + "created": { + "$fromNow": "0 seconds" + }, + "deadline": { + "$fromNow": "4 hours" + }, + "expires": { + "$fromNow": "1 month" + }, + "extra": {}, + "metadata": { + "description": "", + "name": "Code Coverage aggregation task - cron (CHANNEL)", + "owner": "mcastelluccio@mozilla.com", + "source": "https://github.com/mozilla/code-coverage" + }, + "payload": { + "artifacts": { + "public/chunk_mapping.tar.xz": { + "path": "/chunk_mapping.tar.xz", + "type": "file" }, - "payload": { - "artifacts": { - "public/chunk_mapping.tar.xz": { - "path": "/chunk_mapping.tar.xz", - "type": "file" - }, - "public/per_chunk_mapping.tar.xz": { - "path": "/per_chunk_mapping.tar.xz", - "type": "file" - }, - "public/zero_coverage_report.json": { - "path": "/zero_coverage_report.json", - "type": "file" - } - }, - "cache": { - "code-coverage-bot-CHANNEL": "/cache" - }, - "capabilities": {}, - "command": [ - "code-coverage-cron", - "--taskcluster-secret", - "project/relman/code-coverage/runtime-CHANNEL", - "--cache-root", - "/cache" - ], - "env": { - "$if": "firedBy == 'triggerHook'", - "else": {}, - "then": { - "$eval": "payload" - } - }, - "features": { - "taskclusterProxy": true - }, - "image": "mozilla/code-coverage:bot-REVISION", - "maxRunTime": 14400 + 
"public/per_chunk_mapping.tar.xz": { + "path": "/per_chunk_mapping.tar.xz", + "type": "file" }, - "priority": "normal", - "provisionerId": "aws-provisioner-v1", - "retries": 5, - "routes": [], - "schedulerId": "-", - "scopes": [ - "secrets:get:project/relman/code-coverage/runtime-CHANNEL", - "docker-worker:cache:code-coverage-bot-CHANNEL", - "index:insert-task:project.releng.services.project.CHANNEL.code_coverage_bot.*" - ], - "tags": {}, - "workerType": "releng-svc-memory" - } - ] - }, - "triggerSchema": { - "additionalProperties": true, - "type": "object" + "public/zero_coverage_report.json": { + "path": "/zero_coverage_report.json", + "type": "file" + } + }, + "cache": { + "code-coverage-bot-CHANNEL": "/cache" + }, + "capabilities": {}, + "command": [ + "code-coverage-cron", + "--taskcluster-secret", + "project/relman/code-coverage/runtime-CHANNEL", + "--cache-root", + "/cache" + ], + "env": {}, + "features": { + "taskclusterProxy": true + }, + "image": "mozilla/code-coverage:bot-REVISION", + "maxRunTime": 14400 + }, + "priority": "normal", + "provisionerId": "aws-provisioner-v1", + "retries": 5, + "routes": [], + "schedulerId": "-", + "scopes": [ + "secrets:get:project/relman/code-coverage/runtime-CHANNEL", + "docker-worker:cache:code-coverage-bot-CHANNEL", + "index:insert-task:project.releng.services.project.CHANNEL.code_coverage_bot.*" + ], + "tags": {}, + "workerType": "releng-svc-memory" } } diff --git a/bot/taskcluster-hook-repo.json b/bot/taskcluster-hook-repo.json index fca3b63b5..af1c149d9 100644 --- a/bot/taskcluster-hook-repo.json +++ b/bot/taskcluster-hook-repo.json @@ -9,15 +9,11 @@ "task": { "$merge": [ { - "$if": "firedBy == 'triggerHook'", + "$if": "'taskGroupId' in payload", "else": {}, "then": { - "$if": "'taskGroupId' in payload", - "else": {}, - "then": { - "taskGroupId": { - "$eval": "payload.taskGroupId" - } + "taskGroupId": { + "$eval": "payload.taskGroupId" } } }, @@ -35,14 +31,10 @@ "metadata": { "description": "", "name": { - "$if": "firedBy == 'triggerHook'", + "$if": "'taskName' in payload", "else": "Code Coverage aggregation task - repo (CHANNEL)", "then": { - "$if": "'taskName' in payload", - "else": "Code Coverage aggregation task - repo (CHANNEL)", - "then": { - "$eval": "payload.taskName" - } + "$eval": "payload.taskName" } }, "owner": "mcastelluccio@mozilla.com", @@ -61,11 +53,7 @@ "/cache" ], "env": { - "$if": "firedBy == 'triggerHook'", - "else": {}, - "then": { - "$eval": "payload" - } + "$eval": "payload" }, "features": { "taskclusterProxy": true From 6004ae89c2d6a5f70155bc49a015d816521f2c71 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Thu, 12 Sep 2019 16:08:37 +0200 Subject: [PATCH 11/18] Fix black --- bot/code_coverage_bot/hooks/repo.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/bot/code_coverage_bot/hooks/repo.py b/bot/code_coverage_bot/hooks/repo.py index 54c5ab816..c0bd0de0e 100644 --- a/bot/code_coverage_bot/hooks/repo.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -53,7 +53,9 @@ def __init__(self, repository, *args, **kwargs): def run(self): # Check the covdir report does not already exists - if self.config["gcp_upload"] and uploader.gcp_covdir_exists(self.branch, self.revision, "all", "all"): + if self.config["gcp_upload"] and uploader.gcp_covdir_exists( + self.branch, self.revision, "all", "all" + ): logger.warn("Full covdir report already on GCP") return From 0770cd0fefb7941321fcc993ead81c86b9c9b98d Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Thu, 12 Sep 2019 16:19:07 +0200 Subject: [PATCH 12/18] 
Set expected extensions in config --- bot/code_coverage_bot/hooks/repo.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/bot/code_coverage_bot/hooks/repo.py b/bot/code_coverage_bot/hooks/repo.py index c0bd0de0e..2fc5837ed 100644 --- a/bot/code_coverage_bot/hooks/repo.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -27,12 +27,14 @@ "build_reports": None, "gcp_upload": True, "send_low_coverage_email": True, + "expected_extensions": [".js", ".cpp"], }, config.TRY_REPOSITORY: { # Only build the main report "build_reports": [("all", "all")], "gcp_upload": False, "send_low_coverage_email": False, + "expected_extensions": None, }, } @@ -71,12 +73,13 @@ def run(self): assert full_path is not None, "Missing full report (all:all)" report = json.load(open(full_path)) - paths = uploader.covdir_paths(report) - expected_extensions = [".js", ".cpp"] - for extension in expected_extensions: - assert any( - path.endswith(extension) for path in paths - ), "No {} file in the generated report".format(extension) + # Check extensions + if self.config["expected_extensions"]: + paths = uploader.covdir_paths(report) + for extension in self.config["expected_extensions"]: + assert any( + path.endswith(extension) for path in paths + ), "No {} file in the generated report".format(extension) # Upload reports on GCP if self.config["gcp_upload"]: From 54066e8a1966b0b8a50657ba07c55d202093e241 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Thu, 12 Sep 2019 16:36:01 +0200 Subject: [PATCH 13/18] bot: Configure hgmo access per repo --- bot/code_coverage_bot/hgmo.py | 3 +++ bot/code_coverage_bot/hooks/repo.py | 14 +++++++++++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/bot/code_coverage_bot/hgmo.py b/bot/code_coverage_bot/hgmo.py index d08052051..32a41c1b2 100644 --- a/bot/code_coverage_bot/hgmo.py +++ b/bot/code_coverage_bot/hgmo.py @@ -22,6 +22,9 @@ def __init__(self, repo_dir=None, server_address=None): else: self.server_address = HGMO.SERVER_ADDRESS self.repo_dir = repo_dir + logger.info( + "Configured HGMO server", address=self.server_address, dir=self.repo_dir + ) self.pid_file = os.path.join(os.getcwd(), HGMO.PID_FILE) def __get_pid(self): diff --git a/bot/code_coverage_bot/hooks/repo.py b/bot/code_coverage_bot/hooks/repo.py index 2fc5837ed..656b325a8 100644 --- a/bot/code_coverage_bot/hooks/repo.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -28,6 +28,8 @@ "gcp_upload": True, "send_low_coverage_email": True, "expected_extensions": [".js", ".cpp"], + # Use local repo to load mercurial information + "hgmo_local": True, }, config.TRY_REPOSITORY: { # Only build the main report @@ -35,6 +37,9 @@ "gcp_upload": False, "send_low_coverage_email": False, "expected_extensions": None, + # Use remote try repo in order to return early if the + # try build is not linked to Phabricator + "hgmo_local": False, }, } @@ -138,7 +143,14 @@ def check_javascript_files(self): def upload_phabricator(self, report): phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) - with hgmo.HGMO(server_address=config.TRY_REPOSITORY) as hgmo_server: + # Build HGMO config according to this repo's configuration + hgmo_config = {} + if self.config["hgmo_local"]: + hgmo_config["repo_dir"] = self.repo_dir + else: + hgmo_config["server_address"] = self.repository + + with hgmo.HGMO(**hgmo_config) as hgmo_server: changesets = hgmo_server.get_automation_relevance_changesets(self.revision) if not any( From 9540a61b8f8bc26a27bb3879fb1d0bb5ad2abf07 Mon Sep 17 00:00:00 2001 From: Bastien 
Abadie Date: Thu, 12 Sep 2019 16:46:19 +0200 Subject: [PATCH 14/18] Fix unit tests for hooks + multi trigger --- bot/taskcluster-hook-cron.json | 4 ++++ bot/tests/test_hook.py | 28 ++++++++++++++++------------ 2 files changed, 20 insertions(+), 12 deletions(-) diff --git a/bot/taskcluster-hook-cron.json b/bot/taskcluster-hook-cron.json index b75068d59..e2275142e 100644 --- a/bot/taskcluster-hook-cron.json +++ b/bot/taskcluster-hook-cron.json @@ -71,5 +71,9 @@ ], "tags": {}, "workerType": "releng-svc-memory" + }, + "triggerSchema": { + "additionalProperties": true, + "type": "object" } } diff --git a/bot/tests/test_hook.py b/bot/tests/test_hook.py index 0f64abb72..ed726a5b5 100644 --- a/bot/tests/test_hook.py +++ b/bot/tests/test_hook.py @@ -6,29 +6,33 @@ import jsonschema import pytest -HOOK = os.path.join(os.path.dirname(__file__), "../taskcluster-hook-repo.json") +HOOK_REPO = os.path.join(os.path.dirname(__file__), "../taskcluster-hook-repo.json") +HOOK_CRON = os.path.join(os.path.dirname(__file__), "../taskcluster-hook-cron.json") payloads = [ # Trigger by interface or API - {"firedBy": "triggerHook", "taskId": "xxx", "payload": {}}, - { - "firedBy": "triggerHook", - "taskId": "xxx", - "payload": {"taskName": "Custom task name", "taskGroupId": "yyyy"}, - }, + (HOOK_REPO, {"firedBy": "triggerHook", "taskId": "xxx", "payload": {}}), + ( + HOOK_REPO, + { + "firedBy": "triggerHook", + "taskId": "xxx", + "payload": {"taskName": "Custom task name", "taskGroupId": "yyyy"}, + }, + ), # Cron trigger - {"firedBy": "schedule", "taskId": "xxx"}, + (HOOK_CRON, {"firedBy": "schedule", "taskId": "xxx"}), ] -@pytest.mark.parametrize("payload", payloads) -def test_hook_syntax(payload): +@pytest.mark.parametrize("hook_path, payload", payloads) +def test_hook_syntax(hook_path, payload): """ Validate the Taskcluster hook syntax """ - assert os.path.exists(HOOK) + assert os.path.exists(hook_path) - with open(HOOK, "r") as f: + with open(hook_path, "r") as f: # Patch the hook as in the taskboot deployment content = f.read() content = content.replace("REVISION", "deadbeef1234") From f3be123f9284b065b1163c70f583c43bea2f2c17 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Thu, 12 Sep 2019 17:24:07 +0200 Subject: [PATCH 15/18] Specify required platforms per repo --- bot/code_coverage_bot/artifacts.py | 1 + bot/code_coverage_bot/hooks/base.py | 39 ++++++++++++++++++----------- bot/code_coverage_bot/hooks/repo.py | 12 ++++++++- 3 files changed, 36 insertions(+), 16 deletions(-) diff --git a/bot/code_coverage_bot/artifacts.py b/bot/code_coverage_bot/artifacts.py index 3b054b733..dce10828a 100644 --- a/bot/code_coverage_bot/artifacts.py +++ b/bot/code_coverage_bot/artifacts.py @@ -140,6 +140,7 @@ def download_all(self): [ taskcluster.get_task_details(build_task_id)["taskGroupId"] for build_task_id in self.task_ids.values() + if build_task_id is not None ] ) test_tasks = [ diff --git a/bot/code_coverage_bot/hooks/base.py b/bot/code_coverage_bot/hooks/base.py index daa0375a6..b179b75ef 100644 --- a/bot/code_coverage_bot/hooks/base.py +++ b/bot/code_coverage_bot/hooks/base.py @@ -18,8 +18,19 @@ logger = structlog.get_logger(__name__) +PLATFORMS = ["linux", "windows", "android-test", "android-emulator"] + + class Hook(object): - def __init__(self, repository, revision, task_name_filter, cache_root): + def __init__( + self, + repository, + revision, + task_name_filter, + cache_root, + fail, + required_platforms=[], + ): temp_dir = tempfile.mkdtemp() self.artifacts_dir = os.path.join(temp_dir, "ccov-artifacts") 
self.reports_dir = os.path.join(temp_dir, "ccov-reports") @@ -36,20 +47,18 @@ def __init__(self, repository, revision, task_name_filter, cache_root): assert os.path.isdir(cache_root), f"Cache root {cache_root} is not a dir." self.repo_dir = os.path.join(cache_root, self.branch) - task_ids = {} - for platform in ["linux", "windows", "android-test", "android-emulator"]: - task = taskcluster.get_task(self.branch, self.revision, platform) - - # On try, developers might have requested to run only one platform, and we trust them. - # On mozilla-central, we want to assert that every platform was run (except for android platforms - # as they are unstable). - if task is not None: - task_ids[platform] = task - elif ( - self.repository == config.MOZILLA_CENTRAL_REPOSITORY - and not platform.startswith("android") - ): - raise Exception("Code coverage build failed and was not indexed.") + # Load current coverage task for all platforms + task_ids = { + platform: taskcluster.get_task(self.branch, self.revision, platform) + for platform in PLATFORMS + } + + # Check the required platforms are present + for platform in required_platforms: + if not task_ids[platform]: + raise Exception( + f"Code coverage build on {platform} failed and was not indexed." + ) self.artifactsHandler = ArtifactsHandler( task_ids, self.artifacts_dir, task_name_filter diff --git a/bot/code_coverage_bot/hooks/repo.py b/bot/code_coverage_bot/hooks/repo.py index 656b325a8..3f91b8d24 100644 --- a/bot/code_coverage_bot/hooks/repo.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -30,6 +30,9 @@ "expected_extensions": [".js", ".cpp"], # Use local repo to load mercurial information "hgmo_local": True, + # On mozilla-central, we want to assert that every platform was run (except for android platforms + # as they are unstable). + "required_platforms": ["linux", "windows"], }, config.TRY_REPOSITORY: { # Only build the main report @@ -40,6 +43,8 @@ # Use remote try repo in order to return early if the # try build is not linked to Phabricator "hgmo_local": False, + # On try, developers might have requested to run only one platform, and we trust them. 
+ "required_platforms": [], }, } @@ -56,7 +61,12 @@ def __init__(self, repository, *args, **kwargs): for key in ("build_reports", "gcp_upload", "send_low_coverage_email"): assert key in self.config, f"Missing {key} in {repository} config" - super().__init__(repository, *args, **kwargs) + super().__init__( + repository, + required_platforms=self.config["required_platforms"], + *args, + **kwargs, + ) def run(self): # Check the covdir report does not already exists From 6873e281db5db68cc47f72d1a6e21f68aafdcbce Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Thu, 12 Sep 2019 17:31:07 +0200 Subject: [PATCH 16/18] Fix constructor --- bot/code_coverage_bot/hooks/base.py | 8 +------- bot/code_coverage_bot/hooks/repo.py | 9 ++++++++- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/bot/code_coverage_bot/hooks/base.py b/bot/code_coverage_bot/hooks/base.py index b179b75ef..897339d11 100644 --- a/bot/code_coverage_bot/hooks/base.py +++ b/bot/code_coverage_bot/hooks/base.py @@ -23,13 +23,7 @@ class Hook(object): def __init__( - self, - repository, - revision, - task_name_filter, - cache_root, - fail, - required_platforms=[], + self, repository, revision, task_name_filter, cache_root, required_platforms=[] ): temp_dir = tempfile.mkdtemp() self.artifacts_dir = os.path.join(temp_dir, "ccov-artifacts") diff --git a/bot/code_coverage_bot/hooks/repo.py b/bot/code_coverage_bot/hooks/repo.py index 3f91b8d24..79fdce66a 100644 --- a/bot/code_coverage_bot/hooks/repo.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -58,7 +58,14 @@ def __init__(self, repository, *args, **kwargs): assert repository in REPOSITORIES, f"Unsupported repository {repository}" self.config = REPOSITORIES[repository] - for key in ("build_reports", "gcp_upload", "send_low_coverage_email"): + for key in ( + "build_reports", + "gcp_upload", + "send_low_coverage_email", + "expected_extensions", + "hgmo_local", + "required_platforms", + ): assert key in self.config, f"Missing {key} in {repository} config" super().__init__( From 4dcc40a1b97c17eed387d65730618a26d8154796 Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Fri, 13 Sep 2019 10:02:55 +0200 Subject: [PATCH 17/18] bot: Split repo workflows in dedicated classes --- bot/code_coverage_bot/hooks/repo.py | 200 ++++++++++++++-------------- 1 file changed, 100 insertions(+), 100 deletions(-) diff --git a/bot/code_coverage_bot/hooks/repo.py b/bot/code_coverage_bot/hooks/repo.py index 79fdce66a..aa0eb430e 100644 --- a/bot/code_coverage_bot/hooks/repo.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -21,105 +21,11 @@ logger = structlog.get_logger(__name__) -REPOSITORIES = { - config.MOZILLA_CENTRAL_REPOSITORY: { - # Will build all the reports possible - "build_reports": None, - "gcp_upload": True, - "send_low_coverage_email": True, - "expected_extensions": [".js", ".cpp"], - # Use local repo to load mercurial information - "hgmo_local": True, - # On mozilla-central, we want to assert that every platform was run (except for android platforms - # as they are unstable). - "required_platforms": ["linux", "windows"], - }, - config.TRY_REPOSITORY: { - # Only build the main report - "build_reports": [("all", "all")], - "gcp_upload": False, - "send_low_coverage_email": False, - "expected_extensions": None, - # Use remote try repo in order to return early if the - # try build is not linked to Phabricator - "hgmo_local": False, - # On try, developers might have requested to run only one platform, and we trust them. 
- "required_platforms": [], - }, -} - - class RepositoryHook(Hook): """ - This function is executed when the bot is triggered at the end of a build and associated tests. + Base class to support specific workflows per repository """ - def __init__(self, repository, *args, **kwargs): - assert repository in REPOSITORIES, f"Unsupported repository {repository}" - self.config = REPOSITORIES[repository] - - for key in ( - "build_reports", - "gcp_upload", - "send_low_coverage_email", - "expected_extensions", - "hgmo_local", - "required_platforms", - ): - assert key in self.config, f"Missing {key} in {repository} config" - - super().__init__( - repository, - required_platforms=self.config["required_platforms"], - *args, - **kwargs, - ) - - def run(self): - # Check the covdir report does not already exists - if self.config["gcp_upload"] and uploader.gcp_covdir_exists( - self.branch, self.revision, "all", "all" - ): - logger.warn("Full covdir report already on GCP") - return - - self.retrieve_source_and_artifacts() - - self.check_javascript_files() - - reports = self.build_reports(only=self.config["build_reports"]) - logger.info("Built all covdir reports", nb=len(reports)) - - # Retrieve the full report - full_path = reports.get(("all", "all")) - assert full_path is not None, "Missing full report (all:all)" - report = json.load(open(full_path)) - - # Check extensions - if self.config["expected_extensions"]: - paths = uploader.covdir_paths(report) - for extension in self.config["expected_extensions"]: - assert any( - path.endswith(extension) for path in paths - ), "No {} file in the generated report".format(extension) - - # Upload reports on GCP - if self.config["gcp_upload"]: - self.upload_reports(reports) - logger.info("Uploaded all covdir reports", nb=len(reports)) - else: - logger.info("Skipping GCP upload") - - # Upload coverage on phabricator - changesets, coverage = self.upload_phabricator(report) - - # Send an email on low coverage - if self.config["send_low_coverage_email"]: - notify_email(self.revision, changesets, coverage) - logger.info("Sent low coverage email notification") - else: - logger.info("Skipping low coverage email notification") - def upload_reports(self, reports): """ Upload all provided covdir reports on GCP @@ -157,12 +63,12 @@ def check_javascript_files(self): f"{missing_files} are present in coverage reports, but missing from the repository" ) - def upload_phabricator(self, report): + def upload_phabricator(self, report, use_local_clone=True): phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) # Build HGMO config according to this repo's configuration hgmo_config = {} - if self.config["hgmo_local"]: + if use_local_clone: hgmo_config["repo_dir"] = self.repo_dir else: hgmo_config["server_address"] = self.repository @@ -184,10 +90,104 @@ def upload_phabricator(self, report): return changesets, coverage +class MozillaCentralHook(RepositoryHook): + """ + Code coverage hook for mozilla-central + * Check coverage artifacts content + * Build all covdir reports possible + * Upload all reports on GCP + * Upload main reports on Phabrictaor + * Send an email to admins on low coverage + """ + + def __init__(self, *args, **kwargs): + super().__init__( + config.MOZILLA_CENTRAL_REPOSITORY, + # On mozilla-central, we want to assert that every platform was run (except for android platforms + # as they are unstable). 
+ required_platforms=["linux", "windows"], + *args, + **kwargs, + ) + + def run(self): + # Check the covdir report does not already exists + if uploader.gcp_covdir_exists(self.branch, self.revision, "all", "all"): + logger.warn("Full covdir report already on GCP") + return + + self.retrieve_source_and_artifacts() + + self.check_javascript_files() + + reports = self.build_reports() + logger.info("Built all covdir reports", nb=len(reports)) + + # Retrieve the full report + full_path = reports.get(("all", "all")) + assert full_path is not None, "Missing full report (all:all)" + report = json.load(open(full_path)) + + # Check extensions + paths = uploader.covdir_paths(report) + for extension in [".js", ".cpp"]: + assert any( + path.endswith(extension) for path in paths + ), "No {} file in the generated report".format(extension) + + # Upload reports on GCP + self.upload_reports(reports) + logger.info("Uploaded all covdir reports", nb=len(reports)) + + # Upload coverage on phabricator + changesets, coverage = self.upload_phabricator(report) + + # Send an email on low coverage + notify_email(self.revision, changesets, coverage) + logger.info("Sent low coverage email notification") + + +class TryHook(RepositoryHook): + """ + Code coverage hook for a try push + * Build only main covdir report + * Upload that report on Phabrictaor + """ + + def __init__(self, *args, **kwargs): + super().__init__( + config.TRY_REPOSITORY, + # On try, developers might have requested to run only one platform, and we trust them. + required_platforms=[], + *args, + **kwargs, + ) + + def run(self): + self.retrieve_source_and_artifacts() + + reports = self.build_reports(only=[("all", "all")]) + logger.info("Built all covdir reports", nb=len(reports)) + + # Retrieve the full report + full_path = reports.get(("all", "all")) + assert full_path is not None, "Missing full report (all:all)" + report = json.load(open(full_path)) + + # Upload coverage on phabricator + self.upload_phabricator(report, use_local_clone=False) + + def main(): logger.info("Starting code coverage bot for repository") args = setup_cli() - hook = RepositoryHook( - args.repository, args.revision, args.task_name_filter, args.cache_root - ) + + hooks = { + config.MOZILLA_CENTRAL_REPOSITORY: MozillaCentralHook, + config.TRY_REPOSITORY: TryHook, + } + hook_class = hooks.get(args.repository) + assert hook_class is not None, f"Unsupported repository {args.repository}" + + hook = hook_class(args.revision, args.task_name_filter, args.cache_root) hook.run() From 2f15c4a518a1b23f6b3e6b49894ce8871422d06d Mon Sep 17 00:00:00 2001 From: Bastien Abadie Date: Fri, 13 Sep 2019 10:10:03 +0200 Subject: [PATCH 18/18] bot: Check remote hgmo changesets before loading artfioacts on try --- bot/code_coverage_bot/hooks/repo.py | 42 ++++++++++++++++------------- 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/bot/code_coverage_bot/hooks/repo.py b/bot/code_coverage_bot/hooks/repo.py index aa0eb430e..bdefbc1ae 100644 --- a/bot/code_coverage_bot/hooks/repo.py +++ b/bot/code_coverage_bot/hooks/repo.py @@ -63,10 +63,10 @@ def check_javascript_files(self): f"{missing_files} are present in coverage reports, but missing from the repository" ) - def upload_phabricator(self, report, use_local_clone=True): - phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) - - # Build HGMO config according to this repo's configuration + def get_hgmo_changesets(self, use_local_clone=True): + """ + Build HGMO changesets according to this repo's configuration + """ 
hgmo_config = {} if use_local_clone: hgmo_config["repo_dir"] = self.repo_dir @@ -74,20 +74,15 @@ def upload_phabricator(self, report, use_local_clone=True): hgmo_config["server_address"] = self.repository with hgmo.HGMO(**hgmo_config) as hgmo_server: - changesets = hgmo_server.get_automation_relevance_changesets(self.revision) - - if not any( - parse_revision_id(changeset["desc"]) is not None for changeset in changesets - ): - logger.info( - "None of the commits in the try push are linked to a Phabricator revision" - ) - return + return hgmo_server.get_automation_relevance_changesets(self.revision) + def upload_phabricator(self, report, changesets): + """ + Helper to upload coverage report on Phabricator + """ + phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision) logger.info("Upload changeset coverage data to Phabricator") - coverage = phabricatorUploader.upload(report, changesets) - - return changesets, coverage + return phabricatorUploader.upload(report, changesets) class MozillaCentralHook(RepositoryHook): @@ -140,7 +135,8 @@ def run(self): logger.info("Uploaded all covdir reports", nb=len(reports)) # Upload coverage on phabricator - changesets, coverage = self.upload_phabricator(report) + changesets = self.get_hgmo_changesets() + coverage = self.upload_phabricator(report, changesets) # Send an email on low coverage notify_email(self.revision, changesets, coverage) @@ -164,6 +160,16 @@ def __init__(self, *args, **kwargs): ) def run(self): + changesets = self.get_hgmo_changesets(use_local_clone=False) + + if not any( + parse_revision_id(changeset["desc"]) is not None for changeset in changesets + ): + logger.info( + "None of the commits in the try push are linked to a Phabricator revision" + ) + return + self.retrieve_source_and_artifacts() reports = self.build_reports(only=[("all", "all")]) @@ -175,7 +181,7 @@ def run(self): report = json.load(open(full_path)) # Upload coverage on phabricator - self.upload_phabricator(report, use_local_clone=False) + self.upload_phabricator(report, changesets) def main():
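
The hook definitions edited above rely on JSON-e conditionals: once the nested firedBy == 'triggerHook' guard is dropped, a single "key in payload" test decides whether the trigger payload is merged into the task. Below is a standalone sketch of that behaviour, assuming the json-e Python package (pip install json-e); the template fragment and payloads are illustrative, not the real hook files.

import jsone

# Fragment mirroring the flattened hook template: with the nested firedBy
# guard removed, only the presence of the key in the trigger payload decides
# whether the value is merged into the task definition.
template = {
    "$merge": [
        {
            "$if": "'taskGroupId' in payload",
            "then": {"taskGroupId": {"$eval": "payload.taskGroupId"}},
            "else": {},
        },
        {"metadata": {"name": "Code Coverage aggregation task - repo (CHANNEL)"}},
    ]
}

# Triggered by hand with an explicit task group: the key is merged in.
print(jsone.render(template, {"payload": {"taskGroupId": "yyyy"}}))

# Cron-style trigger with an empty payload: the conditional collapses to {}.
print(jsone.render(template, {"payload": {}}))

Rendering with an empty payload leaves only the static metadata, which is why the cron and manual triggers can share one template once the firedBy check is gone.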
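In hooks/base.py the coverage task IDs are now loaded for every platform, and only the repository-specific required_platforms list is enforced. A minimal, self-contained sketch of that lookup-then-assert pattern follows; the INDEX dictionary and get_task helper are fakes standing in for taskcluster.get_task, which really queries the Taskcluster index service.

# Sketch of the per-platform task lookup and required-platform assertion.
PLATFORMS = ["linux", "windows", "android-test", "android-emulator"]

# Fake index data: only linux and windows builds were indexed for this push.
INDEX = {
    ("mozilla-central", "deadbeef1234", "linux"): "task-linux",
    ("mozilla-central", "deadbeef1234", "windows"): "task-windows",
}


def get_task(branch, revision, platform):
    return INDEX.get((branch, revision, platform))


def load_task_ids(branch, revision, required_platforms=()):
    # Load the current coverage task for all platforms; missing ones stay None.
    task_ids = {
        platform: get_task(branch, revision, platform) for platform in PLATFORMS
    }

    # Check the required platforms are present.
    for platform in required_platforms:
        if not task_ids[platform]:
            raise Exception(
                f"Code coverage build on {platform} failed and was not indexed."
            )

    return task_ids


# mozilla-central requires linux and windows; try requires nothing.
print(load_task_ids("mozilla-central", "deadbeef1234", ["linux", "windows"]))
print(load_task_ids("try", "deadbeef1234", []))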
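The last patch moves the Phabricator-linkage check ahead of the artifact download, so a try push with no reviewed commit exits before any expensive work. The sketch below shows that early-return check under the assumption that commit descriptions carry the usual "Differential Revision:" trailer; the regex and sample changesets are illustrative stand-ins for code_coverage_bot.phabricator.parse_revision_id and for hgmo's automation relevance output.

import re

# Illustrative stand-in for parse_revision_id: pull the D<number> out of a
# "Differential Revision:" trailer, if the description has one.
REVISION_RE = re.compile(r"Differential Revision:.*/D(\d+)\s*$", re.MULTILINE)


def parse_revision_id(description):
    match = REVISION_RE.search(description)
    return int(match.group(1)) if match else None


# Changesets shaped like hgmo's automation relevance output (sample data).
changesets = [
    {"desc": "Bug 1111 - A try push without review metadata"},
    {
        "desc": "Bug 2222 - Fix foo\n\n"
        "Differential Revision: https://phabricator.services.mozilla.com/D42"
    },
]

if not any(parse_revision_id(c["desc"]) is not None for c in changesets):
    print("No commit is linked to a Phabricator revision, skip the workflow")
else:
    print("At least one commit is linked, download artifacts and build reports")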