bot: build detailed reports for test suites & platforms #144

Merged · 8 commits · Sep 4, 2019

Changes from 1 commit:
53 changes: 43 additions & 10 deletions bot/code_coverage_bot/artifacts.py
@@ -1,5 +1,7 @@
 # -*- coding: utf-8 -*-
+import collections
 import fnmatch
+import itertools
 import os
 import time
 
@@ -11,6 +13,9 @@
 logger = structlog.get_logger(__name__)
 
 
+Artifact = collections.namedtuple("Artifact", "path, task_id, platform, suite, chunk")
+
+
 SUITES_TO_IGNORE = [
     "awsy",
     "talos",
@@ -25,41 +30,65 @@ def __init__(self, task_ids, parent_dir="ccov-artifacts", task_name_filter="*"):
         self.task_ids = task_ids
         self.parent_dir = parent_dir
         self.task_name_filter = task_name_filter
+        self.artifacts = []
 
     def generate_path(self, platform, chunk, artifact):
         file_name = "%s_%s_%s" % (platform, chunk, os.path.basename(artifact["name"]))
         return os.path.join(self.parent_dir, file_name)
 
     def get_chunks(self, platform):
         return set(
-            f.split("_")[1]
-            for f in os.listdir(self.parent_dir)
-            if os.path.basename(f).startswith(f"{platform}_")
+            artifact.chunk
+            for artifact in self.artifacts
+            if artifact.platform == platform
         )
 
-    def get(self, platform=None, suite=None, chunk=None):
-        files = os.listdir(self.parent_dir)
+    def get_suites(self):
+        # Group by suite first
+        suites = itertools.groupby(
+            sorted(self.artifacts, key=lambda a: a.suite), lambda a: a.suite
+        )
+
+        out = {}
+        for suite, artifacts in suites:
+            artifacts = list(artifacts)
+
+            # List all available platforms
+            platforms = {a.platform for a in artifacts}
+            platforms.add("all")
+
+            # And list all possible permutations
+            for platform in platforms:
+                out[(platform, suite)] = [
+                    artifact.path
+                    for artifact in artifacts
+                    if platform == "all" or artifact.platform == platform
+                ]
+
+        return out
 
+    def get(self, platform=None, suite=None, chunk=None):
         if suite is not None and chunk is not None:
             raise Exception("suite and chunk can't both have a value")
 
         # Filter artifacts according to platform, suite and chunk.
         filtered_files = []
-        for fname in files:
-            if platform is not None and not fname.startswith("%s_" % platform):
+        for artifact in self.artifacts:
+            if platform is not None and artifact.platform != platform:
                 continue
 
-            if suite is not None and suite not in fname:
+            if suite is not None and artifact.suite != suite:
                 continue
 
-            if chunk is not None and ("%s_code-coverage" % chunk) not in fname:
+            if chunk is not None and artifact.chunk != chunk:
                 continue
 
-            filtered_files.append(os.path.join(self.parent_dir, fname))
+            filtered_files.append(artifact.path)
 
         return filtered_files
 
     def download(self, test_task):
         suite = taskcluster.get_suite(test_task["task"])
         chunk_name = taskcluster.get_chunk(test_task["task"])
         platform_name = taskcluster.get_platform(test_task["task"])
         test_task_id = test_task["status"]["taskId"]
@@ -75,6 +104,10 @@ def download(self, test_task):
             taskcluster.download_artifact(artifact_path, test_task_id, artifact["name"])
             logger.info("%s artifact downloaded" % artifact_path)
 
+            self.artifacts.append(
+                Artifact(artifact_path, test_task_id, platform_name, suite, chunk_name)
+            )
+
     def is_filtered_task(self, task):
         """
         Apply name filter from CLI args on task name
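
Note for reviewers: get_suites() keys its result on (platform, suite) pairs and adds a synthetic "all" platform that merges every platform seen for a suite. A minimal, runnable sketch of the same grouping idea, with made-up paths and task ids (not taken from this patch):

import collections
import itertools

# Same field layout as the namedtuple added in artifacts.py
Artifact = collections.namedtuple("Artifact", "path, task_id, platform, suite, chunk")

artifacts = [
    Artifact("ccov-artifacts/linux_mochitest-1_code-coverage-grcov.zip", "taskA", "linux", "mochitest", "mochitest-1"),
    Artifact("ccov-artifacts/windows_mochitest-2_code-coverage-grcov.zip", "taskB", "windows", "mochitest", "mochitest-2"),
]

# Group by suite, then emit one entry per platform plus the synthetic "all" platform
out = {}
for suite, group in itertools.groupby(sorted(artifacts, key=lambda a: a.suite), lambda a: a.suite):
    group = list(group)
    for platform in {a.platform for a in group} | {"all"}:
        out[(platform, suite)] = [
            a.path for a in group if platform == "all" or a.platform == platform
        ]

print(out[("all", "mochitest")])     # both paths
print(out[("linux", "mochitest")])   # only the linux artifact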
29 changes: 27 additions & 2 deletions bot/code_coverage_bot/codecov.py
@@ -120,14 +120,38 @@ def retrieve_source_and_artifacts(self):
 
     def generate_covdir(self):
         """
-        Build the covdir report using current artifacts
+        Build the full covdir report using current artifacts
         """
         output = grcov.report(
             self.artifactsHandler.get(), source_dir=self.repo_dir, out_format="covdir"
         )
         logger.info("Covdir report generated successfully")
         return json.loads(output)
 
+    def build_suites(self):
+        """
+        Build all the detailed covdir reports using current artifacts
+        and upload them directly on GCP
+        """
+        for (platform, suite), artifacts in self.artifactsHandler.get_suites().items():
+
+            # Generate covdir report for that suite & platform
+            logger.info(
+                "Building covdir suite report",
+                suite=suite,
+                platform=platform,
+                artifacts=len(artifacts),
+            )
+            output = grcov.report(
+                artifacts, source_dir=self.repo_dir, out_format="covdir"
+            )
+
+            # Then upload on GCP
+            report = json.loads(output)
+            uploader.gcp(
+                self.branch, self.revision, report, suite=suite, platform=platform
+            )
+
     # This function is executed when the bot is triggered at the end of a mozilla-central build.
     def go_from_trigger_mozilla_central(self):
         # Check the covdir report does not already exists
@@ -180,8 +204,9 @@ def go_from_trigger_mozilla_central(self):
         changesets_coverage = phabricatorUploader.upload(report, changesets)
 
         uploader.gcp(self.branch, self.revision, report)
+        logger.info("Main Build uploaded on GCP")
 
-        logger.info("Build uploaded on GCP")
+        self.build_suites()
         notify_email(self.revision, changesets, changesets_coverage)
 
     # This function is executed when the bot is triggered at the end of a try build.
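
Note: build_suites() produces one covdir report per (platform, suite) pair on top of the existing full report, and each one is pushed to GCS under the new naming scheme from uploader.py below. A small sketch of the resulting object names, using a made-up repository and revision:

# Mirrors the constant introduced in uploader.py
GCP_COVDIR_PATH = "{repository}/{revision}/{name}.json.zstd"

repository, revision = "mozilla-central", "abcdef123456"  # hypothetical values
pairs = [("all", "mochitest"), ("linux", "mochitest"), ("windows", "mochitest")]

# One "full" blob, plus one "platform:suite" blob per pair returned by get_suites()
names = ["full"] + [f"{platform}:{suite}" for platform, suite in pairs]
for name in names:
    print(GCP_COVDIR_PATH.format(repository=repository, revision=revision, name=name))
# mozilla-central/abcdef123456/full.json.zstd
# mozilla-central/abcdef123456/all:mochitest.json.zstd
# mozilla-central/abcdef123456/linux:mochitest.json.zstd
# mozilla-central/abcdef123456/windows:mochitest.json.zstd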
19 changes: 15 additions & 4 deletions bot/code_coverage_bot/uploader.py
@@ -12,10 +12,10 @@
 from code_coverage_tools.gcp import get_bucket
 
 logger = structlog.get_logger(__name__)
-GCP_COVDIR_PATH = "{repository}/{revision}.json.zstd"
+GCP_COVDIR_PATH = "{repository}/{revision}/{name}.json.zstd"
 
 
-def gcp(repository, revision, report):
+def gcp(repository, revision, report, platform=None, suite=None):
     """
     Upload a grcov raw report on Google Cloud Storage
     * Compress with zstandard
@@ -30,7 +30,12 @@ def gcp(repository, revision, report):
     archive = compressor.compress(json.dumps(report).encode("utf-8"))
 
     # Upload archive
-    path = GCP_COVDIR_PATH.format(repository=repository, revision=revision)
+    if platform and suite:
+        name = f"{platform}:{suite}"
+    else:
+        name = "full"
+
+    path = GCP_COVDIR_PATH.format(repository=repository, revision=revision, name=name)
     blob = bucket.blob(path)
     blob.upload_from_string(archive)
 
@@ -42,7 +47,13 @@ def gcp(repository, revision, report):
     logger.info("Uploaded {} on {}".format(path, bucket))
 
     # Trigger ingestion on backend
-    retry(lambda: gcp_ingest(repository, revision), retries=10, wait_between_retries=60)
+    # TODO: support suite ingestion
+    if suite is None:
+        retry(
+            lambda: gcp_ingest(repository, revision),
+            retries=10,
+            wait_between_retries=60,
+        )
 
     return blob
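
Note: the uploader compresses the covdir JSON with zstandard before uploading it. A minimal sketch of that round trip, independent of the GCS bucket handling and with a made-up covdir payload:

import json
import zstandard

report = {"name": "", "coveragePercent": 85.0, "children": {}}  # made-up covdir payload

# Compress the JSON payload the same way uploader.gcp() does
compressor = zstandard.ZstdCompressor()
archive = compressor.compress(json.dumps(report).encode("utf-8"))

# A consumer (e.g. the backend) can reverse it
decoded = json.loads(zstandard.ZstdDecompressor().decompress(archive))
assert decoded == report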
89 changes: 75 additions & 14 deletions bot/tests/test_artifacts.py
@@ -1,12 +1,13 @@
 # -*- coding: utf-8 -*-
 
+import hashlib
 import itertools
 import os
 from unittest import mock
 
 import pytest
 import responses
 
+from code_coverage_bot.artifacts import Artifact
 from code_coverage_bot.artifacts import ArtifactsHandler
 
 FILES = [
@@ -21,14 +22,28 @@
 
 
 @pytest.fixture
-def FAKE_ARTIFACTS_DIR(tmpdir):
-    for f in FILES:
-        open(os.path.join(tmpdir.strpath, f), "w")
-    return tmpdir.strpath
+def fake_artifacts(tmpdir):
+    def name_to_artifact(name):
+        """
+        Touch the fake artifact & build instance
+        """
+        path = os.path.join(tmpdir.strpath, name)
+        open(path, "w")
+
+        platform, chunk, _ = name.split("_")
+        return Artifact(
+            path,
+            hashlib.md5(name.encode("utf-8")).hexdigest()[:10],
+            platform,
+            chunk[: chunk.rindex("-")] if "-" in chunk else chunk,
+            chunk,
+        )
+
+    return [name_to_artifact(f) for f in FILES]
 
 
-def test_generate_path(FAKE_ARTIFACTS_DIR):
-    a = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)
+def test_generate_path(fake_artifacts):
+    a = ArtifactsHandler([])
     artifact_jsvm = {"name": "code-coverage-jsvm.info"}
     artifact_grcov = {"name": "code-coverage-grcov.zip"}
     assert os.path.join(
@@ -39,8 +54,9 @@ def test_generate_path(FAKE_ARTIFACTS_DIR):
     ) == a.generate_path("windows", "cppunit", artifact_grcov)
 
 
-def test_get_chunks(FAKE_ARTIFACTS_DIR):
-    a = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)
+def test_get_chunks(fake_artifacts):
+    a = ArtifactsHandler([])
+    a.artifacts = fake_artifacts
     assert a.get_chunks("windows") == {"mochitest-1", "xpcshell-7", "cppunit"}
     assert a.get_chunks("linux") == {
         "mochitest-2",
@@ -50,11 +66,56 @@ def test_get_chunks(FAKE_ARTIFACTS_DIR):
     }
 
 
-def test_get_coverage_artifacts(FAKE_ARTIFACTS_DIR):
+def test_get_suites(tmpdir, fake_artifacts):
     def add_dir(files):
-        return set([os.path.join(FAKE_ARTIFACTS_DIR, f) for f in files])
+        return [os.path.join(tmpdir.strpath, f) for f in files]
 
-    a = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)
+    a = ArtifactsHandler([])
+    a.artifacts = fake_artifacts
+    assert a.get_suites() == {
+        ("all", "cppunit"): add_dir(["windows_cppunit_code-coverage-grcov.zip"]),
+        ("windows", "cppunit"): add_dir(["windows_cppunit_code-coverage-grcov.zip"]),
+        ("all", "firefox-ui-functional"): add_dir(
+            ["linux_firefox-ui-functional-remote_code-coverage-jsvm.info"]
+        ),
+        ("linux", "firefox-ui-functional"): add_dir(
+            ["linux_firefox-ui-functional-remote_code-coverage-jsvm.info"]
+        ),
+        ("all", "mochitest"): add_dir(
+            [
+                "windows_mochitest-1_code-coverage-jsvm.info",
+                "linux_mochitest-2_code-coverage-grcov.zip",
+            ]
+        ),
+        ("linux", "mochitest"): add_dir(["linux_mochitest-2_code-coverage-grcov.zip"]),
+        ("windows", "mochitest"): add_dir(
+            ["windows_mochitest-1_code-coverage-jsvm.info"]
+        ),
+        ("all", "xpcshell"): add_dir(
+            [
+                "windows_xpcshell-7_code-coverage-jsvm.info",
+                "linux_xpcshell-7_code-coverage-grcov.zip",
+                "linux_xpcshell-3_code-coverage-grcov.zip",
+            ]
+        ),
+        ("linux", "xpcshell"): add_dir(
+            [
+                "linux_xpcshell-7_code-coverage-grcov.zip",
+                "linux_xpcshell-3_code-coverage-grcov.zip",
+            ]
+        ),
+        ("windows", "xpcshell"): add_dir(
+            ["windows_xpcshell-7_code-coverage-jsvm.info"]
+        ),
+    }
+
+
+def test_get_coverage_artifacts(tmpdir, fake_artifacts):
+    def add_dir(files):
+        return set([os.path.join(tmpdir.strpath, f) for f in files])
+
+    a = ArtifactsHandler([])
+    a.artifacts = fake_artifacts
     assert set(a.get()) == add_dir(FILES)
     assert set(a.get(suite="mochitest")) == add_dir(
         [
@@ -174,7 +235,7 @@ def build_task(task_state):
 
 @responses.activate
 def test_download_all(
-    LINUX_TASK_ID, LINUX_TASK, GROUP_TASKS_1, GROUP_TASKS_2, FAKE_ARTIFACTS_DIR
+    LINUX_TASK_ID, LINUX_TASK, GROUP_TASKS_1, GROUP_TASKS_2, fake_artifacts
 ):
     responses.add(
         responses.GET,
@@ -190,7 +251,7 @@ def test_download_all(
         status=200,
     )
 
-    a = ArtifactsHandler({"linux": LINUX_TASK_ID}, parent_dir=FAKE_ARTIFACTS_DIR)
+    a = ArtifactsHandler({"linux": LINUX_TASK_ID})
 
     downloaded = set()
 
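
Note: the fake_artifacts fixture derives every Artifact field from the fake file name; the md5-based task id is only a stable placeholder. A small sketch of that parsing for one of the FILES entries:

import hashlib

name = "linux_xpcshell-3_code-coverage-grcov.zip"

# Same splitting as name_to_artifact() in the fixture above
platform, chunk, _ = name.split("_")
suite = chunk[: chunk.rindex("-")] if "-" in chunk else chunk
task_id = hashlib.md5(name.encode("utf-8")).hexdigest()[:10]

print(platform, suite, chunk, task_id)
# linux xpcshell xpcshell-3 <stable fake id>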