diff --git a/.taskcluster.yml b/.taskcluster.yml
index efc7161a3..bb53a1d66 100644
--- a/.taskcluster.yml
+++ b/.taskcluster.yml
@@ -41,6 +41,14 @@ tasks:
else: ${event.ref[11:]}
else: 'dev'
+ backend_url:
+ $if: 'tasks_for == "github-push"'
+ then:
+ $if: 'event.ref == "refs/heads/testing"'
+ then: 'https://api.coverage.testing.moz.tools'
+ else: 'https://api.coverage.moz.tools'
+ else: 'https://api.coverage.moz.tools'
+
taskboot_image: "mozilla/taskboot:0.1.9"
in:
- taskId: {$eval: as_slugid("check_lint")}
@@ -226,6 +234,8 @@ tasks:
payload:
maxRunTime: 3600
image: node
+ env:
+ BACKEND_URL: "${backend_url}"
command:
- sh
- -lxce
diff --git a/backend/code_coverage_backend/api.py b/backend/code_coverage_backend/api.py
index e407e8664..f972b4c0c 100644
--- a/backend/code_coverage_backend/api.py
+++ b/backend/code_coverage_backend/api.py
@@ -8,6 +8,7 @@
from code_coverage_backend.config import COVERAGE_EXTENSIONS
from code_coverage_backend.gcp import load_cache
+from code_coverage_backend.report import DEFAULT_FILTER
DEFAULT_REPOSITORY = "mozilla-central"
logger = structlog.get_logger(__name__)
@@ -39,7 +40,13 @@ def coverage_latest(repository=DEFAULT_REPOSITORY):
abort(404)
-def coverage_for_path(path="", changeset=None, repository=DEFAULT_REPOSITORY):
+def coverage_for_path(
+ path="",
+ changeset=None,
+ repository=DEFAULT_REPOSITORY,
+ platform=DEFAULT_FILTER,
+ suite=DEFAULT_FILTER,
+):
"""
Aggregate coverage for a path, regardless of its type:
* file, gives its coverage percent
@@ -54,17 +61,17 @@ def coverage_for_path(path="", changeset=None, repository=DEFAULT_REPOSITORY):
try:
if changeset:
# Find closest report matching this changeset
- changeset, _ = gcp.find_closest_report(repository, changeset)
+ report = gcp.find_closest_report(repository, changeset, platform, suite)
else:
# Fallback to latest report
- changeset, _ = gcp.find_report(repository)
+ report = gcp.find_report(repository, platform, suite)
except Exception as e:
logger.warn("Failed to retrieve report: {}".format(e))
abort(404)
# Load tests data from GCP
try:
- return gcp.get_coverage(repository, changeset, path)
+ return gcp.get_coverage(report, path)
except Exception as e:
logger.warn(
"Failed to load coverage",
@@ -76,7 +83,14 @@ def coverage_for_path(path="", changeset=None, repository=DEFAULT_REPOSITORY):
abort(400)
-def coverage_history(repository=DEFAULT_REPOSITORY, path="", start=None, end=None):
+def coverage_history(
+ repository=DEFAULT_REPOSITORY,
+ path="",
+ start=None,
+ end=None,
+ platform=DEFAULT_FILTER,
+ suite=DEFAULT_FILTER,
+):
"""
List overall coverage from ingested reports over a period of time
"""
@@ -86,7 +100,7 @@ def coverage_history(repository=DEFAULT_REPOSITORY, path="", start=None, end=Non
abort(500)
try:
- return gcp.get_history(repository, path=path, start=start, end=end)
+ return gcp.get_history(repository, path, start, end, platform, suite)
except Exception as e:
logger.warn(
"Failed to load history",
@@ -97,3 +111,22 @@ def coverage_history(repository=DEFAULT_REPOSITORY, path="", start=None, end=Non
error=str(e),
)
abort(400)
+
+
+def coverage_filters(repository=DEFAULT_REPOSITORY):
+ """
+ List all available filters for that repository
+ """
+ gcp = load_cache()
+ if gcp is None:
+ logger.error("No GCP cache available")
+ abort(500)
+
+ try:
+ return {
+ "platforms": gcp.get_platforms(repository),
+ "suites": gcp.get_suites(repository),
+ }
+ except Exception as e:
+ logger.warn("Failed to load filters", repo=repository, error=str(e))
+ abort(400)
diff --git a/backend/code_coverage_backend/api.yml b/backend/code_coverage_backend/api.yml
index 0b506d8b5..76e12a8bc 100644
--- a/backend/code_coverage_backend/api.yml
+++ b/backend/code_coverage_backend/api.yml
@@ -42,6 +42,16 @@ paths:
description: Mozilla repository for this changeset (default to mozilla-central)
required: false
type: string
+ - name: suite
+ in: query
+ description: Filter the coverage using this test suite
+ required: false
+ type: string
+ - name: platform
+ in: query
+ description: Filter the coverage using this platform
+ required: false
+ type: string
responses:
200:
description: Code coverage information for a given file or directory at a given changeset
@@ -72,6 +82,16 @@ paths:
description: Path of the repository folder to get coverage info on.
required: false
type: string
+ - name: suite
+ in: query
+ description: Filter the coverage using this test suite
+ required: false
+ type: string
+ - name: platform
+ in: query
+ description: Filter the coverage using this platform
+ required: false
+ type: string
responses:
200:
description: Overall coverage of specified path over a period of time
@@ -86,3 +106,18 @@ paths:
description: File extensions supported for code coverage
tags:
- v2
+
+ /v2/filters:
+ get:
+ operationId: "code_coverage_backend.api.coverage_filters"
+ parameters:
+ - name: repository
+ in: query
+ description: Mozilla repository for these reports (defaults to mozilla-central)
+ required: false
+ type: string
+ responses:
+ 200:
+ description: Available platform and suite filters for the coverage endpoints
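+ # e.g. {"platforms": ["linux", "windows"], "suites": ["mochitest", "xpcshell"]}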
+ tags:
+ - v2
diff --git a/backend/code_coverage_backend/gcp.py b/backend/code_coverage_backend/gcp.py
index a1660374e..41b5badf0 100644
--- a/backend/code_coverage_backend/gcp.py
+++ b/backend/code_coverage_backend/gcp.py
@@ -7,25 +7,27 @@
from datetime import datetime
import redis
-import requests
import structlog
import zstandard as zstd
from dateutil.relativedelta import relativedelta
from code_coverage_backend import covdir
from code_coverage_backend import taskcluster
+from code_coverage_backend.hgmo import hgmo_pushes
+from code_coverage_backend.hgmo import hgmo_revision_details
+from code_coverage_backend.report import DEFAULT_FILTER
+from code_coverage_backend.report import Report
from code_coverage_tools.gcp import get_bucket
logger = structlog.get_logger(__name__)
__cache = None
+__hgmo = {}
-KEY_REPORTS = "reports:{repository}"
+KEY_REPORTS = "reports:{repository}:{platform}:{suite}"
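+# e.g. "reports:mozilla-central:all:all" indexes the unfiltered reports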
KEY_CHANGESET = "changeset:{repository}:{changeset}"
KEY_HISTORY = "history:{repository}"
-KEY_OVERALL_COVERAGE = "overall:{repository}:{changeset}"
-
-HGMO_REVISION_URL = "https://hg.mozilla.org/{repository}/json-rev/{revision}"
-HGMO_PUSHES_URL = "https://hg.mozilla.org/{repository}/json-pushes"
+KEY_PLATFORMS = "platforms:{repository}"
+KEY_SUITES = "suites:{repository}"
REPOSITORIES = ("mozilla-central",)
@@ -48,18 +50,6 @@ def load_cache():
return __cache
-def hgmo_revision_details(repository, changeset):
- """
- HGMO helper to retrieve details for a changeset
- """
- url = HGMO_REVISION_URL.format(repository=repository, revision=changeset)
- resp = requests.get(url)
- resp.raise_for_status()
- data = resp.json()
- assert "pushid" in data, "Missing pushid"
- return data["pushid"], data["date"][0]
-
-
class GCPCache(object):
"""
Cache on Redis GCP results
@@ -85,108 +75,115 @@ def __init__(self, reports_dir=None):
# Load most recent reports in cache
for repo in REPOSITORIES:
- for rev, _ in self.list_reports(repo, nb=1):
- self.download_report(repo, rev)
+ for report in self.list_reports(repo, nb=1):
+ self.download_report(report)
- def ingest_pushes(self, repository, min_push_id=None, nb_pages=3):
+ def ingest_pushes(self, repository, platform, suite, min_push_id=None, nb_pages=3):
"""
Ingest HGMO changesets and pushes into our Redis Cache
The pagination goes from oldest to newest, starting from the optional min_push_id
"""
- chunk_size = 8
- params = {"version": 2}
- if min_push_id is not None:
- assert isinstance(min_push_id, int)
- params["startID"] = min_push_id
- params["endID"] = min_push_id + chunk_size
-
- for page in range(nb_pages):
-
- r = requests.get(
- HGMO_PUSHES_URL.format(repository=repository), params=params
- )
- data = r.json()
-
- # Sort pushes to go from oldest to newest
- pushes = sorted(
- [(int(push_id), push) for push_id, push in data["pushes"].items()],
- key=lambda p: p[0],
- )
- if not pushes:
- return
-
- for push_id, push in pushes:
-
- changesets = push["changesets"]
- date = push["date"]
- self.store_push(repository, push_id, changesets, date)
-
- reports = [
- changeset
- for changeset in changesets
- if self.ingest_report(repository, push_id, changeset, date)
- ]
- if reports:
- logger.info("Found reports in that push", push_id=push_id)
-
- newest = pushes[-1][0]
- params["startID"] = newest
- params["endID"] = newest + chunk_size
-
- def ingest_report(self, repository, push_id, changeset, date):
+ ingested = False
+ for push_id, push in hgmo_pushes(repository, min_push_id, nb_pages):
+ for changeset in push["changesets"]:
+ # TODO: look up all neighboring reports on GCP
+ report = Report(
+ self.reports_dir,
+ repository,
+ changeset,
+ platform,
+ suite,
+ push_id=push_id,
+ date=push["date"],
+ )
+
+ # Always link changeset to push to find closest available report
+ self.redis.hmset(
+ KEY_CHANGESET.format(
+ repository=report.repository, changeset=report.changeset
+ ),
+ {"push": report.push_id, "date": report.date},
+ )
+
+ if not ingested and self.ingest_report(report):
+ logger.info(
+ "Found report in that push", push_id=push_id, report=str(report)
+ )
+
+ # Only ingest the first report found in a push, to stay below the 30s response time
+ ingested = True
+
+ def ingest_report(self, report):
"""
When a report exist for a changeset, download it and update redis data
"""
- assert isinstance(push_id, int)
- assert isinstance(date, int)
+ assert isinstance(report, Report)
# Download the report
- report_path = self.download_report(repository, changeset)
- if not report_path:
+ if not self.download_report(report):
+ logger.info("Report not available", report=str(report))
return False
# Read overall coverage for history
- key = KEY_OVERALL_COVERAGE.format(repository=repository, changeset=changeset)
- report = covdir.open_report(report_path)
- assert report is not None, "No report to ingest"
- overall_coverage = covdir.get_overall_coverage(report)
+ data = covdir.open_report(report.path)
+ assert data is not None, "No report to ingest"
+ overall_coverage = covdir.get_overall_coverage(data)
assert len(overall_coverage) > 0, "No overall coverage"
- self.redis.hmset(key, overall_coverage)
+ self.redis.hmset(report.key_overall, overall_coverage)
# Add the changeset to the sorted sets of known reports
# The numeric push_id is used as a score to keep the ingested
# changesets ordered
- self.redis.zadd(KEY_REPORTS.format(repository=repository), {changeset: push_id})
+ self.redis.zadd(
+ KEY_REPORTS.format(
+ repository=report.repository,
+ platform=report.platform,
+ suite=report.suite,
+ ),
+ {report.changeset: report.push_id},
+ )
# Add the changeset to the sorted sets of known reports by date
- self.redis.zadd(KEY_HISTORY.format(repository=repository), {changeset: date})
+ self.redis.zadd(
+ KEY_HISTORY.format(repository=report.repository),
+ {report.changeset: report.date},
+ )
- logger.info("Ingested report", changeset=changeset)
+ # Store the filters
+ if report.platform != DEFAULT_FILTER:
+ self.redis.sadd(
+ KEY_PLATFORMS.format(repository=report.repository), report.platform
+ )
+ if report.suite != DEFAULT_FILTER:
+ self.redis.sadd(
+ KEY_SUITES.format(repository=report.repository), report.suite
+ )
+
+ logger.info("Ingested report", report=str(report))
return True
- def download_report(self, repository, changeset):
+ def download_report(self, report):
"""
Download and extract a json+zstd covdir report
"""
+ assert isinstance(report, Report)
+
# Check the report is available on remote storage
- path = "{}/{}.json.zstd".format(repository, changeset)
- blob = self.bucket.blob(path)
+ blob = self.bucket.blob(report.gcp_path)
if not blob.exists():
- logger.debug("No report found on GCP", path=path)
+ logger.debug("No report found on GCP", path=report.gcp_path)
return False
- archive_path = os.path.join(self.reports_dir, blob.name)
- json_path = os.path.join(self.reports_dir, blob.name.rstrip(".zstd"))
- if os.path.exists(json_path):
- logger.info("Report already available", path=json_path)
- return json_path
+ if os.path.exists(report.path):
+ logger.info("Report already available", path=report.path)
+ return True
- os.makedirs(os.path.dirname(archive_path), exist_ok=True)
- blob.download_to_filename(archive_path)
- logger.info("Downloaded report archive", path=archive_path)
+ os.makedirs(os.path.dirname(report.archive_path), exist_ok=True)
+ blob.download_to_filename(report.archive_path)
+ logger.info("Downloaded report archive", path=report.archive_path)
- with open(json_path, "wb") as output:
- with open(archive_path, "rb") as archive:
+ with open(report.path, "wb") as output:
+ with open(report.archive_path, "rb") as archive:
dctx = zstd.ZstdDecompressor()
reader = dctx.stream_reader(archive)
while True:
@@ -195,34 +192,30 @@ def download_report(self, repository, changeset):
break
output.write(chunk)
- os.unlink(archive_path)
- logger.info("Decompressed report", path=json_path)
- return json_path
-
- def store_push(self, repository, push_id, changesets, date):
- """
- Store a push on redis cache, with its changesets
- """
- assert isinstance(push_id, int)
- assert isinstance(changesets, list)
-
- # Store changesets initial data
- for changeset in changesets:
- key = KEY_CHANGESET.format(repository=repository, changeset=changeset)
- self.redis.hmset(key, {"push": push_id, "date": date})
-
- logger.info("Stored new push data", push_id=push_id)
+ os.unlink(report.archive_path)
+ logger.info("Decompressed report", path=report.path)
+ return True
- def find_report(self, repository, push_range=(MAX_PUSH, MIN_PUSH)):
+ def find_report(
+ self,
+ repository,
+ platform=DEFAULT_FILTER,
+ suite=DEFAULT_FILTER,
+ push_range=(MAX_PUSH, MIN_PUSH),
+ ):
"""
Find the first report available before that push
"""
- results = self.list_reports(repository, nb=1, push_range=push_range)
+ results = self.list_reports(
+ repository, platform, suite, nb=1, push_range=push_range
+ )
if not results:
raise Exception("No report found")
return results[0]
- def find_closest_report(self, repository, changeset):
+ def find_closest_report(
+ self, repository, changeset, platform=DEFAULT_FILTER, suite=DEFAULT_FILTER
+ ):
"""
Find the closest report from specified changeset:
1. Lookup the changeset push in cache
@@ -236,18 +229,43 @@ def find_closest_report(self, repository, changeset):
if push_id:
# Redis lib uses bytes for all output
push_id = int(push_id.decode("utf-8"))
+ date = self.redis.hget(key, "date").decode("utf-8")
+
+ # Check the report variant is available locally
+ report = Report(
+ self.reports_dir,
+ repository,
+ changeset,
+ platform,
+ suite,
+ push_id=push_id,
+ date=date,
+ )
+ if not os.path.exists(report.path):
+ self.ingest_report(report)
else:
# Lookup push from HGMO (slow)
push_id, _ = hgmo_revision_details(repository, changeset)
# Ingest pushes as we clearly don't have it in cache
- self.ingest_pushes(repository, min_push_id=push_id - 1, nb_pages=1)
+ self.ingest_pushes(
+ repository, platform, suite, min_push_id=push_id - 1, nb_pages=1
+ )
# Load report from that push
- return self.find_report(repository, push_range=(push_id, MAX_PUSH))
+ return self.find_report(
+ repository, platform, suite, push_range=(push_id, MAX_PUSH)
+ )
- def list_reports(self, repository, nb=5, push_range=(MAX_PUSH, MIN_PUSH)):
+ def list_reports(
+ self,
+ repository,
+ platform=DEFAULT_FILTER,
+ suite=DEFAULT_FILTER,
+ nb=5,
+ push_range=(MAX_PUSH, MIN_PUSH),
+ ):
"""
List the last reports available on the server, ordered by push
by default from newer to older
@@ -262,7 +280,7 @@ def list_reports(self, repository, nb=5, push_range=(MAX_PUSH, MIN_PUSH)):
op = self.redis.zrangebyscore if start < end else self.redis.zrevrangebyscore
reports = op(
- KEY_REPORTS.format(repository=repository),
+ KEY_REPORTS.format(repository=repository, platform=platform, suite=suite),
start,
end,
start=0,
@@ -270,33 +288,45 @@ def list_reports(self, repository, nb=5, push_range=(MAX_PUSH, MIN_PUSH)):
withscores=True,
)
- return [(changeset.decode("utf-8"), int(push)) for changeset, push in reports]
+ return [
+ Report(
+ self.reports_dir,
+ repository,
+ changeset.decode("utf-8"),
+ platform,
+ suite,
+ push_id=push,
+ )
+ for changeset, push in reports
+ ]
- def get_coverage(self, repository, changeset, path):
+ def get_coverage(self, report, path):
"""
Load a report and its coverage for a specific path
and build a serializable representation
"""
- report_path = os.path.join(
- self.reports_dir, "{}/{}.json".format(repository, changeset)
- )
-
- report = covdir.open_report(report_path)
- if report is None:
+ assert isinstance(report, Report)
+ data = covdir.open_report(report.path)
+ if data is None:
# Try to download the report if it's missing locally
- report_path = self.download_report(repository, changeset)
- assert report_path is not False, "Missing report for {} at {}".format(
- repository, changeset
- )
+ assert self.download_report(report), "Missing report {}".format(report)
- report = covdir.open_report(report_path)
- assert report
+ data = covdir.open_report(report.path)
+ assert data
- out = covdir.get_path_coverage(report, path)
- out["changeset"] = changeset
+ out = covdir.get_path_coverage(data, path)
+ out["changeset"] = report.changeset
return out
- def get_history(self, repository, path="", start=None, end=None):
+ def get_history(
+ self,
+ repository,
+ path="",
+ start=None,
+ end=None,
+ platform=DEFAULT_FILTER,
+ suite=DEFAULT_FILTER,
+ ):
"""
Load the history overall coverage from the redis cache
Default to date range from now back to a year
@@ -318,22 +348,36 @@ def get_history(self, repository, path="", start=None, end=None):
def _coverage(changeset, date):
# Load overall coverage for specified path
changeset = changeset.decode("utf-8")
- key = KEY_OVERALL_COVERAGE.format(
- repository=repository, changeset=changeset
+
+ report = Report(
+ self.reports_dir, repository, changeset, platform, suite, date=date
)
- coverage = self.redis.hget(key, path)
+ coverage = self.redis.hget(report.key_overall, path)
if coverage is not None:
coverage = float(coverage)
return {"changeset": changeset, "date": int(date), "coverage": coverage}
return [_coverage(changeset, date) for changeset, date in history]
+ def get_platforms(self, repository):
+ """List all available platforms for a repository"""
+ platforms = self.redis.smembers(KEY_PLATFORMS.format(repository=repository))
+ return sorted(map(lambda x: x.decode("utf-8"), platforms))
+
+ def get_suites(self, repository):
+ """List all available suites for a repository"""
+ suites = self.redis.smembers(KEY_SUITES.format(repository=repository))
+ return sorted(map(lambda x: x.decode("utf-8"), suites))
+
def ingest_available_reports(self, repository):
"""
Ingest all the available reports for a repository
"""
assert isinstance(repository, str)
- REGEX_BLOB = re.compile(r"^{}/(\w+).json.zstd$".format(repository))
+
+ REGEX_BLOB = re.compile(
+ r"^{}/(\w+)/([\w\-]+):([\w\-]+).json.zstd$".format(repository)
+ )
for blob in self.bucket.list_blobs(prefix=repository):
# Get changeset from blob name
@@ -342,10 +386,9 @@ def ingest_available_reports(self, repository):
logger.warn("Invalid blob found {}".format(blob.name))
continue
changeset = match.group(1)
+ platform = match.group(2)
+ suite = match.group(3)
- # Get extra information from HGMO
- push_id, date = hgmo_revision_details(repository, changeset)
- logger.info("Found report", changeset=changeset, push=push_id)
-
- # Ingest report
- self.ingest_report(repository, push_id, changeset, int(date))
+ # Build report instance and ingest it
+ report = Report(self.reports_dir, repository, changeset, platform, suite)
+ self.ingest_report(report)
diff --git a/backend/code_coverage_backend/hgmo.py b/backend/code_coverage_backend/hgmo.py
new file mode 100644
index 000000000..de9d4fcee
--- /dev/null
+++ b/backend/code_coverage_backend/hgmo.py
@@ -0,0 +1,65 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import requests
+import structlog
+
+logger = structlog.get_logger(__name__)
+
+__hgmo = {}
+
+HGMO_REVISION_URL = "https://hg.mozilla.org/{repository}/json-rev/{revision}"
+HGMO_PUSHES_URL = "https://hg.mozilla.org/{repository}/json-pushes"
+
+
+def hgmo_revision_details(repository, changeset):
+ """
+ HGMO helper to retrieve details for a changeset
+ """
+ # Check cache first
+ key = (repository, changeset)
+ if key in __hgmo:
+ return __hgmo[key]
+
+ url = HGMO_REVISION_URL.format(repository=repository, revision=changeset)
+ resp = requests.get(url)
+ resp.raise_for_status()
+ data = resp.json()
+ assert "pushid" in data, "Missing pushid"
+ out = data["pushid"], data["date"][0]
+
+ # Store in cache
+ __hgmo[key] = out
+ return out
+
+
+def hgmo_pushes(repository, min_push_id, nb_pages, chunk_size=8):
+ """
+ HGMO helper to list all pushes in a limited number of pages
+ """
+ params = {"version": 2}
+ if min_push_id is not None:
+ assert isinstance(min_push_id, int)
+ params["startID"] = min_push_id
+ params["endID"] = min_push_id + chunk_size
+
+ for page in range(nb_pages):
+ r = requests.get(HGMO_PUSHES_URL.format(repository=repository), params=params)
+ data = r.json()
+
+ # Sort pushes to go from oldest to newest
+ pushes = sorted(
+ [(int(push_id), push) for push_id, push in data["pushes"].items()],
+ key=lambda p: p[0],
+ )
+ if not pushes:
+ return
+
+ for push in pushes:
+ yield push
+
+ newest = pushes[-1][0]
+ params["startID"] = newest
+ params["endID"] = newest + chunk_size
diff --git a/backend/code_coverage_backend/report.py b/backend/code_coverage_backend/report.py
new file mode 100644
index 000000000..31680e731
--- /dev/null
+++ b/backend/code_coverage_backend/report.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+import os
+
+import structlog
+
+from code_coverage_backend.hgmo import hgmo_revision_details
+
+logger = structlog.get_logger(__name__)
+
+DEFAULT_FILTER = "all"
+
+
+class Report(object):
+ """
+ A single coverage report
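+ for a repository changeset, optionally filtered by platform and test suite ("all" by default)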
+ """
+
+ def __init__(
+ self,
+ base_dir,
+ repository,
+ changeset,
+ platform=DEFAULT_FILTER,
+ suite=DEFAULT_FILTER,
+ push_id=None,
+ date=None,
+ ):
+ assert isinstance(repository, str)
+ assert isinstance(changeset, str)
+ self.base_dir = base_dir
+ self.repository = repository
+ self.changeset = changeset
+ self.platform = platform
+ self.suite = suite
+
+ # Get extra information from HGMO
+ if push_id or date:
+ self.push_id = push_id
+ self.date = date
+ else:
+ self.push_id, date = hgmo_revision_details(repository, changeset)
+ self.date = int(date)
+
+ def __str__(self):
+ return self.name
+
+ def __repr__(self):
+ return self.name
+
+ def __eq__(self, other):
+
+ return isinstance(other, Report) and (
+ self.base_dir,
+ self.repository,
+ self.changeset,
+ self.platform,
+ self.suite,
+ ) == (
+ other.base_dir,
+ other.repository,
+ other.changeset,
+ other.platform,
+ other.suite,
+ )
+
+ @property
+ def name(self):
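+ """Unique report name, e.g. "mozilla-central/<changeset>/all:all", also used to build storage paths"""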
+ return "{}/{}/{}:{}".format(
+ self.repository, self.changeset, self.platform, self.suite
+ )
+
+ @property
+ def path(self):
+ """Local path on FS, decompressed"""
+ return os.path.join(self.base_dir, f"{self.name}.json")
+
+ @property
+ def archive_path(self):
+ """Local path on FS, compressed"""
+ return os.path.join(self.base_dir, f"{self.name}.json.zstd")
+
+ @property
+ def gcp_path(self):
+ """Remote path on GCP storage"""
+ return f"{self.name}.json.zstd"
+
+ @property
+ def key_overall(self):
+ """Redis key to store the overall coverage data for that report"""
+ platform = self.platform or "all"
+ suite = self.suite or "all"
+ return f"overall:{self.repository}:{self.changeset}:{platform}:{suite}"
diff --git a/backend/tests/test_gcp.py b/backend/tests/test_gcp.py
index 9ed9fac1b..0a22e78cf 100644
--- a/backend/tests/test_gcp.py
+++ b/backend/tests/test_gcp.py
@@ -6,44 +6,33 @@
import pytest
-
-def test_store_push(mock_cache):
- """
- Test base method to store a push & changesets on redis
- """
- assert mock_cache.redis.keys("*") == []
- mock_cache.store_push("myrepo", 1234, ["deadbeef", "coffee"], 111222333)
-
- assert mock_cache.redis.keys("*") == [
- b"changeset:myrepo:deadbeef",
- b"changeset:myrepo:coffee",
- ]
- assert mock_cache.redis.hgetall("changeset:myrepo:deadbeef") == {
- b"push": b"1234",
- b"date": b"111222333",
- }
- assert mock_cache.redis.hgetall("changeset:myrepo:coffee") == {
- b"push": b"1234",
- b"date": b"111222333",
- }
+from code_coverage_backend.report import Report
def test_download_report(mock_cache):
"""
Test base method to download a report & store it on local FS
"""
- mock_cache.bucket.add_mock_blob("myrepo/deadbeef123.json.zstd")
+ mock_cache.bucket.add_mock_blob("myrepo/deadbeef123/all:all.json.zstd")
# Does not exist
- assert mock_cache.download_report("myrepo", "missing") is False
+ report = Report(mock_cache.reports_dir, "myrepo", "missing", date=1, push_id=1)
+ assert mock_cache.download_report(report) is False
- archive = os.path.join(mock_cache.reports_dir, "myrepo", "deadbeef123.json.zstd")
- payload = os.path.join(mock_cache.reports_dir, "myrepo", "deadbeef123.json")
+ archive = os.path.join(
+ mock_cache.reports_dir, "myrepo", "deadbeef123", "all:all.json.zstd"
+ )
+ payload = os.path.join(
+ mock_cache.reports_dir, "myrepo", "deadbeef123", "all:all.json"
+ )
assert not os.path.exists(archive)
assert not os.path.exists(payload)
# Valid blob
- assert mock_cache.download_report("myrepo", "deadbeef123") == payload
+ report = Report(mock_cache.reports_dir, "myrepo", "deadbeef123", date=1, push_id=1)
+ assert mock_cache.download_report(report) is True
+ assert archive == report.archive_path
+ assert payload == report.path
# Only the payload remains after download
assert not os.path.exists(archive)
@@ -51,15 +40,17 @@ def test_download_report(mock_cache):
assert json.load(open(payload)) == {"children": {}, "coveragePercent": 0.0}
+ assert mock_cache.redis.keys("*") == []
+
def test_ingestion(mock_cache):
"""
Test ingestion of several reports and their retrieval through Redis index
"""
# Setup blobs
- mock_cache.bucket.add_mock_blob("myrepo/rev1.json.zstd", coverage=0.1)
- mock_cache.bucket.add_mock_blob("myrepo/rev2.json.zstd", coverage=0.2)
- mock_cache.bucket.add_mock_blob("myrepo/rev10.json.zstd", coverage=1.0)
+ mock_cache.bucket.add_mock_blob("myrepo/rev1/all:all.json.zstd", coverage=0.1)
+ mock_cache.bucket.add_mock_blob("myrepo/rev2/all:all.json.zstd", coverage=0.2)
+ mock_cache.bucket.add_mock_blob("myrepo/rev10/all:all.json.zstd", coverage=1.0)
# No reports at first
assert mock_cache.redis.zcard(b"reports:myrepo") == 0
@@ -67,25 +58,30 @@ def test_ingestion(mock_cache):
assert mock_cache.list_reports("myrepo") == []
# Ingest those 3 reports
- mock_cache.ingest_report("myrepo", 1, "rev1", 1000)
- mock_cache.ingest_report("myrepo", 2, "rev2", 2000)
- mock_cache.ingest_report("myrepo", 10, "rev10", 9000)
+ report_1 = Report(mock_cache.reports_dir, "myrepo", "rev1", date=1000, push_id=1)
+ report_2 = Report(mock_cache.reports_dir, "myrepo", "rev2", date=2000, push_id=2)
+ report_10 = Report(mock_cache.reports_dir, "myrepo", "rev10", date=9000, push_id=10)
+ mock_cache.ingest_report(report_1)
+ mock_cache.ingest_report(report_2)
+ mock_cache.ingest_report(report_10)
# They must be in redis and on the file system
- assert mock_cache.redis.zcard(b"reports:myrepo") == 3
+ assert mock_cache.redis.zcard(b"reports:myrepo:all:all") == 3
assert mock_cache.redis.zcard(b"history:myrepo") == 3
- assert os.path.exists(os.path.join(mock_cache.reports_dir, "myrepo", "rev1.json"))
- assert os.path.exists(os.path.join(mock_cache.reports_dir, "myrepo", "rev2.json"))
- assert os.path.exists(os.path.join(mock_cache.reports_dir, "myrepo", "rev10.json"))
+ assert os.path.exists(
+ os.path.join(mock_cache.reports_dir, "myrepo", "rev1", "all:all.json")
+ )
+ assert os.path.exists(
+ os.path.join(mock_cache.reports_dir, "myrepo", "rev2", "all:all.json")
+ )
+ assert os.path.exists(
+ os.path.join(mock_cache.reports_dir, "myrepo", "rev10", "all:all.json")
+ )
# Reports are exposed, and sorted by push
assert mock_cache.list_reports("another") == []
- assert mock_cache.list_reports("myrepo") == [
- ("rev10", 10),
- ("rev2", 2),
- ("rev1", 1),
- ]
- assert mock_cache.find_report("myrepo") == ("rev10", 10)
+ assert mock_cache.list_reports("myrepo") == [report_10, report_2, report_1]
+ assert mock_cache.find_report("myrepo") == report_10
assert mock_cache.get_history("myrepo", start=200, end=20000) == [
{"changeset": "rev10", "coverage": 1.0, "date": 9000},
{"changeset": "rev2", "coverage": 0.2, "date": 2000},
@@ -93,16 +89,17 @@ def test_ingestion(mock_cache):
]
# Even if we add a smaller one later on, reports are still sorted
- mock_cache.bucket.add_mock_blob("myrepo/rev5.json.zstd", coverage=0.5)
- mock_cache.ingest_report("myrepo", 5, "rev5", 5000)
+ mock_cache.bucket.add_mock_blob("myrepo/rev5/all:all.json.zstd", coverage=0.5)
+ report_5 = Report(mock_cache.reports_dir, "myrepo", "rev5", date=5000, push_id=5)
+ mock_cache.ingest_report(report_5)
assert mock_cache.list_reports("myrepo") == [
- ("rev10", 10),
- ("rev5", 5),
- ("rev2", 2),
- ("rev1", 1),
+ report_10,
+ report_5,
+ report_2,
+ report_1,
]
- assert mock_cache.find_report("myrepo") == ("rev10", 10)
- assert mock_cache.find_report("myrepo", push_range=(7, 0)) == ("rev5", 5)
+ assert mock_cache.find_report("myrepo") == report_10
+ assert mock_cache.find_report("myrepo", push_range=(7, 0)) == report_5
assert mock_cache.get_history("myrepo", start=200, end=20000) == [
{"changeset": "rev10", "coverage": 1.0, "date": 9000},
{"changeset": "rev5", "coverage": 0.5, "date": 5000},
@@ -118,14 +115,18 @@ def test_ingest_hgmo(mock_cache, mock_hgmo):
# Add a report on push 995
rev = hashlib.md5(b"995").hexdigest()
- mock_cache.bucket.add_mock_blob("myrepo/{}.json.zstd".format(rev), coverage=0.5)
+ mock_cache.bucket.add_mock_blob(
+ "myrepo/{}/all:all.json.zstd".format(rev), coverage=0.5
+ )
# Ingest last pushes
assert mock_cache.list_reports("myrepo") == []
assert len(mock_cache.redis.keys("changeset:myrepo:*")) == 0
- mock_cache.ingest_pushes("myrepo")
+ mock_cache.ingest_pushes("myrepo", "all", "all")
assert len(mock_cache.redis.keys("changeset:myrepo:*")) > 0
- assert mock_cache.list_reports("myrepo") == [(rev, 995)]
+ assert mock_cache.list_reports("myrepo") == [
+ Report(mock_cache.reports_dir, "myrepo", rev, push_id=1, date=995)
+ ]
def test_closest_report(mock_cache, mock_hgmo):
@@ -150,34 +151,38 @@ def test_closest_report(mock_cache, mock_hgmo):
# Add a report on 994, 2 pushes after our revision
report_rev = hashlib.md5(b"994").hexdigest()
mock_cache.bucket.add_mock_blob(
- "myrepo/{}.json.zstd".format(report_rev), coverage=0.5
+ "myrepo/{}/all:all.json.zstd".format(report_rev), coverage=0.5
+ )
+ report_994 = Report(
+ mock_cache.reports_dir, "myrepo", report_rev, push_id=1, date=994
)
# Add a report on 990, 2 pushes before our revision
base_rev = hashlib.md5(b"990").hexdigest()
mock_cache.bucket.add_mock_blob(
- "myrepo/{}.json.zstd".format(base_rev), coverage=0.4
+ "myrepo/{}/all:all.json.zstd".format(base_rev), coverage=0.4
)
+ report_990 = Report(mock_cache.reports_dir, "myrepo", base_rev, push_id=1, date=990)
# Now we have a report !
assert mock_cache.list_reports("myrepo") == []
- assert mock_cache.find_closest_report("myrepo", revision) == (report_rev, 994)
- assert mock_cache.list_reports("myrepo") == [(report_rev, 994)]
+ assert mock_cache.find_closest_report("myrepo", revision) == report_994
+ assert mock_cache.list_reports("myrepo") == [report_994]
# This should also work for revisions before
revision = "991{}".format(uuid.uuid4().hex[3:])
- assert mock_cache.find_closest_report("myrepo", revision) == (report_rev, 994)
+ assert mock_cache.find_closest_report("myrepo", revision) == report_994
# ... and the revision on the push itself
revision = "994{}".format(uuid.uuid4().hex[3:])
- assert mock_cache.find_closest_report("myrepo", revision) == (report_rev, 994)
+ assert mock_cache.find_closest_report("myrepo", revision) == report_994
# We can also retrieve the base revision
revision = "990{}".format(uuid.uuid4().hex[3:])
- assert mock_cache.find_closest_report("myrepo", revision) == (base_rev, 990)
+ assert mock_cache.find_closest_report("myrepo", revision) == report_990
revision = "989{}".format(uuid.uuid4().hex[3:])
- assert mock_cache.find_closest_report("myrepo", revision) == (base_rev, 990)
- assert mock_cache.list_reports("myrepo") == [(report_rev, 994), (base_rev, 990)]
+ assert mock_cache.find_closest_report("myrepo", revision) == report_990
+ assert mock_cache.list_reports("myrepo") == [report_994, report_990]
# But not for revisions after the push
revision = "995{}".format(uuid.uuid4().hex[3:])
@@ -191,15 +196,16 @@ def test_get_coverage(mock_cache):
Test coverage access with re-download
"""
# No report at first
+ report = Report(mock_cache.reports_dir, "myrepo", "myhash", push_id=1, date=1)
with pytest.raises(AssertionError) as e:
- mock_cache.get_coverage("myrepo", "myhash", "")
- assert str(e.value) == "Missing report for myrepo at myhash"
+ mock_cache.get_coverage(report, "")
+ assert str(e.value) == "Missing report myrepo/myhash/all:all"
# Report available online
- mock_cache.bucket.add_mock_blob("myrepo/myhash.json.zstd")
+ mock_cache.bucket.add_mock_blob("myrepo/myhash/all:all.json.zstd")
# Coverage available
- coverage = mock_cache.get_coverage("myrepo", "myhash", "")
+ coverage = mock_cache.get_coverage(report, "")
assert coverage == {
"children": [],
"coveragePercent": 0.0,
@@ -209,12 +215,12 @@ def test_get_coverage(mock_cache):
}
# Remove local file
- path = os.path.join(mock_cache.reports_dir, "myrepo", "myhash.json")
+ path = os.path.join(mock_cache.reports_dir, "myrepo", "myhash", "all:all.json")
assert os.path.exists(path)
os.unlink(path)
# Coverage still available
- coverage = mock_cache.get_coverage("myrepo", "myhash", "")
+ coverage = mock_cache.get_coverage(report, "")
assert coverage == {
"children": [],
"coveragePercent": 0.0,
@@ -229,7 +235,7 @@ def test_get_coverage(mock_cache):
f.write("break")
# Coverage still available
- coverage = mock_cache.get_coverage("myrepo", "myhash", "")
+ coverage = mock_cache.get_coverage(report, "")
assert coverage == {
"children": [],
"coveragePercent": 0.0,
diff --git a/bot/code_coverage_bot/artifacts.py b/bot/code_coverage_bot/artifacts.py
index 51019cbef..65c95fa8f 100644
--- a/bot/code_coverage_bot/artifacts.py
+++ b/bot/code_coverage_bot/artifacts.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+import collections
import fnmatch
+import itertools
import os
import time
@@ -11,6 +13,9 @@
logger = structlog.get_logger(__name__)
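+# A single coverage artifact downloaded from a test task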
+Artifact = collections.namedtuple("Artifact", "path, task_id, platform, suite, chunk")
+
+
SUITES_TO_IGNORE = [
"awsy",
"talos",
@@ -25,6 +30,7 @@ def __init__(self, task_ids, parent_dir="ccov-artifacts", task_name_filter="*"):
self.task_ids = task_ids
self.parent_dir = parent_dir
self.task_name_filter = task_name_filter
+ self.artifacts = []
def generate_path(self, platform, chunk, artifact):
file_name = "%s_%s_%s" % (platform, chunk, os.path.basename(artifact["name"]))
@@ -32,34 +38,66 @@ def generate_path(self, platform, chunk, artifact):
def get_chunks(self, platform):
return set(
- f.split("_")[1]
- for f in os.listdir(self.parent_dir)
- if os.path.basename(f).startswith(f"{platform}_")
+ artifact.chunk
+ for artifact in self.artifacts
+ if artifact.platform == platform
)
- def get(self, platform=None, suite=None, chunk=None):
- files = os.listdir(self.parent_dir)
+ def get_suites(self):
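+ """
+ Group artifact paths by (platform, suite), including "all" aggregates,
+ so a covdir report can be built for each combination
+ """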
+ # Add the full report
+ out = collections.defaultdict(list)
+ out[("all", "all")] = [artifact.path for artifact in self.artifacts]
+
+ # Group by suite first
+ suites = itertools.groupby(
+ sorted(self.artifacts, key=lambda a: a.suite), lambda a: a.suite
+ )
+ for suite, artifacts in suites:
+ artifacts = list(artifacts)
+
+ # List all available platforms
+ platforms = {a.platform for a in artifacts}
+ platforms.add("all")
+
+ # And list all possible platform + suite combinations
+ out[("all", suite)] += [artifact.path for artifact in artifacts]
+ for platform in platforms:
+ if platform != "all":
+ out[(platform, "all")] += [
+ artifact.path
+ for artifact in artifacts
+ if artifact.platform == platform
+ ]
+ out[(platform, suite)] = [
+ artifact.path
+ for artifact in artifacts
+ if platform == "all" or artifact.platform == platform
+ ]
+
+ return out
+ def get(self, platform=None, suite=None, chunk=None):
if suite is not None and chunk is not None:
raise Exception("suite and chunk can't both have a value")
# Filter artifacts according to platform, suite and chunk.
filtered_files = []
- for fname in files:
- if platform is not None and not fname.startswith("%s_" % platform):
+ for artifact in self.artifacts:
+ if platform is not None and artifact.platform != platform:
continue
- if suite is not None and suite not in fname:
+ if suite is not None and artifact.suite != suite:
continue
- if chunk is not None and ("%s_code-coverage" % chunk) not in fname:
+ if chunk is not None and artifact.chunk != chunk:
continue
- filtered_files.append(os.path.join(self.parent_dir, fname))
+ filtered_files.append(artifact.path)
return filtered_files
def download(self, test_task):
+ suite = taskcluster.get_suite(test_task["task"])
chunk_name = taskcluster.get_chunk(test_task["task"])
platform_name = taskcluster.get_platform(test_task["task"])
test_task_id = test_task["status"]["taskId"]
@@ -75,6 +113,10 @@ def download(self, test_task):
taskcluster.download_artifact(artifact_path, test_task_id, artifact["name"])
logger.info("%s artifact downloaded" % artifact_path)
+ self.artifacts.append(
+ Artifact(artifact_path, test_task_id, platform_name, suite, chunk_name)
+ )
+
def is_filtered_task(self, task):
"""
Apply name filter from CLI args on task name
diff --git a/bot/code_coverage_bot/codecov.py b/bot/code_coverage_bot/codecov.py
index 28297d5ca..c8be6ff8a 100644
--- a/bot/code_coverage_bot/codecov.py
+++ b/bot/code_coverage_bot/codecov.py
@@ -42,6 +42,7 @@ def __init__(self, repository, revision, task_name_filter, cache_root):
temp_dir = tempfile.mkdtemp()
self.artifacts_dir = os.path.join(temp_dir, "ccov-artifacts")
+ self.reports_dir = os.path.join(temp_dir, "ccov-reports")
self.index_service = taskcluster_config.get_service("index")
@@ -118,31 +119,56 @@ def retrieve_source_and_artifacts(self):
# Thread 2 - Clone repository.
executor.submit(self.clone_repository, self.repository, self.revision)
- def generate_covdir(self):
+ def build_reports(self, only=None):
"""
- Build the covdir report using current artifacts
+ Build all the possible covdir reports using current artifacts
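+ Returns a dict mapping each (platform, suite) tuple to the local path of its covdir JSON report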
"""
- output = grcov.report(
- self.artifactsHandler.get(), source_dir=self.repo_dir, out_format="covdir"
- )
- logger.info("Covdir report generated successfully")
- return json.loads(output)
+ os.makedirs(self.reports_dir, exist_ok=True)
- # This function is executed when the bot is triggered at the end of a mozilla-central build.
- def go_from_trigger_mozilla_central(self):
- # Check the covdir report does not already exists
- if uploader.gcp_covdir_exists(self.branch, self.revision):
- logger.warn("Covdir report already on GCP")
- return
+ reports = {}
+ for (platform, suite), artifacts in self.artifactsHandler.get_suites().items():
- self.retrieve_source_and_artifacts()
+ if only is not None and (platform, suite) not in only:
+ continue
+
+ # Generate covdir report for that suite & platform
+ logger.info(
+ "Building covdir suite report",
+ suite=suite,
+ platform=platform,
+ artifacts=len(artifacts),
+ )
+ output = grcov.report(
+ artifacts, source_dir=self.repo_dir, out_format="covdir"
+ )
+
+ # Write output on FS
+ path = os.path.join(self.reports_dir, f"{platform}.{suite}.json")
+ with open(path, "wb") as f:
+ f.write(output)
+
+ reports[(platform, suite)] = path
+
+ return reports
+
+ def upload_reports(self, reports):
+ """
+ Upload all provided covdir reports on GCP
+ """
+ for (platform, suite), path in reports.items():
+ report = json.load(open(path))
+ uploader.gcp(
+ self.branch, self.revision, report, suite=suite, platform=platform
+ )
- # Check that all JavaScript files present in the coverage artifacts actually exist.
- # If they don't, there might be a bug in the LCOV rewriter.
+ def check_javascript_files(self):
+ """
+ Check that all JavaScript files present in the coverage artifacts actually exist.
+ If they don't, there might be a bug in the LCOV rewriter.
+ """
for artifact in self.artifactsHandler.get():
if "jsvm" not in artifact:
continue
-
with zipfile.ZipFile(artifact, "r") as zf:
for file_name in zf.namelist():
with zf.open(file_name, "r") as fl:
@@ -161,7 +187,25 @@ def go_from_trigger_mozilla_central(self):
f"{missing_files} are present in coverage reports, but missing from the repository"
)
- report = self.generate_covdir()
+ # This function is executed when the bot is triggered at the end of a mozilla-central build.
+ def go_from_trigger_mozilla_central(self):
+ # Check the covdir report does not already exist
+ if uploader.gcp_covdir_exists(self.branch, self.revision, "all", "all"):
+ logger.warn("Full covdir report already on GCP")
+ return
+
+ self.retrieve_source_and_artifacts()
+
+ # TODO: restore that check
+ # self.check_javascript_files()
+
+ reports = self.build_reports()
+ logger.info("Built all covdir reports", nb=len(reports))
+
+ # Retrieve the full report
+ full_path = reports.get(("all", "all"))
+ assert full_path is not None, "Missing full report (all:all)"
+ report = json.load(open(full_path))
paths = uploader.covdir_paths(report)
expected_extensions = [".js", ".cpp"]
@@ -170,6 +214,9 @@ def go_from_trigger_mozilla_central(self):
path.endswith(extension) for path in paths
), "No {} file in the generated report".format(extension)
+ self.upload_reports(reports)
+ logger.info("Uploaded all covdir reports", nb=len(reports))
+
# Get pushlog and ask the backend to generate the coverage by changeset
# data, which will be cached.
with hgmo.HGMO(self.repo_dir) as hgmo_server:
@@ -179,9 +226,6 @@ def go_from_trigger_mozilla_central(self):
phabricatorUploader = PhabricatorUploader(self.repo_dir, self.revision)
changesets_coverage = phabricatorUploader.upload(report, changesets)
- uploader.gcp(self.branch, self.revision, report)
-
- logger.info("Build uploaded on GCP")
notify_email(self.revision, changesets, changesets_coverage)
# This function is executed when the bot is triggered at the end of a try build.
@@ -201,7 +245,10 @@ def go_from_trigger_try(self):
self.retrieve_source_and_artifacts()
- report = self.generate_covdir()
+ reports = self.build_reports(only=[("all", "all")])
+ full_path = reports.get(("all", "all"))
+ assert full_path is not None, "Missing full report (all:all)"
+ report = json.load(open(full_path))
logger.info("Upload changeset coverage data to Phabricator")
phabricatorUploader.upload(report, changesets)
diff --git a/bot/code_coverage_bot/uploader.py b/bot/code_coverage_bot/uploader.py
index fe573453a..d48857fca 100644
--- a/bot/code_coverage_bot/uploader.py
+++ b/bot/code_coverage_bot/uploader.py
@@ -12,10 +12,10 @@
from code_coverage_tools.gcp import get_bucket
logger = structlog.get_logger(__name__)
-GCP_COVDIR_PATH = "{repository}/{revision}.json.zstd"
+GCP_COVDIR_PATH = "{repository}/{revision}/{platform}:{suite}.json.zstd"
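+# e.g. "mozilla-central/<revision>/all:all.json.zstd" for the full unfiltered report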
-def gcp(repository, revision, report):
+def gcp(repository, revision, report, platform, suite):
"""
Upload a grcov raw report on Google Cloud Storage
* Compress with zstandard
@@ -23,6 +23,8 @@ def gcp(repository, revision, report):
* Trigger ingestion on channel's backend
"""
assert isinstance(report, dict)
+ assert isinstance(platform, str)
+ assert isinstance(suite, str)
bucket = get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])
# Compress report
@@ -30,7 +32,9 @@ def gcp(repository, revision, report):
archive = compressor.compress(json.dumps(report).encode("utf-8"))
# Upload archive
- path = GCP_COVDIR_PATH.format(repository=repository, revision=revision)
+ path = GCP_COVDIR_PATH.format(
+ repository=repository, revision=revision, platform=platform, suite=suite
+ )
blob = bucket.blob(path)
blob.upload_from_string(archive)
@@ -42,22 +46,28 @@ def gcp(repository, revision, report):
logger.info("Uploaded {} on {}".format(path, bucket))
# Trigger ingestion on backend
- retry(lambda: gcp_ingest(repository, revision), retries=10, wait_between_retries=60)
+ retry(
+ lambda: gcp_ingest(repository, revision, platform, suite),
+ retries=10,
+ wait_between_retries=60,
+ )
return blob
-def gcp_covdir_exists(repository, revision):
+def gcp_covdir_exists(repository, revision, platform, suite):
"""
Check if a covdir report exists on the Google Cloud Storage bucket
"""
bucket = get_bucket(secrets[secrets.GOOGLE_CLOUD_STORAGE])
- path = GCP_COVDIR_PATH.format(repository=repository, revision=revision)
+ path = GCP_COVDIR_PATH.format(
+ repository=repository, revision=revision, platform=platform, suite=suite
+ )
blob = bucket.blob(path)
return blob.exists()
-def gcp_ingest(repository, revision):
+def gcp_ingest(repository, revision, platform, suite):
"""
The GCP report ingestion is triggered remotely on a backend
by making a simple HTTP request on the /v2/path endpoint
@@ -65,12 +75,18 @@ def gcp_ingest(repository, revision):
will download automatically the new report.
"""
params = {"repository": repository, "changeset": revision}
+ if platform:
+ params["platform"] = platform
+ if suite:
+ params["suite"] = suite
backend_host = secrets[secrets.BACKEND_HOST]
logger.info(
"Ingesting report on backend",
host=backend_host,
repository=repository,
revision=revision,
+ platform=platform,
+ suite=suite,
)
resp = requests.get("{}/v2/path".format(backend_host), params=params)
resp.raise_for_status()
diff --git a/bot/history.json b/bot/history.json
new file mode 100644
index 000000000..85756ed66
--- /dev/null
+++ b/bot/history.json
@@ -0,0 +1,357 @@
+[
+ {
+ "changeset": "a5710687f9b44562f9ee54907ce81beb386da41b",
+ "coverage": 62.81,
+ "date": 1566165104
+ },
+ {
+ "changeset": "aacc1ab5272e62a55a50ab84f6b09b6a5b023ff5",
+ "coverage": 62.85,
+ "date": 1565559768
+ },
+ {
+ "changeset": "3a71baea939144b4ec37805a932f0250c74986b1",
+ "coverage": 62.94,
+ "date": 1565214732
+ },
+ {
+ "changeset": "2f9fcfd57416a8424ff12a11c9734ee9a2fb6ed0",
+ "coverage": 62.96,
+ "date": 1565177501
+ },
+ {
+ "changeset": "4ba2efc86669143d4ce3e31c6d8c180a0dbf28bf",
+ "coverage": 62.92,
+ "date": 1565171748
+ },
+ {
+ "changeset": "fb699b3c084c8d35e52f2b282de90ecb7b0992cd",
+ "coverage": 62.93,
+ "date": 1565127812
+ },
+ {
+ "changeset": "20688459a2572d79ccc77ab030c2f67e8baff591",
+ "coverage": 62.86,
+ "date": 1565127714
+ },
+ {
+ "changeset": "ba4cdceb505942e3e6ad46db72ce8ddf71fe4a1c",
+ "coverage": 62.92,
+ "date": 1565084441
+ },
+ {
+ "changeset": "0b0758d7768c226d1b460a69a03d9a4668b8a7c4",
+ "coverage": 62.86,
+ "date": 1565084130
+ },
+ {
+ "changeset": "dba2c8019074a017293f708cec0292607c2e803c",
+ "coverage": 62.83,
+ "date": 1565042430
+ },
+ {
+ "changeset": "d681969e4480a2cad692be94adcbc2b861efb723",
+ "coverage": 62.83,
+ "date": 1564998853
+ },
+ {
+ "changeset": "6e3e96412fd9cf6d5873f8b5fb13cbe151ea9d62",
+ "coverage": 62.88,
+ "date": 1564955293
+ },
+ {
+ "changeset": "30a8df41ff6db0323d045bdc56cb5f0c95e92b9a",
+ "coverage": 62.92,
+ "date": 1564912072
+ },
+ {
+ "changeset": "22b33b20fba18c8b22ca2231afc8c41136544df5",
+ "coverage": 62.79,
+ "date": 1564853940
+ },
+ {
+ "changeset": "37229cef2cc79d44470afc9e04016bac8ddd0ae8",
+ "coverage": 62.74,
+ "date": 1564782761
+ },
+ {
+ "changeset": "b5f2fa86e69682a8cf1571f478c31c9afb26fba1",
+ "coverage": 62.86,
+ "date": 1564739212
+ },
+ {
+ "changeset": "5f8aeb02af2259acfceed9e1abe987849fbb67ca",
+ "coverage": 62.91,
+ "date": 1564695747
+ },
+ {
+ "changeset": "46b354546ad842b72f02a025126dfedcc38e5ad0",
+ "coverage": 62.9,
+ "date": 1564588003
+ },
+ {
+ "changeset": "e259d43073bc84dbf3aa547c3dd080d62c89f45f",
+ "coverage": 62.78,
+ "date": 1564523430
+ },
+ {
+ "changeset": "639f502ded6b1d8db390b625b65692622a3c943f",
+ "coverage": 62.85,
+ "date": 1564480172
+ },
+ {
+ "changeset": "927abd2c418b308a474ba96e58df4bfdbc6a3ca5",
+ "coverage": 62.86,
+ "date": 1564436319
+ },
+ {
+ "changeset": "4274af431edec7f6f08fb340275297253566202c",
+ "coverage": 62.86,
+ "date": 1564393823
+ },
+ {
+ "changeset": "1416771db267f77fa6bd28b2eaa214a706427f55",
+ "coverage": 62.83,
+ "date": 1564263341
+ },
+ {
+ "changeset": "8007ff804e997b621df8df0a63b2466cbea88621",
+ "coverage": 62.9,
+ "date": 1564219868
+ },
+ {
+ "changeset": "a6fa09658817e99eb52335a0773f567ab20a34aa",
+ "coverage": 62.89,
+ "date": 1564158416
+ },
+ {
+ "changeset": "2a9a26aba8288cb1aeb52a2629c6d2e19243b18c",
+ "coverage": 62.88,
+ "date": 1564091346
+ },
+ {
+ "changeset": "5805cd9ae2947386263069e1a3b2d832384bd45f",
+ "coverage": 62.81,
+ "date": 1564048518
+ },
+ {
+ "changeset": "922be4adb708aee5ab59602b38fbb19f37c2de53",
+ "coverage": 62.86,
+ "date": 1564026811
+ },
+ {
+ "changeset": "6598e37c88d2816deed4fdaedbddf9c9dade7987",
+ "coverage": 62.72,
+ "date": 1563984435
+ },
+ {
+ "changeset": "e36533bbd9166cb1c7049fa51f3b3a7b9fa1835f",
+ "coverage": 62.75,
+ "date": 1563940403
+ },
+ {
+ "changeset": "b8141448e0baa767e8eff61e28c5a418c438f3d2",
+ "coverage": 62.78,
+ "date": 1563831826
+ },
+ {
+ "changeset": "17267dd9b2281d253c06133e990ba2ed330a7519",
+ "coverage": 62.81,
+ "date": 1563831648
+ },
+ {
+ "changeset": "64fc6a9a9fb2fbcb5483241e7cfde82e1dc5156f",
+ "coverage": 62.85,
+ "date": 1563770786
+ },
+ {
+ "changeset": "eb7f4d56f54b3283fc15983ee859b5e62fcb9f3b",
+ "coverage": 62.81,
+ "date": 1563550241
+ },
+ {
+ "changeset": "5fff2a9bf0785afbdb774c178135cc9e9ad18211",
+ "coverage": 62.78,
+ "date": 1563508535
+ },
+ {
+ "changeset": "5fceb8c496bfe98c1081d9ff8712e9107dd22767",
+ "coverage": 62.85,
+ "date": 1563486651
+ },
+ {
+ "changeset": "ca1dbd076e1e47a8616ab6ee3e6fd4083d576857",
+ "coverage": 62.8,
+ "date": 1563465283
+ },
+ {
+ "changeset": "8442d36972721915a1d00d7c4f06f0a9872b0769",
+ "coverage": 62.84,
+ "date": 1563465147
+ },
+ {
+ "changeset": "b3f5385fa0b37bca7c46269ff394aca964baec7c",
+ "coverage": 62.85,
+ "date": 1563443698
+ },
+ {
+ "changeset": "4116a7254a4ee7033f38b7126007bd2af678a7b8",
+ "coverage": 62.87,
+ "date": 1563443452
+ },
+ {
+ "changeset": "b6d154b2309846531934289c1e167208fc385ac3",
+ "coverage": 32.05,
+ "date": 1563400279
+ },
+ {
+ "changeset": "32d7797bd8bd91e7b62ef2a5e19b8888881766f1",
+ "coverage": 62.83,
+ "date": 1562276697
+ },
+ {
+ "changeset": "6a2bd09a6bf9aada14581e923408d7308479b76d",
+ "coverage": 62.83,
+ "date": 1562233530
+ },
+ {
+ "changeset": "da33e6261a816c477c919b30a67042c886eb56b3",
+ "coverage": 62.84,
+ "date": 1562103964
+ },
+ {
+ "changeset": "70e7c3ef6cae2266147c38ad250692ffe84aec26",
+ "coverage": 62.93,
+ "date": 1561628088
+ },
+ {
+ "changeset": "207bcf72dac70e275daee08aebfbb5df0900c9d0",
+ "coverage": 62.96,
+ "date": 1561455622
+ },
+ {
+ "changeset": "4b3431481d55e13e739e42dd5e526c11bea22f45",
+ "coverage": 63.07,
+ "date": 1559166620
+ },
+ {
+ "changeset": "2bb77ed1fcc5ad06f91612d419160f54c09369db",
+ "coverage": 63.05,
+ "date": 1559145508
+ },
+ {
+ "changeset": "8d86cc081fe5d16980a0a610af6b967cc03bf814",
+ "coverage": 63.05,
+ "date": 1559128548
+ },
+ {
+ "changeset": "3c26311b3d8df20c4c0b00eda34b932df121b65c",
+ "coverage": 63.05,
+ "date": 1559123415
+ },
+ {
+ "changeset": "d78adc84bde8cb23a2e34c444d48406bd198c797",
+ "coverage": 63.06,
+ "date": 1559123327
+ },
+ {
+ "changeset": "d12917561f27e4c4b4808707b55e88973dc4a385",
+ "coverage": 62.92,
+ "date": 1558560943
+ },
+ {
+ "changeset": "5f95b3f2ea44723ba6a8c41a4b27c88032df709f",
+ "coverage": 62.92,
+ "date": 1558517603
+ },
+ {
+ "changeset": "257f2c96cef502a1d674df56c8e39d76d8ed4d89",
+ "coverage": 62.9,
+ "date": 1558462834
+ },
+ {
+ "changeset": "3c0f78074b727fbae112b6eda111d4c4d30cc3ec",
+ "coverage": 62.92,
+ "date": 1558456141
+ },
+ {
+ "changeset": "b74e5737da64a7af28ab4f81f996950917aa71c5",
+ "coverage": 62.9,
+ "date": 1558398256
+ },
+ {
+ "changeset": "319a369ccde4ff1c4842c62fe90e9adf4eb5c028",
+ "coverage": 62.74,
+ "date": 1558359743
+ },
+ {
+ "changeset": "e013f1f17109a8c22cbc7abf6f78db55bd2a8efb",
+ "coverage": 62.95,
+ "date": 1558196397
+ },
+ {
+ "changeset": "9b2f851979cb8d0dd0cd2618656eddee32e4f143",
+ "coverage": 62.93,
+ "date": 1558175128
+ },
+ {
+ "changeset": "7c540586aedbc69e75649ab34fbaaceee912bebd",
+ "coverage": 62.62,
+ "date": 1558129213
+ },
+ {
+ "changeset": "bc17771ceb28c31cd06889be51ae2eda72efc451",
+ "coverage": 62.59,
+ "date": 1558110149
+ },
+ {
+ "changeset": "96802be91766718fa33fe2e98f7a910e4dd1bb5e",
+ "coverage": 62.58,
+ "date": 1558013638
+ },
+ {
+ "changeset": "6f732caaed60783f57944a66f7ea494f5fd78d6c",
+ "coverage": 62.96,
+ "date": 1557870092
+ },
+ {
+ "changeset": "230016dbba05b36ecc1ccada9abdc2d5370a0ae7",
+ "coverage": 62.94,
+ "date": 1557839847
+ },
+ {
+ "changeset": "fa3cfee27619ddc9bcbcf70555bda4eb1e815146",
+ "coverage": 62.94,
+ "date": 1557735776
+ },
+ {
+ "changeset": "b83d8a064f1694627e66f2dd3a683b66c350b3b3",
+ "coverage": 62.94,
+ "date": 1557697330
+ },
+ {
+ "changeset": "4b3945b758896f5153e61a1b84a2b3614a98021a",
+ "coverage": 62.87,
+ "date": 1557668753
+ },
+ {
+ "changeset": "03166449953fbcaaf6c66d2c3b358319781a0e52",
+ "coverage": 62.69,
+ "date": 1556888354
+ },
+ {
+ "changeset": "da2b564f6df03fd8ce37f2eb394fd48289d43a55",
+ "coverage": 62.41,
+ "date": 1556596117
+ },
+ {
+ "changeset": "094b212a3cbf55d92b85db2b5e1d04f8d46a5dfb",
+ "coverage": 62.75,
+ "date": 1556271975
+ },
+ {
+ "changeset": "5b2a282f73d3a6dfef9094f7f64eb72d0f227e13",
+ "coverage": 62.66,
+ "date": 1555842312
+ }
+]
diff --git a/bot/tests/test_artifacts.py b/bot/tests/test_artifacts.py
index d03aa4a06..c59955250 100644
--- a/bot/tests/test_artifacts.py
+++ b/bot/tests/test_artifacts.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-
+import hashlib
import itertools
import os
from unittest import mock
@@ -7,6 +7,7 @@
import pytest
import responses
+from code_coverage_bot.artifacts import Artifact
from code_coverage_bot.artifacts import ArtifactsHandler
FILES = [
@@ -21,14 +22,28 @@
@pytest.fixture
-def FAKE_ARTIFACTS_DIR(tmpdir):
- for f in FILES:
- open(os.path.join(tmpdir.strpath, f), "w")
- return tmpdir.strpath
+def fake_artifacts(tmpdir):
+ def name_to_artifact(name):
+ """
+ Touch the fake artifact & build instance
+ """
+ path = os.path.join(tmpdir.strpath, name)
+ open(path, "w")
+
+ platform, chunk, _ = name.split("_")
+ return Artifact(
+ path,
+ hashlib.md5(name.encode("utf-8")).hexdigest()[:10],
+ platform,
+ chunk[: chunk.rindex("-")] if "-" in chunk else chunk,
+ chunk,
+ )
+
+ return [name_to_artifact(f) for f in FILES]
-def test_generate_path(FAKE_ARTIFACTS_DIR):
- a = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)
+def test_generate_path(fake_artifacts):
+ a = ArtifactsHandler([])
artifact_jsvm = {"name": "code-coverage-jsvm.info"}
artifact_grcov = {"name": "code-coverage-grcov.zip"}
assert os.path.join(
@@ -39,8 +54,9 @@ def test_generate_path(FAKE_ARTIFACTS_DIR):
) == a.generate_path("windows", "cppunit", artifact_grcov)
-def test_get_chunks(FAKE_ARTIFACTS_DIR):
- a = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)
+def test_get_chunks(fake_artifacts):
+ a = ArtifactsHandler([])
+ a.artifacts = fake_artifacts
assert a.get_chunks("windows") == {"mochitest-1", "xpcshell-7", "cppunit"}
assert a.get_chunks("linux") == {
"mochitest-2",
@@ -50,11 +66,82 @@ def test_get_chunks(FAKE_ARTIFACTS_DIR):
}
-def test_get_coverage_artifacts(FAKE_ARTIFACTS_DIR):
+def test_get_suites(tmpdir, fake_artifacts):
def add_dir(files):
- return set([os.path.join(FAKE_ARTIFACTS_DIR, f) for f in files])
+ return [os.path.join(tmpdir.strpath, f) for f in files]
- a = ArtifactsHandler([], parent_dir=FAKE_ARTIFACTS_DIR)
+ a = ArtifactsHandler([])
+ a.artifacts = fake_artifacts
+ assert dict(a.get_suites()) == {
+ ("all", "all"): add_dir(
+ [
+ "windows_mochitest-1_code-coverage-jsvm.info",
+ "linux_mochitest-2_code-coverage-grcov.zip",
+ "windows_xpcshell-7_code-coverage-jsvm.info",
+ "linux_xpcshell-7_code-coverage-grcov.zip",
+ "linux_xpcshell-3_code-coverage-grcov.zip",
+ "windows_cppunit_code-coverage-grcov.zip",
+ "linux_firefox-ui-functional-remote_code-coverage-jsvm.info",
+ ]
+ ),
+ ("linux", "all"): add_dir(
+ [
+ "linux_firefox-ui-functional-remote_code-coverage-jsvm.info",
+ "linux_mochitest-2_code-coverage-grcov.zip",
+ "linux_xpcshell-7_code-coverage-grcov.zip",
+ "linux_xpcshell-3_code-coverage-grcov.zip",
+ ]
+ ),
+ ("windows", "all"): add_dir(
+ [
+ "windows_cppunit_code-coverage-grcov.zip",
+ "windows_mochitest-1_code-coverage-jsvm.info",
+ "windows_xpcshell-7_code-coverage-jsvm.info",
+ ]
+ ),
+ ("all", "cppunit"): add_dir(["windows_cppunit_code-coverage-grcov.zip"]),
+ ("windows", "cppunit"): add_dir(["windows_cppunit_code-coverage-grcov.zip"]),
+ ("all", "firefox-ui-functional"): add_dir(
+ ["linux_firefox-ui-functional-remote_code-coverage-jsvm.info"]
+ ),
+ ("linux", "firefox-ui-functional"): add_dir(
+ ["linux_firefox-ui-functional-remote_code-coverage-jsvm.info"]
+ ),
+ ("all", "mochitest"): add_dir(
+ [
+ "windows_mochitest-1_code-coverage-jsvm.info",
+ "linux_mochitest-2_code-coverage-grcov.zip",
+ ]
+ ),
+ ("linux", "mochitest"): add_dir(["linux_mochitest-2_code-coverage-grcov.zip"]),
+ ("windows", "mochitest"): add_dir(
+ ["windows_mochitest-1_code-coverage-jsvm.info"]
+ ),
+ ("all", "xpcshell"): add_dir(
+ [
+ "windows_xpcshell-7_code-coverage-jsvm.info",
+ "linux_xpcshell-7_code-coverage-grcov.zip",
+ "linux_xpcshell-3_code-coverage-grcov.zip",
+ ]
+ ),
+ ("linux", "xpcshell"): add_dir(
+ [
+ "linux_xpcshell-7_code-coverage-grcov.zip",
+ "linux_xpcshell-3_code-coverage-grcov.zip",
+ ]
+ ),
+ ("windows", "xpcshell"): add_dir(
+ ["windows_xpcshell-7_code-coverage-jsvm.info"]
+ ),
+ }
+
+
+def test_get_coverage_artifacts(tmpdir, fake_artifacts):
+ def add_dir(files):
+ return set([os.path.join(tmpdir.strpath, f) for f in files])
+
+ a = ArtifactsHandler([])
+ a.artifacts = fake_artifacts
assert set(a.get()) == add_dir(FILES)
assert set(a.get(suite="mochitest")) == add_dir(
[
@@ -174,7 +261,7 @@ def build_task(task_state):
@responses.activate
def test_download_all(
- LINUX_TASK_ID, LINUX_TASK, GROUP_TASKS_1, GROUP_TASKS_2, FAKE_ARTIFACTS_DIR
+ LINUX_TASK_ID, LINUX_TASK, GROUP_TASKS_1, GROUP_TASKS_2, fake_artifacts
):
responses.add(
responses.GET,
@@ -190,7 +277,7 @@ def test_download_all(
status=200,
)
- a = ArtifactsHandler({"linux": LINUX_TASK_ID}, parent_dir=FAKE_ARTIFACTS_DIR)
+ a = ArtifactsHandler({"linux": LINUX_TASK_ID})
downloaded = set()
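
For illustration: the fake_artifacts fixture above feeds ArtifactsHandler.get_suites(), which buckets every artifact under its own (platform, suite) pair plus the 'all' aggregates checked in test_get_suites. A hypothetical JavaScript sketch of that grouping, illustration only (the bot implements it in Python):

// Group artifact paths by (platform, suite), including 'all' aggregates.
function groupBySuites(artifacts) {
  // artifacts: [{path, platform, suite}, ...]
  const groups = new Map();
  const add = (platform, suite, path) => {
    const key = `${platform}/${suite}`;
    if (!groups.has(key)) {
      groups.set(key, []);
    }
    groups.get(key).push(path);
  };
  for (const a of artifacts) {
    add('all', 'all', a.path); // global aggregate
    add(a.platform, 'all', a.path); // per-platform aggregate
    add('all', a.suite, a.path); // per-suite aggregate
    add(a.platform, a.suite, a.path); // exact bucket
  }
  return groups;
}
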
diff --git a/bot/tools/covdir_gen.py b/bot/tools/covdir_gen.py
index e2be44679..7a5659756 100644
--- a/bot/tools/covdir_gen.py
+++ b/bot/tools/covdir_gen.py
@@ -1,85 +1,31 @@
# -*- coding: utf-8 -*-
import argparse
+import json
import os
from datetime import datetime
-import requests
-from libmozdata.vcs_map import download_mapfile
-from libmozdata.vcs_map import git_to_mercurial
from taskcluster.utils import slugId
from code_coverage_bot.secrets import secrets
-from code_coverage_tools.taskcluter import TaskclusterConfig
+from code_coverage_bot.taskcluster import taskcluster_config
-CODECOV_URL = "https://codecov.io/api/gh/marco-c/gecko-dev/commit"
MC_REPO = "https://hg.mozilla.org/mozilla-central"
-HOOK_GROUP = "project-releng"
-HOOK_ID = "services-{app_channel}-codecoverage/bot-generation"
-
-taskcluster = TaskclusterConfig()
-taskcluster.auth(
- os.environ["TASKCLUSTER_CLIENT_ID"], os.environ["TASKCLUSTER_ACCESS_TOKEN"]
-)
-secrets.load(os.environ["TASKCLUSTER_SECRET"])
-
-
-def list_commits(codecov_token, maximum=None, unique=None, skip_commits=[]):
- """
- List all the commits ingested on codecov
- """
- assert unique in (None, "week", "day")
- params = {"access_token": codecov_token, "page": 1}
- nb = 0
- dates = set()
- while True:
- resp = requests.get(CODECOV_URL, params=params)
- resp.raise_for_status()
- data = resp.json()
-
- if not data["commits"]:
- return
-
- for commit in data["commits"]:
-
- # Skip commit if that day or week has already been processed earlier
- day = datetime.strptime(commit["timestamp"], "%Y-%m-%d %H:%M:%S").date()
- week = day.isocalendar()[:2]
- if unique == "day" and day in dates:
- continue
- if unique == "week" and week in dates:
- continue
- dates.add(day)
- dates.add(week)
-
- # Convert git to mercurial revision
- commit["mercurial"] = git_to_mercurial(commit["commitid"])
- if commit["mercurial"] in skip_commits:
- print(
- "Skipping already processed commit {}".format(commit["mercurial"])
- )
- continue
-
- yield commit
- nb += 1
-
- if maximum is not None and nb >= maximum:
- return
-
- params["page"] += 1
+HOOK_GROUP = "project-relman"
+HOOK_ID = "code-coverage-{app_channel}"
def trigger_task(task_group_id, commit):
"""
Trigger a code coverage task to build covdir at a specified revision
"""
- assert "mercurial" in commit
- name = "covdir {} - {} - {}".format(
- secrets[secrets.APP_CHANNEL], commit["timestamp"], commit["mercurial"]
+ date = datetime.fromtimestamp(commit["date"]).strftime("%Y-%m-%d")
+ name = "covdir with suites on {} - {} - {}".format(
+ secrets[secrets.APP_CHANNEL], date, commit["changeset"]
)
- hooks = taskcluster.get_service("hooks")
+ hooks = taskcluster_config.get_service("hooks")
payload = {
"REPOSITORY": MC_REPO,
- "REVISION": commit["mercurial"],
+ "REVISION": commit["changeset"],
"taskGroupId": task_group_id,
"taskName": name,
}
@@ -91,11 +37,6 @@ def main():
# CLI args
parser = argparse.ArgumentParser()
parser.add_argument("--nb-tasks", type=int, default=5, help="NB of tasks to create")
- parser.add_argument(
- "--unique",
- choices=("day", "week"),
- help="Trigger only one task per day or week",
- )
parser.add_argument(
"--group", type=str, default=slugId(), help="Task group to create/update"
)
@@ -106,20 +47,17 @@ def main():
help="List actions without triggering any new task",
)
parser.add_argument(
- "--codecov-token",
- type=str,
- default=os.environ.get("CODECOV_TOKEN"),
- help="Codecov access token",
+ "history", type=open, help="JSON payload of /v2/history endpoint"
)
args = parser.parse_args()
- # Download revision mapper database
- print("Downloading revision database...")
- download_mapfile()
+ # Setup Taskcluster
+ taskcluster_config.auth()
+ secrets.load(os.environ["TASKCLUSTER_SECRET"])
# List existing tags & commits
print("Group", args.group)
- queue = taskcluster.get_service("queue")
+ queue = taskcluster_config.get_service("queue")
try:
group = queue.listTaskGroup(args.group)
commits = [
@@ -136,14 +74,24 @@ def main():
print("Invalid task group : {}".format(e))
commits = []
+ # Read the history file
+ history = json.load(args.history)
+
# Trigger a task for each commit
- for commit in list_commits(args.codecov_token, args.nb_tasks, args.unique, commits):
- print("Triggering commit {mercurial} from {timestamp}".format(**commit))
+ nb = 0
+ for commit in history:
+ if nb >= args.nb_tasks:
+ break
+        if commit["changeset"] in commits:
+ print("Skipping {commit {changeset} from {date}".format(**commit))
+ continue
+ print("Triggering commit {changeset} from {date}".format(**commit))
if args.dry_run:
print(">>> No trigger on dry run")
else:
out = trigger_task(args.group, commit)
print(">>>", out["status"]["taskId"])
+ nb += 1
if __name__ == "__main__":
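
For illustration: covdir_gen.py now takes the JSON body of the backend's /v2/history endpoint as its positional "history" argument. A hedged sketch of fetching that payload (the host is the production URL used elsewhere in this repository; the error handling mirrors the frontend's):

// Example only: grab the /v2/history payload that covdir_gen.py consumes.
async function fetchHistory(backendUrl = 'https://api.coverage.moz.tools') {
  const response = await fetch(`${backendUrl}/v2/history?path=`);
  if (response.status !== 200) {
    throw new Error(response.status + ' - ' + response.statusText);
  }
  // Each entry carries {changeset, coverage, date}; covdir_gen.py reads
  // changeset and date when naming and triggering the hook tasks.
  return response.json();
}
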
diff --git a/frontend/.eslintrc.yml b/frontend/.eslintrc.yml
new file mode 100644
index 000000000..8d08aa14a
--- /dev/null
+++ b/frontend/.eslintrc.yml
@@ -0,0 +1,14 @@
+env:
+ browser: true
+ es6: true
+extends:
+ - google
+globals:
+ Atomics: readonly
+ SharedArrayBuffer: readonly
+parserOptions:
+ ecmaVersion: 2018
+ sourceType: module
+rules:
+ require-jsdoc: off
+ max-len: [warn, {code: 120}]
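
For illustration: with this config, lines longer than 120 characters only warn, and single violations can still be silenced inline — the pattern this patch itself uses in common.js and zero_coverage_report.js (the constant name here is an example):

// eslint-disable-next-line max-len
const LONG_URL = 'https://index.taskcluster.net/v1/task/project.releng.services.project.production.code_coverage_bot.latest/artifacts/public/zero_coverage_report.json';
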
diff --git a/frontend/package.json b/frontend/package.json
index 2acaa8853..0a33884ab 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -27,6 +27,9 @@
"babel-polyfill": "^6.26.0",
"clean-webpack-plugin": "^3.0.0",
"css-loader": "^3.0.0",
+ "eslint": "^6.2.2",
+ "eslint-config-google": "^0.13.0",
+ "eslint-loader": "^3.0.0",
"html-webpack-plugin": "^3.2.0",
"mini-css-extract-plugin": "^0.8.0",
"optimize-css-assets-webpack-plugin": "^5.0.3",
diff --git a/frontend/src/base.html b/frontend/src/base.html
index afa39072a..fc3d312cc 100755
--- a/frontend/src/base.html
+++ b/frontend/src/base.html
@@ -10,7 +10,7 @@
: {{ total }} files
@@ -47,7 +47,7 @@
@@ -73,7 +73,7 @@
: {{ files.length }} files
@@ -88,7 +88,7 @@
{{#files}}
@@ -102,29 +102,39 @@
Revision {{ revision }} from {{ date }}
-
+
+
+
Loading...
diff --git a/frontend/src/common.js b/frontend/src/common.js
index 3fda84781..23de1df71 100644
--- a/frontend/src/common.js
+++ b/frontend/src/common.js
@@ -1,56 +1,55 @@
import Mustache from 'mustache';
+import {buildRoute, readRoute, updateRoute} from './route.js';
export const REV_LATEST = 'latest';
-function assert(condition, message) {
- if (!condition) {
- throw new Error(message || "Assertion failed");
- }
-}
+// eslint-disable-next-line max-len
+const ZERO_COVERAGE_REPORT = 'https://index.taskcluster.net/v1/task/project.releng.services.project.production.code_coverage_bot.latest/artifacts/public/zero_coverage_report.json';
function domContentLoaded() {
- return new Promise(resolve => document.addEventListener('DOMContentLoaded', resolve));
+ return new Promise((resolve) => document.addEventListener('DOMContentLoaded', resolve));
}
export const DOM_READY = domContentLoaded();
-export async function main(load, display, opts) {
- // Immediately listen to DOM event
+export async function main(load, display) {
// Load initial data before DOM is available
- let data = await load();
+ const data = await load();
// Wait for DOM to be ready before displaying
await DOM_READY;
await display(data);
+ monitorOptions();
// Full workflow, loading then displaying data
// used for following updates
- let full = async function() {
- let data = await load();
+ const full = async function() {
+ const data = await load();
await display(data);
+ monitorOptions();
};
- monitor_options(opts, full);
+
+ // React to url changes
window.onhashchange = full;
}
-
// Coverage retrieval.
-const COVERAGE_BACKEND_HOST = 'https://api.coverage.moz.tools';
+const COVERAGE_BACKEND_HOST = process.env.BACKEND_URL;
-function cache_get(cache, key) {
+function cacheGet(cache, key) {
if (key in cache) {
return cache[key].val;
}
}
-function cache_set(cache, key, value) {
- let now = new Date().getTime() / 1000;
+function cacheSet(cache, key, value) {
+ const now = new Date().getTime() / 1000;
// If the cache got too big, remove all elements that were added more
// than 15 minutes ago.
if (Object.keys(cache).length > 100) {
- for (let key in cache) {
+ for (const key in cache) {
if (cache[key].time < now - 15 * 60) {
delete cache[key];
}
@@ -63,9 +62,10 @@ function cache_set(cache, key, value) {
};
}
-let path_coverage_cache = {};
-export async function get_path_coverage(path, changeset) {
- let data = cache_get(path_coverage_cache, `${changeset}_${path}`);
+const pathCoverageCache = {};
+export async function getPathCoverage(path, changeset, platform, suite) {
+ const cacheKey = `${changeset}_${path}_${platform}_${suite}`;
+ let data = cacheGet(pathCoverageCache, cacheKey);
if (data) {
return data;
}
@@ -74,37 +74,51 @@ export async function get_path_coverage(path, changeset) {
if (changeset && changeset !== REV_LATEST) {
params += `&changeset=${changeset}`;
}
- let response = await fetch(`${COVERAGE_BACKEND_HOST}/v2/path?${params}`).catch(alert);
+ if (platform && platform !== 'all') {
+ params += `&platform=${platform}`;
+ }
+ if (suite && suite !== 'all') {
+ params += `&suite=${suite}`;
+ }
+ const response = await fetch(`${COVERAGE_BACKEND_HOST}/v2/path?${params}`).catch(alert);
if (response.status !== 200) {
throw new Error(response.status + ' - ' + response.statusText);
}
data = await response.json();
- cache_set(path_coverage_cache, `${changeset}_${path}`, data);
+ cacheSet(pathCoverageCache, cacheKey, data);
return data;
}
-let history_cache = {};
-export async function get_history(path) {
+const historyCache = {};
+export async function getHistory(path, platform, suite) {
// Backend needs path without trailing /
if (path && path.endsWith('/')) {
path = path.substring(0, path.length-1);
}
- let data = cache_get(history_cache, path);
+ const cacheKey = `${path}_${platform}_${suite}`;
+ let data = cacheGet(historyCache, cacheKey);
if (data) {
return data;
}
- let response = await fetch(`${COVERAGE_BACKEND_HOST}/v2/history?path=${path}`);
+ let params = `path=${path}`;
+ if (platform && platform !== 'all') {
+ params += `&platform=${platform}`;
+ }
+ if (suite && suite !== 'all') {
+ params += `&suite=${suite}`;
+ }
+ const response = await fetch(`${COVERAGE_BACKEND_HOST}/v2/history?${params}`);
data = await response.json();
- cache_set(history_cache, path, data);
+ cacheSet(historyCache, cacheKey, data);
// Check data has coverage values
// These values are missing when going above 2 levels right now
- let coverage = data.filter(point => {
+ const coverage = data.filter((point) => {
return point.coverage !== null;
});
if (coverage.length === 0 ) {
@@ -115,17 +129,33 @@ export async function get_history(path) {
return data;
}
-let zero_coverage_cache = {};
-export async function get_zero_coverage_data() {
- let data = cache_get(zero_coverage_cache, '');
+const zeroCoverageCache = {};
+export async function getZeroCoverageData() {
+ let data = cacheGet(zeroCoverageCache, '');
+ if (data) {
+ return data;
+ }
+
+  const response = await fetch(ZERO_COVERAGE_REPORT);
+ data = await response.json();
+
+ cacheSet(zeroCoverageCache, '', data);
+
+ return data;
+}
+
+
+const filtersCache = {};
+export async function getFilters() {
+ let data = cacheGet(filtersCache, '');
if (data) {
return data;
}
- let response = await fetch('https://index.taskcluster.net/v1/task/project.releng.services.project.production.code_coverage_bot.latest/artifacts/public/zero_coverage_report.json');
+ const response = await fetch(`${COVERAGE_BACKEND_HOST}/v2/filters`);
data = await response.json();
- cache_set(zero_coverage_cache, '', data);
+ cacheSet(filtersCache, '', data);
return data;
}
@@ -133,50 +163,70 @@ export async function get_zero_coverage_data() {
// Option handling.
-function is_enabled(opt) {
- let elem = document.getElementById(opt);
- return elem.checked;
+function isEnabled(opt) {
+ const route = readRoute();
+ return route[opt] === 'on';
}
-function monitor_options(opts, callback) {
- for (let opt of opts) {
- let elem = document.getElementById(opt);
- elem.onchange = callback;
+function monitorOptions() {
+ // Monitor input & select changes
+ const fields = document.querySelectorAll('input, select');
+ for (const field of fields) {
+ if (field.type == 'text') {
+ // React on enter
+ field.onkeydown = async (evt) => {
+ if (evt.keyCode === 13) {
+ const params = {};
+ params[evt.target.name] = evt.target.value;
+ updateRoute(params);
+ }
+ };
+ } else {
+ // React on change
+ field.onchange = async (evt) => {
+ let value = evt.target.value;
+ if (evt.target.type == 'checkbox') {
+ value = evt.target.checked ? 'on' : 'off';
+ }
+ const params = {};
+ params[evt.target.name] = value;
+ updateRoute(params);
+ };
+ }
}
}
-
// hgmo.
-export async function get_source(file) {
- let response = await fetch(`https://hg.mozilla.org/mozilla-central/raw-file/tip/${file}`);
+export async function getSource(file) {
+ const response = await fetch(`https://hg.mozilla.org/mozilla-central/raw-file/tip/${file}`);
return await response.text();
}
// Filtering.
-let get_third_party_paths = function() {
+const getThirdPartyPaths = function() {
let paths = null;
return async function() {
if (!paths) {
- let response = await get_source('tools/rewriting/ThirdPartyPaths.txt');
- paths = response.split('\n').filter(path => path != '');
+ const response = await getSource('tools/rewriting/ThirdPartyPaths.txt');
+ paths = response.split('\n').filter((path) => path != '');
}
return paths;
};
}();
-export async function filter_third_party(files) {
- if (is_enabled('third_party')) {
+export async function filterThirdParty(files) {
+ if (isEnabled('third_party')) {
return files;
}
- let paths = await get_third_party_paths();
+ const paths = await getThirdPartyPaths();
- return files.filter(file => {
- for (let path of paths) {
+ return files.filter((file) => {
+ for (const path of paths) {
if (file.path.startsWith(path)) {
return false;
}
@@ -186,26 +236,26 @@ export async function filter_third_party(files) {
});
}
-export function filter_languages(files) {
- let cpp = is_enabled('cpp');
- let cpp_extensions = ['c', 'cpp', 'cxx', 'cc', 'h', 'hh', 'hxx', 'hpp', 'inl', 'inc'];
- let js = is_enabled('js');
- let js_extensions = ['js', 'jsm', 'xml', 'xul', 'xhtml', 'html'];
- let java = is_enabled('java');
- let java_extensions = ['java'];
- let rust = is_enabled('rust');
- let rust_extensions = ['rs'];
-
- return files.filter(file => {
- if (file.type == "directory") {
+export function filterLanguages(files) {
+ const cpp = isEnabled('cpp');
+ const cppExtensions = ['c', 'cpp', 'cxx', 'cc', 'h', 'hh', 'hxx', 'hpp', 'inl', 'inc'];
+ const js = isEnabled('js');
+ const jsExtensions = ['js', 'jsm', 'xml', 'xul', 'xhtml', 'html'];
+ const java = isEnabled('java');
+ const javaExtensions = ['java'];
+ const rust = isEnabled('rust');
+ const rustExtensions = ['rs'];
+
+ return files.filter((file) => {
+ if (file.type == 'directory') {
return true;
- } else if (cpp_extensions.find(ext => file.path.endsWith('.' + ext))) {
+ } else if (cppExtensions.find((ext) => file.path.endsWith('.' + ext))) {
return cpp;
- } else if (js_extensions.find(ext => file.path.endsWith('.' + ext))) {
+ } else if (jsExtensions.find((ext) => file.path.endsWith('.' + ext))) {
return js;
- } else if (rust_extensions.find(ext => file.path.endsWith('.' + ext))) {
+ } else if (rustExtensions.find((ext) => file.path.endsWith('.' + ext))) {
return rust;
- } else if (java_extensions.find(ext => file.path.endsWith('.' + ext))) {
+ } else if (javaExtensions.find((ext) => file.path.endsWith('.' + ext))) {
return java;
} else {
console.warn('Unknown language for ' + file.path);
@@ -214,43 +264,43 @@ export function filter_languages(files) {
});
}
-export function filter_headers(files) {
- if (is_enabled('headers')) {
+export function filterHeaders(files) {
+ if (isEnabled('headers')) {
return files;
}
- return files.filter(file => !file.path.endsWith('.h'));
+ return files.filter((file) => !file.path.endsWith('.h'));
}
-export function filter_completely_uncovered(files) {
- if (!is_enabled('completely_uncovered')) {
+export function filterCompletelyUncovered(files) {
+ if (!isEnabled('completely_uncovered')) {
return files;
}
- return files.filter(file => file.uncovered);
+ return files.filter((file) => file.uncovered);
}
-export function filter_last_push_date(files) {
- let elem = document.getElementById('last_push');
- let upper_limit = new Date();
- let lower_limit = new Date();
+export function filterLastPushDate(files) {
+ const elem = document.getElementById('last_push');
+ const upperLimit = new Date();
+ let lowerLimit = new Date();
if (elem.value == 'one_year') {
- lower_limit.setFullYear(upper_limit.getFullYear() - 1);
+ lowerLimit.setFullYear(upperLimit.getFullYear() - 1);
} else if (elem.value == 'two_years') {
- upper_limit.setFullYear(upper_limit.getFullYear() - 1);
- lower_limit.setFullYear(lower_limit.getFullYear() - 2);
+ upperLimit.setFullYear(upperLimit.getFullYear() - 1);
+ lowerLimit.setFullYear(lowerLimit.getFullYear() - 2);
} else if (elem.value == 'older_than_two_years') {
- upper_limit.setFullYear(upper_limit.getFullYear() - 2);
- lower_limit = new Date('1970-01-01T00:00:00Z');
+ upperLimit.setFullYear(upperLimit.getFullYear() - 2);
+ lowerLimit = new Date('1970-01-01T00:00:00Z');
} else {
return files;
}
- return files.filter(file => {
- let last_push_date = new Date(file.last_push_date);
- if (last_push_date.getTime() <= upper_limit.getTime()
- && last_push_date.getTime() >= lower_limit.getTime()) {
+ return files.filter((file) => {
+ const lastPushDate = new Date(file.lastPushDate);
+ if (lastPushDate.getTime() <= upperLimit.getTime()
+ && lastPushDate.getTime() >= lowerLimit.getTime()) {
return true;
} else {
return false;
@@ -259,22 +309,22 @@ export function filter_last_push_date(files) {
}
// Build the urls for a breadcrumb Navbar from a path
-export function build_navbar(path, revision) {
+export function buildNavbar(path, revision) {
if (path.endsWith('/')) {
path = path.substring(0, path.length-1);
}
let base = '';
- let links = [
+ const links = [
{
'name': 'mozilla-central',
- 'path': '',
- }
+ 'route': buildRoute({path: '', revision}),
+ },
];
- return links.concat(path.split('/').map(file => {
+ return links.concat(path.split('/').map((file) => {
base += (base ? '/' : '') + file;
return {
'name': file,
- 'path': base,
+ 'route': buildRoute({path: base, revision}),
};
}));
}
@@ -285,25 +335,25 @@ function canDisplay() {
}
export function message(cssClass, message) {
- if(!canDisplay()) return;
+ if (!canDisplay()) return;
- let box = document.getElementById('message');
+ const box = document.getElementById('message');
box.className = 'message ' + cssClass;
box.textContent = message;
box.style.display = 'block';
}
export function hide(id) {
- if(!canDisplay()) return;
+ if (!canDisplay()) return;
- let box = document.getElementById(id);
+ const box = document.getElementById(id);
box.style.display = 'none';
}
export function show(id, node) {
- if(!canDisplay()) return;
+ if (!canDisplay()) return;
- let box = document.getElementById(id);
+ const box = document.getElementById(id);
box.style.display = 'block';
if (node) {
box.replaceWith(node);
@@ -312,8 +362,8 @@ export function show(id, node) {
}
export function render(template, data, target) {
- var output = Mustache.render(document.getElementById(template).innerHTML, data);
- let box = document.getElementById(target);
+ const output = Mustache.render(document.getElementById(template).innerHTML, data);
+ const box = document.getElementById(target);
box.innerHTML = output;
box.style.display = 'block';
return box;
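
For illustration: getPathCoverage() and getHistory() above share a convention — 'all' means "no filter", so the platform/suite query parameters are simply omitted. A hypothetical helper showing the same pattern with URLSearchParams (not part of this patch, which concatenates the strings directly):

// Sketch: build the /v2/path and /v2/history query string from route values.
function buildCoverageQuery(path, changeset, platform, suite) {
  const params = new URLSearchParams({path});
  if (changeset && changeset !== 'latest') {
    params.set('changeset', changeset);
  }
  if (platform && platform !== 'all') {
    params.set('platform', platform);
  }
  if (suite && suite !== 'all') {
    params.set('suite', suite);
  }
  return params.toString();
}
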
diff --git a/frontend/src/index.js b/frontend/src/index.js
index 55185ad1b..aa81a9ae0 100644
--- a/frontend/src/index.js
+++ b/frontend/src/index.js
@@ -1,5 +1,7 @@
-import {REV_LATEST, DOM_READY, main, show, hide, message, get_path_coverage, get_history, get_zero_coverage_data, build_navbar, render, get_source} from './common.js';
-import {zero_coverage_display} from './zero_coverage_report.js';
+import {REV_LATEST, DOM_READY, main, show, hide, message, getPathCoverage, getHistory,
+ getZeroCoverageData, buildNavbar, render, getSource, getFilters} from './common.js';
+import {buildRoute, readRoute, updateRoute} from './route.js';
+import {zeroCoverageDisplay, zeroCoverageMenu} from './zero_coverage_report.js';
import './style.css';
import Prism from 'prismjs';
import Chartist from 'chartist';
@@ -8,31 +10,51 @@ import 'chartist/dist/chartist.css';
const VIEW_ZERO_COVERAGE = 'zero';
const VIEW_BROWSER = 'browser';
+
+function browserMenu(revision, filters, route) {
+ const context = {
+ revision,
+ platforms: filters.platforms.map((p) => {
+ return {
+ 'name': p,
+ 'selected': p == route.platform,
+ };
+ }),
+ suites: filters.suites.map((s) => {
+ return {
+ 'name': s,
+ 'selected': s == route.suite,
+ };
+ }),
+ };
+ render('menu_browser', context, 'menu');
+}
+
async function graphHistory(history, path) {
if (history === null) {
message('warning', `No history data for ${path}`);
return;
}
- let dateStr = function(timestamp){
- let date = new Date(timestamp);
+ const dateStr = function(timestamp) {
+ const date = new Date(timestamp);
return `${date.getDate()}/${date.getMonth() + 1}/${date.getFullYear()}`;
- }
+ };
- var data = {
+ const data = {
series: [
{
name: 'History',
- data: history.map(push => {
+ data: history.map((push) => {
return {
x: push.date * 1000,
y: push.coverage,
- }
- })
- }
+ };
+ }),
+ },
],
};
- var config = {
+ const config = {
// Display dates on a linear scale
axisX: {
type: Chartist.FixedScaleAxis,
@@ -45,20 +67,20 @@ async function graphHistory(history, path) {
tension: 1,
}),
};
- let elt = show('history').querySelector('.ct-chart');
- let chart = new Chartist.Line(elt, data, config);
+ const elt = show('history').querySelector('.ct-chart');
+ const chart = new Chartist.Line(elt, data, config);
chart.on('draw', function(evt) {
- if(evt.type === 'point') {
+ if (evt.type === 'point') {
// Load revision from graph when a point is clicked
- let revision = history[evt.index].changeset;
- evt.element._node.onclick = function(){
- updateHash(revision, path);
+ const revision = history[evt.index].changeset;
+ evt.element._node.onclick = function() {
+ updateRoute({revision});
};
// Display revision from graph when a point is overed
- evt.element._node.onmouseover = function(){
- let ctx = {
+ evt.element._node.onmouseover = function() {
+ const ctx = {
revision: revision.substring(0, 12),
date: dateStr(evt.value.x),
};
@@ -69,20 +91,25 @@ async function graphHistory(history, path) {
}
async function showDirectory(dir, revision, files) {
- let context = {
- navbar: build_navbar(dir, revision),
- files: files,
+ const context = {
+ navbar: buildNavbar(dir, revision),
+ files: files.map((file) => {
+ file.route = buildRoute({
+ path: file.path,
+ });
+ return file;
+ }),
revision: revision || REV_LATEST,
- file_name: function(){
+ file_name: function() {
// Build filename relative to current dir
return dir ? this.path.substring(dir.length+1) : this.path;
- }
+ },
};
render('browser', context, 'output');
}
async function showFile(file, revision) {
- let source = await get_source(file.path);
+ const source = await getSource(file.path);
let language;
if (file.path.endsWith('cpp') || file.path.endsWith('h')) {
@@ -99,133 +126,94 @@ async function showFile(file, revision) {
language = 'java';
}
- let context = {
- navbar: build_navbar(file.path, revision),
+ const context = {
+ navbar: buildNavbar(file.path, revision),
revision: revision || REV_LATEST,
language: language,
lines: source.split('\n').map((line, nb) => {
- let coverage = file.coverage[nb];
- let css_class = '';
+ const coverage = file.coverage[nb];
+ let cssClass = '';
if (coverage !== -1) {
- css_class = coverage > 0 ? 'covered': 'uncovered';
+        cssClass = coverage > 0 ? 'covered' : 'uncovered';
}
return {
nb: nb,
line: line || ' ',
- covered: css_class,
- }
+ covered: cssClass,
+ };
}),
};
hide('message');
hide('history');
- let output = render('file_coverage', context, 'output');
+ const output = render('file_coverage', context, 'output');
// Highlight source code once displayed
Prism.highlightAll(output);
}
-function readHash() {
- // Reads changeset & path from current URL hash
- let hash = window.location.hash.substring(1);
- let pos = hash.indexOf(':');
- if (pos === -1) {
- return ['', ''];
- }
- return [
- hash.substring(0, pos),
- hash.substring(pos+1),
- ]
-}
-
-function updateHash(newChangeset, newPath) {
- // Set the URL hash with both changeset & path
- let [changeset, path] = readHash();
- changeset = newChangeset || changeset || REV_LATEST;
- path = newPath || path || '';
- window.location.hash = '#' + changeset + ':' + path;
-}
-
async function load() {
- let [revision, path] = readHash();
+ const route = readRoute();
// Reset display, dom-safe
hide('history');
hide('output');
- message('loading', 'Loading coverage data for ' + (path || 'mozilla-central') + ' @ ' + (revision || REV_LATEST));
+ message('loading', 'Loading coverage data for ' + (route.path || 'mozilla-central') + ' @ ' + route.revision);
// Load only zero coverage for that specific view
- if (revision === VIEW_ZERO_COVERAGE) {
- let zero_coverage = await get_zero_coverage_data();
+ if (route.view === VIEW_ZERO_COVERAGE) {
+ const zeroCoverage = await getZeroCoverageData();
return {
view: VIEW_ZERO_COVERAGE,
- path,
- zero_coverage,
- }
+ path: route.path,
+ zeroCoverage,
+ route,
+ };
}
try {
- var [coverage, history] = await Promise.all([
- get_path_coverage(path, revision),
- get_history(path),
+ const [coverage, history, filters] = await Promise.all([
+ getPathCoverage(route.path, route.revision, route.platform, route.suite),
+ getHistory(route.path, route.platform, route.suite),
+ getFilters(),
]);
+
+ return {
+ view: VIEW_BROWSER,
+ path: route.path,
+ revision: route.revision,
+ route,
+ coverage,
+ history,
+ filters,
+ };
} catch (err) {
console.warn('Failed to load coverage', err);
await DOM_READY; // We want to always display this message
message('error', 'Failed to load coverage: ' + err.message);
throw err;
}
-
- return {
- view: VIEW_BROWSER,
- path,
- revision,
- coverage,
- history,
- };
}
async function display(data) {
-
- // Toggle menu per views
- if (data.view === VIEW_BROWSER) {
- show('menu_browser');
- hide('menu_zero');
- } else if (data.view === VIEW_ZERO_COVERAGE) {
- show('menu_zero');
- hide('menu_browser');
- } else {
- message('error', 'Invalid view : ' + data.view);
- }
-
- // Revision input management
- const revision = document.getElementById('revision');
- revision.onkeydown = async function(evt){
- if(evt.keyCode === 13) {
- updateHash(data.revision.value);
- }
- };
-
- // Also update the revision element
- if (data.revision && data.revision != REV_LATEST) {
- let input = document.getElementById('revision');
- input.value = data.revision;
- }
-
if (data.view === VIEW_ZERO_COVERAGE ) {
- await zero_coverage_display(data.zero_coverage, data.path);
-
- } else if (data.view === VIEW_BROWSER && data.coverage.type === 'directory') {
- hide('message');
- await graphHistory(data.history, data.path);
- await showDirectory(data.path, data.revision, data.coverage.children);
-
- } else if (data.view === VIEW_BROWSER && data.coverage.type === 'file') {
- await showFile(data.coverage, data.revision);
-
+ await zeroCoverageMenu(data.route);
+ await zeroCoverageDisplay(data.zeroCoverage, data.path);
+ } else if (data.view === VIEW_BROWSER) {
+ browserMenu(data.revision, data.filters, data.route);
+
+ if (data.coverage.type === 'directory') {
+ hide('message');
+ await graphHistory(data.history, data.path);
+ await showDirectory(data.path, data.revision, data.coverage.children);
+ } else if (data.coverage.type === 'file') {
+ await showFile(data.coverage, data.revision);
+ } else {
+      message('error', 'Invalid file type: ' + data.coverage.type);
+ }
} else {
- message('error', 'Invalid file type: ' + data.coverage.type);
+    message('error', 'Invalid view: ' + data.view);
}
}
-main(load, display, ['third_party', 'headers', 'completely_uncovered', 'cpp', 'js', 'java', 'rust', 'last_push'])
+main(load, display);
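
For illustration: browserMenu() assumes the /v2/filters endpoint returns an object with "platforms" and "suites" arrays (see the backend's coverage_filters()). An example payload and the Mustache context it becomes (values are examples, not real filters):

const exampleFilters = {platforms: ['linux', 'windows'], suites: ['mochitest', 'xpcshell']};
const exampleRoute = {platform: 'linux', suite: 'all'};
const menuContext = {
  platforms: exampleFilters.platforms.map((p) => ({name: p, selected: p === exampleRoute.platform})),
  suites: exampleFilters.suites.map((s) => ({name: s, selected: s === exampleRoute.suite})),
};
// menuContext.platforms[0].selected === true; no suite is selected, i.e. 'all'.
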
diff --git a/frontend/src/route.js b/frontend/src/route.js
new file mode 100644
index 000000000..17bab226f
--- /dev/null
+++ b/frontend/src/route.js
@@ -0,0 +1,42 @@
+import {REV_LATEST} from './common.js';
+
+export function readRoute() {
+ // Reads all filters from current URL hash
+ const hash = window.location.hash.substring(1);
+ const pairs = hash.split('&');
+ const out = {};
+ pairs.forEach((pair) => {
+ const [key, value] = pair.split('=');
+ if (!key) {
+ return;
+ }
+ out[decodeURIComponent(key)] = decodeURIComponent(value);
+ });
+
+ // Default values
+ if (!out.revision) {
+ out.revision = REV_LATEST;
+ }
+ if (!out.path) {
+ out.path = '';
+ }
+ return out;
+}
+
+export function buildRoute(params) {
+ // Add all params on top of current route
+ let route = readRoute();
+ if (params) {
+ route = {...route, ...params};
+ }
+
+ // Build query string from filters
+ return '#' + Object.keys(route)
+ .map((k) => encodeURIComponent(k) + '=' + encodeURIComponent(route[k]))
+ .join('&');
+}
+
+export function updateRoute(params) {
+ // Update full hash with an updated url
+ window.location.hash = buildRoute(params);
+}
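
For illustration, a usage sketch for the route helpers above (the hash value is an example):

import {buildRoute, readRoute, updateRoute} from './route.js';

window.location.hash = '#view=browser&revision=latest&path=dom';
const route = readRoute(); // {view: 'browser', revision: 'latest', path: 'dom'}
const link = buildRoute({platform: 'linux'});
// link === '#view=browser&revision=latest&path=dom&platform=linux'
updateRoute({suite: 'mochitest'}); // rewrites the hash, so window.onhashchange re-runs load/display
console.log(route, link);
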
diff --git a/frontend/src/style.css b/frontend/src/style.css
index 71e33b677..cf8e9c454 100644
--- a/frontend/src/style.css
+++ b/frontend/src/style.css
@@ -24,11 +24,6 @@ header #browser input {
font-family: monospace;
}
-header > div {
- /* By default do not display any menu : managed by JS */
- display: none;
-}
-
#main {
background-color: white;
border-top: 1px solid darkgray;
diff --git a/frontend/src/zero_coverage_report.js b/frontend/src/zero_coverage_report.js
index c935e6e71..07b71bd47 100644
--- a/frontend/src/zero_coverage_report.js
+++ b/frontend/src/zero_coverage_report.js
@@ -1,7 +1,47 @@
-import {hide, message, build_navbar, render, filter_third_party, filter_languages, filter_headers, filter_completely_uncovered, filter_last_push_date} from './common.js';
+import {hide, message, buildNavbar, render, filterThirdParty, filterLanguages,
+ filterHeaders, filterCompletelyUncovered, filterLastPushDate} from './common.js';
+import {buildRoute} from './route.js';
+
+const ZERO_COVERAGE_FILTERS = {
+  'third_party': 'Show third-party files',
+ 'headers': 'Show headers',
+ 'completely_uncovered': 'Show completely uncovered files only',
+ 'cpp': 'C/C++',
+ 'js': 'JavaScript',
+ 'java': 'Java',
+ 'rust': 'Rust',
+};
+const ZERO_COVERAGE_PUSHES = {
+ 'all': 'All',
+ 'one_year': '0 < 1 year',
+ 'two_years': '1 < 2 years',
+ 'older_than_two_years': 'Older than 2 years',
+};
+
+
+export function zeroCoverageMenu(route) {
+ const context = {
+ filters: Object.entries(ZERO_COVERAGE_FILTERS).map(([key, message]) => {
+ return {
+ key,
+ message,
+ checked: route[key] === 'on',
+ };
+ }),
+ last_pushes: Object.entries(ZERO_COVERAGE_PUSHES).map(([value, message]) => {
+ return {
+ value,
+ message,
+ selected: route['last_push'] === value,
+ };
+ }),
+ };
+ render('menu_zero', context, 'menu');
+}
-function sort_entries(entries) {
+function sortEntries(entries) {
return entries.sort(([dir1, stats1], [dir2, stats2]) => {
if (stats1.children != stats2.children) {
return stats1.children < stats2.children;
@@ -12,12 +52,12 @@ function sort_entries(entries) {
}
return dir1 > dir2;
- }).map(([dir , stats]) => {
+ }).map(([dir, stats]) => {
return {stats, dir};
});
}
-function get_min_date(oldDate, newDate) {
+function getMinDate(oldDate, newDate) {
if (!oldDate) {
return newDate;
}
@@ -30,11 +70,11 @@ function get_min_date(oldDate, newDate) {
function getBaseStats(file, children) {
return {'children': children,
- 'funcs': file.funcs,
- 'first_push_date': file.first_push_date,
- 'last_push_date': file.last_push_date,
- 'size': file.size,
- 'commits': file.commits};
+ 'funcs': file.funcs,
+ 'first_push_date': file.first_push_date,
+ 'last_push_date': file.last_push_date,
+ 'size': file.size,
+ 'commits': file.commits};
}
function cumStats(prevStats, newStats) {
@@ -42,20 +82,11 @@ function cumStats(prevStats, newStats) {
prevStats.funcs += newStats.funcs;
prevStats.size += newStats.size;
prevStats.commits += newStats.commits;
- prevStats.first_push_date = get_min_date(prevStats.first_push_date, newStats.first_push_date);
- prevStats.last_push_date = get_min_date(prevStats.last_push_date, newStats.last_push_date);
-}
-
-function getFileSize(size) {
- if (size >= 1e6) {
- return (size / 1e6).toFixed(2) + 'M';
- } else if (size >= 1e3) {
- return (size / 1e3).toFixed(1) + 'K';
- }
- return size;
+ prevStats.first_push_date = getMinDate(prevStats.first_push_date, newStats.first_push_date);
+ prevStats.last_push_date = getMinDate(prevStats.last_push_date, newStats.last_push_date);
}
-export async function zero_coverage_display(data, dir) {
+export async function zeroCoverageDisplay(data, dir) {
hide('output');
hide('history');
message('loading', 'Loading zero coverage report for ' + (dir || 'mozilla-central'));
@@ -66,20 +97,20 @@ export async function zero_coverage_display(data, dir) {
dir = '';
}
- let files = data['files'].filter(file => file.name.startsWith(dir));
+ let files = data['files'].filter((file) => file.name.startsWith(dir));
// TODO: Do this in the backend directly!
- files.forEach(file => {
+ files.forEach((file) => {
file.path = file.name;
});
- files = await filter_third_party(files);
- files = filter_languages(files);
- files = filter_headers(files);
- files = filter_completely_uncovered(files);
- files = filter_last_push_date(files);
+ files = await filterThirdParty(files);
+ files = filterLanguages(files);
+ files = filterHeaders(files);
+ files = filterCompletelyUncovered(files);
+ files = filterLastPushDate(files);
- let map = new Map();
+ const map = new Map();
- for (let file of files) {
+ for (const file of files) {
let rest = file.path.substring(dir.lastIndexOf('/') + 1);
if (rest.includes('/')) {
@@ -98,18 +129,22 @@ export async function zero_coverage_display(data, dir) {
}
const revision = data['hg_revision'];
- let context = {
+ const context = {
current_dir: dir,
- entries: sort_entries(Array.from(map.entries())),
- entry_url : function() {
- let path = dir + this.dir;
+ entries: sortEntries(Array.from(map.entries())),
+ entry_url: function() {
+ const path = dir + this.dir;
if (this.stats.children != 0) {
- return `#zero:${path}`;
+ return buildRoute({
+ view: 'zero',
+ path,
+ });
} else {
- return `#${revision}:${path}`;
+ // Fully reset the url when moving back to browser view
+ return `#view=browser&revision=${revision}&path=${path}`;
}
},
- navbar: build_navbar(dir),
+ navbar: buildNavbar(dir),
total: files.length,
};
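
For illustration: zeroCoverageMenu() above and monitorOptions() in common.js share the 'on'/'off' convention for checkbox state stored in the URL hash. A self-contained sketch (example values only):

const exampleRoute = {third_party: 'on', headers: 'off', last_push: 'one_year'};
const exampleFilters = {third_party: 'Show third-party files', headers: 'Show headers'};
const checkboxes = Object.entries(exampleFilters).map(([key, message]) => ({
  key,
  message,
  checked: exampleRoute[key] === 'on',
}));
// checkboxes[0].checked === true, checkboxes[1].checked === false
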
diff --git a/frontend/webpack.common.js b/frontend/webpack.common.js
index b0c6db5b1..c48db7b7b 100644
--- a/frontend/webpack.common.js
+++ b/frontend/webpack.common.js
@@ -1,4 +1,5 @@
const path = require('path');
+const webpack = require('webpack');
const { CleanWebpackPlugin } = require('clean-webpack-plugin');
const HtmlWebpackPlugin = require('html-webpack-plugin')
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
@@ -18,9 +19,18 @@ module.exports = {
new MiniCssExtractPlugin({
filename: 'coverage-[hash].css',
}),
+ new webpack.EnvironmentPlugin({
+ BACKEND_URL: 'http://localhost:8000',
+ }),
],
module: {
rules: [
+ {
+ enforce: 'pre',
+ test: /\.js$/,
+ exclude: /node_modules/,
+ loader: 'eslint-loader',
+ },
{
test: /\.js$/,
exclude: /(node_modules)/,