8 | 8 | import copy |
9 | 9 | import traceback |
10 | 10 | from pathlib import Path |
| 11 | +from typing import Iterable |
| 12 | +from urllib.parse import urlencode |
| 13 | +import urllib.request |
| 14 | +import urllib.error |
11 | 15 |
12 | 16 | # Resolve repository root from this script location |
13 | 17 | SCRIPT_DIR = Path(__file__).resolve().parent |
@@ -184,6 +188,109 @@ def parse_list_arg(s: str) -> list[str]: |
184 | 188 | return [part.strip() for part in txt.split(",") if part.strip()] |
185 | 189 |
186 | 190 |
| 191 | +def _gitlab_auth_header() -> tuple[str, str]: |
| 192 | + """Return header key and value for GitLab API auth, preferring PRIVATE-TOKEN, then JOB-TOKEN. |
| 193 | +
| 194 | + Falls back to empty auth if neither is available. |
| 195 | + """ |
| 196 | + private = os.environ.get("GITLAB_API_TOKEN") or os.environ.get("PRIVATE_TOKEN") |
| 197 | + if private: |
| 198 | + return ("PRIVATE-TOKEN", private) |
| 199 | + job = os.environ.get("CI_JOB_TOKEN") |
| 200 | + if job: |
| 201 | + return ("JOB-TOKEN", job) |
| 202 | + return ("", "") |
| 203 | + |
| 204 | + |
| 205 | +def _gitlab_api_get(path: str) -> tuple[int, dict | list | None]: |
| 206 | + """Perform a GET to GitLab API v4 and return (status_code, json_obj_or_None). |
| 207 | +
| 209 | + Uses the API v4 base URL from the CI environment (CI_API_V4_URL). Returns (0, None) if it is missing. |
| 209 | + """ |
| 210 | + base = os.environ.get("CI_API_V4_URL") |
| 211 | + if not base: |
| 212 | + return 0, None |
| 213 | + url = base.rstrip("/") + "/" + path.lstrip("/") |
| 214 | + key, value = _gitlab_auth_header() |
| 215 | + req = urllib.request.Request(url) |
| 216 | + if key: |
| 217 | + req.add_header(key, value) |
| 218 | + try: |
| 219 | + with urllib.request.urlopen(req, timeout=15) as resp: |
| 220 | + status = resp.getcode() |
| 221 | + data = resp.read() |
| 222 | + try: |
| 223 | + obj = json.loads(data.decode("utf-8")) if data else None |
| 224 | + except Exception: |
| 225 | + obj = None |
| 226 | + return status, obj |
| 227 | + except urllib.error.HTTPError as e: |
| 228 | + try: |
| 229 | + body = e.read().decode("utf-8") |
| 230 | + except Exception: |
| 231 | + body = str(e) |
| 232 | + sys.stderr.write(f"[WARN] GitLab API GET {url} failed: {e} body={body}\n") |
| 233 | + return e.code, None |
| 234 | + except Exception as e: |
| 235 | + sys.stderr.write(f"[WARN] GitLab API GET {url} error: {e}\n") |
| 236 | + sys.stderr.write(traceback.format_exc() + "\n") |
| 237 | + return -1, None |
| 238 | + |
| 239 | + |
| 240 | +def list_project_runners() -> list[dict]: |
| 241 | + """List runners available to this project via GitLab API. |
| 242 | +
| 243 | + Requires the CI vars CI_API_V4_URL and CI_PROJECT_ID, plus either GITLAB_API_TOKEN/PRIVATE_TOKEN or CI_JOB_TOKEN. |
| 244 | + Returns an empty list if not accessible. |
| 245 | + """ |
| 246 | + project_id = os.environ.get("CI_PROJECT_ID") |
| 247 | + if not project_id: |
| 248 | + return [] |
| 249 | + |
| 250 | + runners: list[dict] = [] |
| 251 | + page = 1 |
| 252 | + per_page = 100 |
| 253 | + while True: |
| 254 | + q = urlencode({"per_page": per_page, "page": page}) |
| 255 | + status, obj = _gitlab_api_get(f"projects/{project_id}/runners?{q}") |
| 256 | + if status != 200 or not isinstance(obj, list): |
| 257 | + # Project-scoped listing might be restricted for JOB-TOKEN in some instances. |
| 258 | + # Return what we have (likely nothing) and let caller decide. |
| 259 | + break |
| 260 | + runners.extend(x for x in obj if isinstance(x, dict)) |
| 261 | + if len(obj) < per_page: |
| 262 | + break |
| 263 | + page += 1 |
| 264 | + return runners |
| 265 | + |
| 266 | + |
| 267 | +def runner_supports_tags(runner: dict, required_tags: Iterable[str]) -> bool: |
| 268 | + tag_list = runner.get("tag_list") or [] |
| 269 | + if not isinstance(tag_list, list): |
| 270 | + return False |
| 271 | + tags = {str(t).strip() for t in tag_list if isinstance(t, str) and t.strip()} |
| 272 | + if not tags: |
| 273 | + return False |
| 274 | + # Skip paused/inactive runners |
| 275 | + if runner.get("paused") is True: |
| 276 | + return False |
| 277 | + if runner.get("active") is False: |
| 278 | + return False |
| 279 | + return all(t in tags for t in required_tags) |
| 280 | + |
| 281 | + |
| 282 | +def any_runner_matches(required_tags: Iterable[str], runners: list[dict]) -> bool: |
| 283 | + req = [t for t in required_tags if t] |
| 284 | + for r in runners: |
| 285 | + try: |
| 286 | + if runner_supports_tags(r, req): |
| 287 | + return True |
| 288 | + except Exception: |
| 289 | + # Be robust to unexpected runner payloads |
| 290 | + continue |
| 291 | + return False |
| 292 | + |
| 293 | + |
187 | 294 | def main(): |
188 | 295 | ap = argparse.ArgumentParser() |
189 | 296 | ap.add_argument("--chips", required=True, help="Comma-separated or JSON array list of SoCs") |
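
For reference, here is a minimal sketch of the availability check the helpers above perform, using hypothetical runner payloads. The field names mirror the checks in `runner_supports_tags`; the ids, tag names, and the presence of `tag_list` in the project-runners response are assumptions for illustration only.

```python
# Hypothetical runner payloads; values are illustrative only.
runners = [
    {"id": 1, "active": True, "paused": False, "tag_list": ["esp32", "wifi_ap"]},
    {"id": 2, "active": True, "paused": True, "tag_list": ["esp32s3"]},
]

def matches(runner: dict, required_tags: list[str]) -> bool:
    # Mirrors runner_supports_tags: skip paused/inactive runners and require
    # every tag needed by the job to be present on the runner.
    if runner.get("paused") is True or runner.get("active") is False:
        return False
    tags = {t.strip() for t in (runner.get("tag_list") or []) if isinstance(t, str) and t.strip()}
    return bool(tags) and all(t in tags for t in required_tags)

print(any(matches(r, ["esp32", "wifi_ap"]) for r in runners))  # True: runner 1 has both tags
print(any(matches(r, ["esp32s3"]) for r in runners))           # False: runner 2 is paused
```

Groups whose tag set no runner can satisfy are not emitted as normal jobs; they are collected into `missing_groups` by the `else` branch in the hunk below.
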
@@ -249,25 +356,72 @@ def main(): |
249 | 356 |
250 | 357 | # Build child pipeline YAML in deterministic order |
251 | 358 | jobs_entries = [] # list of (sort_key, job_name, job_dict) |
| 359 | + |
| 360 | + # Discover available runners (best-effort) |
| 361 | + available_runners = list_project_runners() |
| 362 | + if not available_runners: |
| 363 | + print("[WARN] Could not enumerate project runners (or none were found); skipping runner-tag availability checks.") |
| 364 | + |
| 365 | + # Accumulate all missing-runner groups to emit a single stub job |
| 366 | + missing_groups: list[dict] = [] |
| 367 | + |
252 | 368 | for (chip, tagset, test_type), test_dirs in group_map.items(): |
253 | 369 | tag_list = sorted(tagset) |
254 | 370 | # Build name suffix excluding the SOC itself to avoid duplication |
255 | 371 | non_soc_tags = [t for t in tag_list if t != chip] |
256 | 372 | tag_suffix = "-".join(non_soc_tags) if non_soc_tags else "generic" |
257 | | - job_name = f"hw-{chip}-{test_type}-{tag_suffix}"[:255] |
258 | 373 |
259 | | - # Clone base job and adjust (preserve key order using deepcopy) |
| 374 | + # Determine if any runner can serve this job |
| 375 | + can_schedule = True |
| 376 | + if available_runners: |
| 377 | + can_schedule = any_runner_matches(tag_list, available_runners) |
| 378 | + |
| 379 | + if can_schedule: |
| 380 | + job_name = f"hw-{chip}-{test_type}-{tag_suffix}"[:255] |
| 381 | + |
| 382 | + # Clone base job and adjust (preserve key order using deepcopy) |
| 383 | + job = copy.deepcopy(base_job) |
| 384 | + # Ensure tags include SOC+extras |
| 385 | + job["tags"] = tag_list |
| 386 | + vars_block = job.get("variables", {}) |
| 387 | + vars_block["TEST_CHIP"] = chip |
| 388 | + vars_block["TEST_TYPE"] = test_type |
| 389 | + # Provide list of test directories for this job |
| 390 | + vars_block["TEST_LIST"] = "\n".join(sorted(test_dirs)) |
| 391 | + job["variables"] = vars_block |
| 392 | + |
| 393 | + sort_key = (chip, test_type, tag_suffix) |
| 394 | + jobs_entries.append((sort_key, job_name, job)) |
| 395 | + else: |
| 396 | + # Accumulate for a single combined missing-runner job |
| 397 | + missing_groups.append( |
| 398 | + { |
| 399 | + "chip": chip, |
| 400 | + "test_type": test_type, |
| 401 | + "required_tags": tag_list, |
| 402 | + "test_dirs": sorted(test_dirs), |
| 403 | + } |
| 404 | + ) |
| 405 | + |
| 406 | + # If any groups are missing runners, create one combined stub job to emit all JUnit errors |
| 407 | + if missing_groups: |
| 408 | + job_name = "hw-missing-runners" |
260 | 409 | job = copy.deepcopy(base_job) |
261 | | - # Ensure tags include SOC+extras |
262 | | - job["tags"] = tag_list |
| 410 | + if "tags" in job: |
| 411 | + del job["tags"] |
| 412 | + job["before_script"] = [ |
| 413 | + "echo 'No suitable hardware runners found for some groups; generating combined JUnit error stubs.'" |
| 414 | + ] |
263 | 415 | vars_block = job.get("variables", {}) |
264 | | - vars_block["TEST_CHIP"] = chip |
265 | | - vars_block["TEST_TYPE"] = test_type |
266 | | - # Provide list of test directories for this job |
267 | | - vars_block["TEST_LIST"] = "\n".join(sorted(test_dirs)) |
| 416 | + # Store as JSON string for the generator script to process |
| 417 | + vars_block["MISSING_GROUPS_JSON"] = json.dumps(missing_groups) |
268 | 418 | job["variables"] = vars_block |
269 | | - |
270 | | - sort_key = (chip, test_type, tag_suffix) |
| 419 | + job["script"] = [ |
| 420 | + "python3 .gitlab/scripts/generate_missing_runner_junit.py", |
| 421 | + "exit 1", |
| 422 | + ] |
| 423 | + # Ensure it sorts after normal jobs |
| 424 | + sort_key = ("zzz", "zzz", "zzz") |
271 | 425 | jobs_entries.append((sort_key, job_name, job)) |
272 | 426 |
273 | 427 | # Order jobs by (chip, type, tag_suffix) |
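
The `generate_missing_runner_junit.py` script invoked by the stub job is not part of this change. Below is a minimal sketch of how such a script might consume `MISSING_GROUPS_JSON` (set in the job's `variables` block above and therefore available as an environment variable) and emit one JUnit `<error>` entry per unschedulable group. The output filename and exact JUnit layout are assumptions, not taken from this diff.

```python
# Sketch only: turn MISSING_GROUPS_JSON into a JUnit report with one <error>
# testcase per group that had no matching runner. Output path is hypothetical.
import json
import os
from xml.sax.saxutils import quoteattr

groups = json.loads(os.environ.get("MISSING_GROUPS_JSON", "[]"))

cases = []
for g in groups:
    name = f"{g['chip']}-{g['test_type']}-{'-'.join(g['required_tags'])}"
    message = (
        f"No runner with tags {g['required_tags']} is available "
        f"for {len(g['test_dirs'])} test dir(s)"
    )
    cases.append(
        f"<testcase classname=\"missing-runner\" name={quoteattr(name)}>"
        f"<error message={quoteattr(message)}/></testcase>"
    )

xml = (
    '<?xml version="1.0" encoding="UTF-8"?>\n'
    f'<testsuite name="missing-runners" tests="{len(cases)}" errors="{len(cases)}">'
    + "".join(cases)
    + "</testsuite>\n"
)

with open("missing_runners_junit.xml", "w", encoding="utf-8") as fh:
    fh.write(xml)
```

Assuming the base job already collects JUnit reports as artifacts, pairing a report like this with the stub job's `exit 1` keeps the pipeline red while surfacing the unschedulable groups in the test report rather than only in the job log.
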