#!/usr/bin/env python3
| 2 | +""" |
| 3 | +This script fetches previous cloned BATS jobs to check whether we can |
| 4 | +tag a job as passed by doing a set intersection of all failed jobs. |
| 5 | +""" |

import argparse
import itertools
import logging
import os
import re
import sys
from concurrent.futures import ThreadPoolExecutor
from functools import cache
from urllib.parse import urlparse

import requests
from requests.exceptions import RequestException


TIMEOUT = 30
USER_AGENT = "openqa-bats-review (https://github.com/os-autoinst/scripts)"

logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
log = logging.getLogger(sys.argv[0] if __name__ == "__main__" else __name__)
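# Reuse a single HTTP session (connection pooling) for all API and log requests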
session = requests.Session()


# We want to extract the test name, stripping the leading number and the optional timing information:
# not ok 166 bud-git-context in 118ms
# not ok 655 [520] podman checkpoint --export, with volumes in 1558ms
NOT_OK = re.compile(r"^not ok \d+ (?:\[\d+\] )?(.*?)(?: in \d+ms)?$")


def get_file(url: str) -> str:
    """
    Get a text file
    """
    headers = {
        "User-Agent": USER_AGENT,
    }
    try:
        got = session.get(url, headers=headers, timeout=TIMEOUT)
        got.raise_for_status()
    except RequestException as error:
        log.error("%s: %s", url, error)
        sys.exit(1)
    return got.text


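# Cached because the same /details URL is requested by both get_clone_chain() and main()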
@cache
def get_job(url: str) -> dict:
    """
    Get a job
    """
    headers = {
        "User-Agent": USER_AGENT,
    }
    try:
        got = session.get(url, headers=headers, timeout=TIMEOUT)
        got.raise_for_status()
        data = got.json()
    except RequestException as error:
        log.error("%s: %s", url, error)
        sys.exit(1)
    return data["job"]


def grep_notok(url: str) -> set[str]:
    """
    Grep for "not ok" lines and return a set with the failing tests
    prefixed by the filename
    """
    notok = set()
    prefix = os.path.basename(url)
    data = get_file(url)
    lines = data.splitlines()
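    # The TAP plan line ("1..N") announces how many test results to expect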
    last = 0
    for line in lines:
        if line.startswith("1.."):
            last = int(line.split("..", 1)[1])
            break
    if not last:
        log.error("Malformed TAP file: %s", url)
        sys.exit(1)
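    # Count every result line and collect the names of the failing tests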
    tests = 0
    for line in lines:
        if not line.startswith(("ok", "not ok", "#not ok")):
            continue
        tests += 1
        try:
            test = NOT_OK.findall(line)[0]
            notok.add(f"{prefix}:{test}")
        except IndexError:
            continue
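    # Fewer results than the plan announced means the log was cut short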
    if tests != last:
        log.error("Truncated TAP file: %s", url)
        sys.exit(1)
    return notok


def process_files(files: list[str]) -> set[str]:
    """
    Process TAP files
    """
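    # Fetch and parse all TAP logs in parallel, one worker per file, and merge the failures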
    with ThreadPoolExecutor(max_workers=len(files)) as executor:
        return set(itertools.chain.from_iterable(executor.map(grep_notok, files)))


def get_clone_chain(openqa_host: str, job_id: int) -> list[int]:
    """
    Follow clone_id from job to job and return the full chain
    [job_id, clone_id, clone_id_of_clone, ...]
    """

    chain = []
    current = job_id
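    # clone_id points at the job that superseded this one and is null for the newest job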
    while current:
        # We use "/details" because we'll need this information again and get_job() is cached
        job = get_job(f"{openqa_host}/api/v1/jobs/{current}/details")
        if "BATS_PACKAGE" not in job["settings"]:
            log.error("Not a BATS test: %d", job_id)
            sys.exit(1)
        chain.append(current)
        current = job["clone_id"]
    return chain


def main(url: str) -> None:
    """
    Main function
    """

    if not url.startswith(("http://", "https://")):
        url = f"https://{url}"
    urlx = urlparse(url)
    openqa_host = f"{urlx.scheme}://{urlx.netloc}"
    job_id = int(os.path.basename(urlx.path))

    chain = get_clone_chain(openqa_host, job_id)
    if len(chain) <= 1:
        log.info("No clones. Exiting")
        sys.exit(0)
    log.info("Processing clone chain: %s", " -> ".join(map(str, chain)))

    all_failures = []

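    # Collect the set of failing tests for every job in the chain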
    for job_id in chain:
        job = get_job(f"{openqa_host}/api/v1/jobs/{job_id}/details")
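        # Only the uploaded TAP logs (*.tap.txt) are relevant here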
        logs = [
            f"{openqa_host}/tests/{job_id}/file/{log}"
            for log in job["ulogs"]
            if log.endswith(".tap.txt")
        ]
        if not logs:
            log.info("Job %s has no .tap.txt logs, skipping", job_id)
            continue

        failed = process_files(logs)
        all_failures.append(failed)

    if not all_failures:
        log.info("No logs found in chain. Exiting")
        sys.exit(0)

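    # Only tests that failed in every job of the chain count as real failures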
    common_failures: set[str] = set.intersection(*all_failures)

    if not common_failures:
        log.info("No common failures across clone chain. Tagging as PASSED.")
        # TODO: Tag job as passed
    else:
        log.info("Common failures found across clone chain:")
        for failure in sorted(common_failures):
            log.info("  %s", failure)
    sys.exit(0)


def parse_args() -> argparse.Namespace:
    """
    Parse args
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("url", help="URL of the openQA job")
    return parser.parse_args()


if __name__ == "__main__":
    main(parse_args().url)