From ea0ad2ebe4587a758198f7f6f589fa3a981a0345 Mon Sep 17 00:00:00 2001
From: Douglas Reis
Date: Tue, 2 Apr 2024 09:22:34 +0100
Subject: [PATCH] Create scripts to run and upload the ES nightly

The scripts are used to run the SiVal test suite and upload the
results to a Google Sheet.

Signed-off-by: Douglas Reis
---
 util/silicon-nightly-runner/README.md         |  63 +++++
 util/silicon-nightly-runner/bazel_report.py   | 266 ++++++++++++++++++
 util/silicon-nightly-runner/ot_test_parser.py | 137 +++++++++
 .../parse_test_results.sh                     |  57 ++++
 util/silicon-nightly-runner/push_database.py  |  80 ++++++
 util/silicon-nightly-runner/push_google.py    | 219 ++++++++++++++
 util/silicon-nightly-runner/requirements.txt  |   2 +
 util/silicon-nightly-runner/results.py        |  61 ++++
 util/silicon-nightly-runner/run_server.sh     |  24 ++
 util/silicon-nightly-runner/run_tests.sh      |  27 ++
 util/silicon-nightly-runner/upload_results.sh |  38 +++
 11 files changed, 974 insertions(+)
 create mode 100644 util/silicon-nightly-runner/README.md
 create mode 100644 util/silicon-nightly-runner/bazel_report.py
 create mode 100755 util/silicon-nightly-runner/ot_test_parser.py
 create mode 100755 util/silicon-nightly-runner/parse_test_results.sh
 create mode 100644 util/silicon-nightly-runner/push_database.py
 create mode 100644 util/silicon-nightly-runner/push_google.py
 create mode 100644 util/silicon-nightly-runner/requirements.txt
 create mode 100644 util/silicon-nightly-runner/results.py
 create mode 100755 util/silicon-nightly-runner/run_server.sh
 create mode 100755 util/silicon-nightly-runner/run_tests.sh
 create mode 100755 util/silicon-nightly-runner/upload_results.sh

diff --git a/util/silicon-nightly-runner/README.md b/util/silicon-nightly-runner/README.md
new file mode 100644
index 00000000000000..b97d8d1b674f8b
--- /dev/null
+++ b/util/silicon-nightly-runner/README.md
@@ -0,0 +1,63 @@
+# Silicon nightly runner
+
+## Summary
+This is a tool that pulls the latest changes from the `earlgrey_es_sival` branch, runs the SiVal test suite and uploads the results to a Google Sheet.
+
+## Requirements
+You need to get a Google OAuth token as described [here](https://docs.gspread.org/en/v6.0.0/oauth2.html#for-end-users-using-oauth-client-id).
+Once you have the user account token in a JSON file, choose a folder to store it. The folder `$HOME/.config/silicon-nightly-runner/` is recommended.
+
+You also need to create a config file in `$HOME/.config/silicon-nightly-runner/config.json` with the following format:
+```json
+{
+    "ot_home" : "path/to/opentitan/repo",
+    "google_oauth_file": "path/to/token/user-account.json",
+    "google_service_file": "path/to/token/of/service/account",
+    "sheet_id": "spreadsheet-id",
+    "sheet_tab": "tab-name",
+    "sheet_row_offset": 2,
+    "sheet_column_offset": 4,
+    "sheet_testname_column_offset": 3
+}
+```
+
+Where:
+
+ 1. **ot_home**: The path to a clone of the OpenTitan repository.
+ 1. **google_oauth_file**: The path to the Google OAuth token, used when authenticating with a user ID.
+ 1. **google_service_file**: The path to the service account token, used when authenticating with a service account.
+ 1. **sheet_id**: The ID of the spreadsheet, which is part of the sheet URL, i.e. `https://docs.google.com/spreadsheets/d/<sheet-id>`.
+ 1. **sheet_tab**: The tab name in the sheet; it will be created if it doesn't exist.
+ 1. **sheet_row_offset**: The row offset at which the results start to be populated. Normally the first row is reserved for the header.
+ 1. **sheet_column_offset**: The column offset at which the results start to be populated.
+ 1. **sheet_testname_column_offset**: The column offset where the test names should be.
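+
+Before installing the nightly job, it is worth sanity-checking that the config file parses and contains the keys the scripts expect. A minimal sketch (the required key list here is illustrative, extend it to match your setup):
+```python
+import json
+import os
+
+# Path used by ot_test_parser.py to locate the configuration.
+path = os.path.expanduser("~/.config/silicon-nightly-runner/config.json")
+with open(path, encoding="utf-8") as f:
+    config = json.load(f)
+
+required = ["ot_home", "sheet_id", "sheet_tab"]
+missing = [key for key in required if key not in config]
+print("Missing keys: {}".format(missing) if missing else "Config OK")
+```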
+
+## Installing a nightly job
+Create a cronjob using the command:
+```sh
+crontab -e
+```
+The cronjob configuration file should look like:
+
+```console
+$ crontab -l
+
+SHELL=/bin/bash
+
+# 7:00am each day
+00 7 * * * cd /utils/silicon-nightly-runner && mkdir -p ./logs && ./run_tests.sh 2>&1 | tee "./logs/$(date +\%Y-\%m-\%d)-run-tests.log" && ./parse_test_results.sh 2>&1 | tee "./logs/$(date +\%Y-\%m-\%d)-parse-results.log"
+```
+
+## Uploading results
+If the upload step fails for some reason, the results can be uploaded later, as they are stored in the `archive` folder.
+The script `upload_results.sh` can upload all the results in `archive` or only one day's results.
+
+To upload all the results in `archive`:
+```sh
+./upload_results.sh
+```
+
+To upload a specific day:
+```sh
+./upload_results.sh ./archive/<date>/test.xml
+```
diff --git a/util/silicon-nightly-runner/bazel_report.py b/util/silicon-nightly-runner/bazel_report.py
new file mode 100644
index 00000000000000..a21880fb4db411
--- /dev/null
+++ b/util/silicon-nightly-runner/bazel_report.py
@@ -0,0 +1,266 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Parser for the OpenTitan test result files.
+
+The test result files are supplied as JUnitXML, which has test cases named in a particular way.
+We can process this and extract the content so that it can be processed into our data
+collection systems.
+
+The JUnitXML is in a slightly different format to that expected by the junitparser library.
+The format used by the bazel output is (approximately):
+
+    <testsuites>
+        <testsuite name="...">
+            <testcase name="..." time="..."/>
+            <system-out>
+                CDATA
+            </system-out>
+        </testsuite>
+    </testsuites>
+
+Whereas junitparser expects the <system-out> to be present within the <testcase> for it to
+be accessible.
+"""
+
+import datetime
+import os
+import socket
+
+import junitparser
+
+from results import Results, Result, State
+
+
+# Configuration for the module
+
+# Should we change the name of the testcase in the JUnitXML that we write out?
+MODIFY_TEST_NAME_IN_JUNITXML = True
+
+
+class JUnitNotRecognisedError(Exception):
+    """
+    Raised if we didn't understand the JUnitXML.
+    """
+
+    pass
+
+
+class OTJUnitXML:
+    def __init__(self, filename):
+        self.filename = filename
+        self._junitxml = None
+        self._results = None
+
+    @property
+    def junitxml(self):
+        if self._junitxml is None:
+            try:
+                self._junitxml = junitparser.JUnitXml.fromfile(self.filename)
+            except junitparser.junitparser.JUnitXmlError as exc:
+                raise JUnitNotRecognisedError(
+                    "JUnitXML not recognised: {}".format(exc)
+                ) from exc
+
+            # Fix up the JUnit XML by moving the output around.
+            suites = list(self._junitxml)
+            for suite in suites:
+                # Move the system-data at the suite level to the test level.
+                # (only for the first test)
+                tests = list(suite)
+                system_out = suite._elem.find("system-out")
+                if system_out is not None:
+                    test = tests[0]
+                    # Add the system-out to the test element
+                    test._elem.append(system_out)
+                    suite._elem.remove(system_out)
+
+                if MODIFY_TEST_NAME_IN_JUNITXML:
+                    for test in tests:
+                        test.name = self.bazel_name(test.name)
+
+        return self._junitxml
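+
+    # Illustrative example of the conversion performed by bazel_name() below
+    # (the test name shown is hypothetical):
+    #   "sw/device/tests/uart_smoketest.bash" -> "//sw/device/tests:uart_smoketest"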
+
+    def bazel_name(self, name):
+        """
+        Convert the unit test name to the Bazel specification name.
+        """
+        if not name.startswith("//"):
+            # We only manipulate the test name if it does not start with a //.
+            name = "//" + name
+            if name.endswith(".bash"):
+                name = name[:-5]
+            (left, right) = name.rsplit("/", 1)
+            name = "{}:{}".format(left, right)
+        return name
+
+    @property
+    def timestamp(self):
+        return self.junitxml.timestamp
+
+    @timestamp.setter
+    def timestamp(self, value):
+        self.junitxml.timestamp = value
+
+    @property
+    def results(self):
+        """
+        Turn the JUnitXML into a Results object.
+        """
+        if not self._results:
+            self._results = Results()
+
+            suites = list(self.junitxml)
+            for suite in suites:
+                for test in suite:
+                    name = self.bazel_name(test.name)
+                    if test.is_skipped:
+                        state = State.SKIPPED
+                    elif test.is_passed:
+                        state = State.PASSED
+                    else:
+                        # Error is not reported by the junitparser library, so we need to
+                        # do this ourselves.
+                        state = State.FAILED
+                        for res in test.result:
+                            if isinstance(res, junitparser.Error):
+                                state = State.ERRORED
+                    duration = test.time
+                    output = test.system_out
+
+                    result = Result(name, state, duration, output)
+                    self._results.tests.append(result)
+
+        return self._results
+
+    def ntests(self):
+        return self.results.ntests
+
+
+class OTDir:
+    def __init__(self, path, collection_date=None):
+        """
+        OpenTitan results directory parser.
+
+        @param path: Path to the bazel-out directory to parse test results from
+        @param collection_date: Datetime that the data was collected, to populate into the
+                results, or None to use today
+        """
+        self.path = path
+        all_results = Results()
+        all_junitxml = []
+
+        if collection_date is None:
+            self.timestamp = None
+            self.timestamp_datetime = None
+        elif isinstance(collection_date, datetime.datetime):
+            # Turn into an ISO 8601 formatted time string if we're given a datetime.
+            self.timestamp_datetime = collection_date
+            self.timestamp = collection_date.isoformat()
+        else:
+            # Ensure that collection date is a datetime, and that timestamp is an
+            # ISO 8601 string
+            self.timestamp = collection_date
+            self.timestamp_datetime = datetime.datetime.fromisoformat(collection_date)
+
+        all_results.timestamp = self.timestamp
+
+        print("Scanning for test files in %s" % (self.path,))
+        for dir_path, dir_names, file_names in os.walk(self.path):
+            # Ensure that we walk down the directories in a known order
+            dir_names.sort()
+
+            # The only file we care about is 'test.xml' at present - if there are other
+            # XML files present, we will ignore them as they're almost certainly not
+            # JUnitXML.
+            test_file = os.path.join(dir_path, "test.xml")
+            if os.path.exists(test_file):
+                print("Processing %s" % (test_file,))
+                try:
+                    testxml = OTJUnitXML(test_file)
+                    if collection_date:
+                        # Override the timestamp (or supply one) if one was given.
+                        testxml.timestamp = self.timestamp
+                    else:
+                        if testxml.timestamp:
+                            # If we didn't have a timestamp, populate it from the read data
+                            self.timestamp = testxml.timestamp
+                            self.timestamp_datetime = datetime.datetime.fromisoformat(
+                                testxml.timestamp
+                            )
+
+                    results = testxml.results
+                except JUnitNotRecognisedError as exc:
+                    # If we don't recognise the JUnitXML, we'll just skip this file
+                    print("Skipping XML file '%s': %s" % (test_file, exc))
+                    continue
+                all_junitxml.append(testxml)
+                all_results.tests.extend(results.tests)
+
+        self.all_junitxml = all_junitxml
+        self.all_results = all_results
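+
+    # The collection_date parameter accepts several forms (illustrative values):
+    #   OTDir("bazel-out/")                                  # timestamp read from test.xml
+    #   OTDir("bazel-out/", datetime.datetime(2024, 4, 2))   # a datetime object
+    #   OTDir("bazel-out/", "2024-04-02T07:00:00")           # an ISO 8601 string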
+
+    def ntests(self):
+        """
+        Retrieve the total number of tests.
+        """
+        return self.all_results.ntests
+
+    def write(
+        self, output, flatten_testsuites=False, add_hostname=False, add_properties=None
+    ):
+        """
+        Write out an amalgamated JUnitXML file.
+
+        @param output: The file to write the JUnitXML file to
+        @param flatten_testsuites: Flatten the test suites to just one test suite
+        @param add_hostname: Add the canonical hostname to each test suite
+        @param add_properties: Properties to set as a dictionary; use a value
+                of None to delete.
+        """
+
+        def modify_suite(suite):
+            if add_hostname:
+                suite.hostname = socket.getaddrinfo(
+                    socket.gethostname(), 0, flags=socket.AI_CANONNAME
+                )[0][3]
+            if add_properties:
+                for key, value in add_properties.items():
+                    if value is None:
+                        suite.remove_property(key)
+                    else:
+                        suite.add_property(key, str(value))
+
+        xml = junitparser.JUnitXml()
+        if flatten_testsuites:
+            # Produce a file that has only a single test suite containing all the tests
+            print("Flattening suites")
+            ts = junitparser.TestSuite(name="OpenTitan test results")
+            modify_suite(ts)
+            for otjunitxml in self.all_junitxml:
+                if not ts.timestamp:
+                    ts.timestamp = otjunitxml.timestamp
+                for suite in otjunitxml.junitxml:
+                    for test in suite:
+                        ts.add_testcase(test)
+            xml.add_testsuite(ts)
+
+        else:
+            # Produce a file that has many test suites, one per results file
+            for otjunitxml in self.all_junitxml:
+                for suite in otjunitxml.junitxml:
+                    modify_suite(suite)
+                    if not suite.timestamp:
+                        suite.timestamp = otjunitxml.timestamp
+                    xml.add_testsuite(suite)
+        xml.write(output)
+
+
+if __name__ == "__main__":
+    bazel_out_dir = "bazel-out/"
+
+    otdir = OTDir(bazel_out_dir)
+
+    for result in otdir.all_results:
+        print("Test '%s': state=%s" % (result.name, result.state))
diff --git a/util/silicon-nightly-runner/ot_test_parser.py b/util/silicon-nightly-runner/ot_test_parser.py
new file mode 100755
index 00000000000000..d9966128cd73b3
--- /dev/null
+++ b/util/silicon-nightly-runner/ot_test_parser.py
@@ -0,0 +1,137 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Run the tests we're interested in, then file the results away.
+"""
+
+import argparse
+import datetime
+import json
+import os
+import sys
+
+import gspread
+
+import bazel_report
+import push_google
+
+
+def date_or_datetime(s):
+    """
+    Parse a date or a datetime into a datetime structure.
+    """
+    try:
+        # Simple date only; assume midday.
+        dt = datetime.datetime.strptime(s, "%Y-%m-%d")
+        dt = dt.replace(hour=12, minute=0, second=0)
+    except ValueError:
+        # Fully specified date and time
+        dt = datetime.datetime.strptime(s, "%Y-%m-%d %H:%M")
+    return dt
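+
+
+# Examples of values accepted by date_or_datetime() (illustrative):
+#   "2024-04-02"        -> 2024-04-02 12:00:00 (midday is assumed)
+#   "2024-04-02 07:30"  -> 2024-04-02 07:30:00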
+
+
+parser = argparse.ArgumentParser(
+    description="Importing test results from OpenTitan test runs"
+)
+
+parser.add_argument(
+    "--results-date",
+    type=date_or_datetime,
+    default=None,
+    help="Results data collection date",
+)
+
+parser.add_argument(
+    "--parse",
+    action="store_true",
+    help="Collate output into a single JUnitXML file specified by --filename",
+)
+
+parser.add_argument(
+    "--upload-sheet",
+    action="store_true",
+    help="Upload the results in --filename to Google Sheets.",
+)
+
+parser.add_argument(
+    "--filename",
+    type=str,
+    default=None,
+    required=True,
+    help="The JUnitXML file to write (with --parse) or upload (with --upload-sheet)",
+)
+
+parser.add_argument(
+    "--output-flattened",
+    action="store_true",
+    default=False,
+    help="Flatten the JUnitXML output to a single test suite (rather than multiple)",
+)
+
+parser.add_argument(
+    "--output-add-hostname",
+    action="store_true",
+    default=False,
+    help="Add the hostname to the test suites",
+)
+
+parser.add_argument(
+    "--output-add-property",
+    action="append",
+    help="Add properties to the test suite in the form KEY=VALUE. May be specified "
+    "multiple times",
+)
+
+parser.add_argument(
+    "--runner-id", type=str, default="", help="A string identifying the board"
+)
+
+options = parser.parse_args()
+
+# Get the path to the home directory
+home_dir = os.path.expanduser("~")
+config_filepath = os.path.join(home_dir, ".config/silicon-nightly-runner/config.json")
+config = {}
+with open(config_filepath, "r", encoding="utf-8") as f:
+    config = json.load(f)
+
+if options.parse:
+    otdir = bazel_report.OTDir(
+        config.get("ot_home") + "/bazel-out/", collection_date=options.results_date
+    )
+    if not otdir.ntests():
+        sys.exit("No tests were found in {}/bazel-out/".format(config.get("ot_home")))
+
+    properties = dict(
+        prop.split("=", 1) for prop in options.output_add_property or []
+    )
+    otdir.write(
+        options.filename,
+        flatten_testsuites=options.output_flattened,
+        add_hostname=options.output_add_hostname,
+        add_properties=properties,
+    )
+
+if options.upload_sheet:
+    if config.get("google_service_file"):
+        gcreds = gspread.service_account(filename=config.get("google_service_file"))
+    elif config.get("google_oauth_file"):
+        gcreds = gspread.oauth(credentials_filename=config.get("google_oauth_file"))
+    else:
+        sys.exit(
+            """Either google_oauth_file or google_service_file must be supplied in the
+            config.json file."""
+        )
+
+    tab_name = config.get("sheet_tab")
+    if options.runner_id:
+        tab_name += " " + options.runner_id
+    pusher = push_google.TestResultPusher(
+        gcreds,
+        sheet_id=config.get("sheet_id"),
+        sheet_tab=tab_name,
+        row_offset=config.get("sheet_row_offset"),
+        column_offset=config.get("sheet_column_offset"),
+        test_name_column=config.get("sheet_testname_column_offset"),
+    )
+    pusher.push(options.filename)
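+
+# Typical invocations (illustrative; this mirrors how the wrapper scripts call the tool):
+#   ./ot_test_parser.py --parse --output-flattened --filename archive/2024-04-02/test.xml
+#   ./ot_test_parser.py --upload-sheet --filename archive/2024-04-02/test.xml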
diff --git a/util/silicon-nightly-runner/parse_test_results.sh b/util/silicon-nightly-runner/parse_test_results.sh
new file mode 100755
index 00000000000000..c5038334cc17a6
--- /dev/null
+++ b/util/silicon-nightly-runner/parse_test_results.sh
@@ -0,0 +1,57 @@
+#!/usr/bin/env bash
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+##
+# Parse the test results and submit them to Google Sheets.
+#
+
+USER=$(whoami)
+BRANCH=${BRANCH:-earlgrey_es_sival}
+
+extra_parameters=()
+ESLABELS=$(ls -1 "/dev/esowner-$USER" | sort -n | paste -sd,)
+extra_parameters+=(--output-add-property "ESLabel=$ESLABELS")
+extra_parameters+=(--output-add-property "User=$USER")
+if [[ "$SHA1" != '' ]] ; then
+    extra_parameters+=(--output-add-property "GITSHA=$SHA1")
+fi
+extra_parameters+=(--output-add-hostname)
+extra_parameters+=(--output-flattened)
+
+set -e
+
+VARIANT="${VARIANT:-}"
+
+if [ ! -f venv/configured ] ; then
+    rm -rf venv
+    python3 -m venv venv
+    source "venv/bin/activate"
+    pip install -r requirements.txt
+    touch venv/configured
+else
+    source "venv/bin/activate"
+fi
+
+datenow=$(date +%Y-%m-%d)
+
+archive_dir="archive/${datenow}"
+mkdir -p "$archive_dir"
+
+reports_dir=reports
+mkdir -p "$reports_dir"
+
+output_xml="${archive_dir}/test.xml"
+
+echo "++++ Parsing and uploading tests results"
+${PYTHON:-python3} ./ot_test_parser.py --results-date "$(date +%Y-%m-%d)" \
+    --parse \
+    --upload-sheet \
+    --filename "$output_xml" \
+    --runner-id "$VARIANT" \
+    "${extra_parameters[@]}" \
+    "$@"
+
+echo "++++ Generating HTML for the matrix of results"
+ln -sf "$PWD/$output_xml" "$reports_dir/${datenow}.xml"
+cd "$reports_dir" ; junit2html *.xml --report-matrix "all.html"
diff --git a/util/silicon-nightly-runner/push_database.py b/util/silicon-nightly-runner/push_database.py
new file mode 100644
index 00000000000000..e27503857d4b30
--- /dev/null
+++ b/util/silicon-nightly-runner/push_database.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Push result data to a database.
+"""
+
+import sqlite3
+
+
+class DBError(Exception):
+    pass
+
+
+class TestResultPusher:
+    def __init__(
+        self, database, table_name_execs, table_name_testcases, table_name_testnames
+    ):
+        """
+        Push the results to a database using the given table names.
+
+        @param database: The database object to push to
+        @param table_name_execs: Table name for executions
+        @param table_name_testcases: Table name for test cases
+        @param table_name_testnames: Table name for test names
+        """
+        self.connection = database
+        self.table_name_execs = table_name_execs
+        self.table_name_testnames = table_name_testnames
+        self.table_name_testcases = table_name_testcases
+
+    def execute(self, sql):
+        try:
+            c = self.connection.cursor()
+            c.execute(sql)
+        except sqlite3.Error as exc:
+            raise DBError(str(exc)) from exc
+
+    def create_tables(self):
+        """
+        Ensure that the tables for the data exist.
+        """
+
+        # The executions table
+        sql = """\
+CREATE TABLE IF NOT EXISTS {} (
+    id INTEGER PRIMARY KEY,
+    timestamp NOT NULL, -- a unix epoch time object
+    hostname VARCHAR(256)
+);
+""".format(
+            self.table_name_execs
+        )
+        self.execute(sql)
+
+        # The test names table
+        sql = """\
+CREATE TABLE IF NOT EXISTS {} (
+    id INTEGER PRIMARY KEY,
+    name VARCHAR(256) UNIQUE
+);
+""".format(
+            self.table_name_testnames
+        )
+        self.execute(sql)
+
+        # The test cases table
+        sql = """\
+CREATE TABLE IF NOT EXISTS {} (
+    id INTEGER,
+    name_id INTEGER NOT NULL,
+    duration REAL NOT NULL,
+    result VARCHAR(8), -- Passed, Failed, Skipped, Errored
+    FOREIGN KEY(name_id) REFERENCES {}(id)
+);
+""".format(
+            self.table_name_testcases, self.table_name_testnames
+        )
+        self.execute(sql)
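+
+
+# Example wiring (a sketch; the database filename and table names are illustrative):
+#   connection = sqlite3.connect("results.db")
+#   pusher = TestResultPusher(connection, "executions", "testcases", "testnames")
+#   pusher.create_tables()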
diff --git a/util/silicon-nightly-runner/push_google.py b/util/silicon-nightly-runner/push_google.py
new file mode 100644
index 00000000000000..54ad93d9b9aa58
--- /dev/null
+++ b/util/silicon-nightly-runner/push_google.py
@@ -0,0 +1,219 @@
+#!/usr/bin/env python3
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+"""
+Push data to a Google Sheet.
+"""
+
+import datetime
+import re
+
+import gspread
+
+import bazel_report
+
+SERVICE_FILE = "service-account.json"
+USER_FILE = "user-account.json"
+
+
+def continuous_sequences(numbers):
+    """
+    Turn a list of numbers into a list of lists of continuous sequences.
+
+    For example the number sequence [1,2,3,5,6] would be turned into [[1,2,3], [5,6]].
+    """
+    numbers = sorted(numbers)
+    sequences = []
+    sequence = []
+    last_number = None
+    for number in numbers:
+        if last_number is not None and last_number + 1 != number:
+            sequences.append(sequence)
+            sequence = []
+
+        sequence.append(number)
+        last_number = number
+    if sequence:
+        sequences.append(sequence)
+
+    return sequences
+
+
+def column_name(column):
+    column -= 1
+    if column < 26:
+        return chr(65 + column)
+    else:
+        return chr(65 + (column // 26) - 1) + chr(65 + (column % 26))
+
+
+def cell_name(row, column):
+    return column_name(column) + str(row)
+
+
+class CoalescingSheetUpdater:
+    """
+    Update a Google sheet with random access to cells, coalescing requests.
+
+    The Google sheet API has a rate limit on the number of write operations that can be
+    applied per minute. We want to be able to use random access to write to the cells, and
+    then apply the changes to the sheet en masse.
+    """
+
+    def __init__(self, sheet):
+        self.sheet = sheet
+
+        # For the operations we're doing, we focus on updating columns of data in chunks
+        # when the changes are committed
+        self.update_columns = {}
+
+    def update_cell(self, y, x, value):
+        """
+        Update a cell in our list of updates to apply.
+        """
+        if x not in self.update_columns:
+            self.update_columns[x] = {}
+        self.update_columns[x][y] = value
+
+    def commit(self):
+        """
+        Commit the cell updates that have been accumulated, on a per-column basis.
+        """
+        for x, column in self.update_columns.items():
+            # Each column may have discontinuous regions that are being updated.
+            # We need to separate out the updates into regions that are continuous
+            # so that we can update each region in one go.
+            rows = column.keys()
+            for group in continuous_sequences(rows):
+                # Each group is a set of rows that makes up a continuous sequence
+                values = [[column[row]] for row in group]
+                cell_range = "{}:{}".format(
+                    cell_name(group[0], x), cell_name(group[-1], x)
+                )
+                print("Cells %s = %r" % (cell_range, values))
+                self.sheet.update(range_name=cell_range, values=values)
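+
+
+# Worked examples for the helpers above (values are illustrative):
+#   continuous_sequences([1, 2, 3, 5, 6])  -> [[1, 2, 3], [5, 6]]
+#   column_name(1)   -> "A"
+#   column_name(27)  -> "AA"
+#   cell_name(3, 28) -> "AB3"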
+
+
+class TestResultPusher:
+    def __init__(
+        self,
+        gcreds,
+        sheet_id,
+        sheet_tab,
+        row_offset=1,
+        column_offset=2,
+        test_name_column=1,
+    ):
+        self.gcreds = gcreds
+        self.sheet_id = sheet_id
+        self.sheet_tab = sheet_tab
+        self.row_offset = row_offset
+        self.column_offset = column_offset
+        self.test_name_column = test_name_column
+        self.test_name_rows = {}
+        self.test_name_last_row = self.row_offset - 1
+
+    def push(self, junit_file, date=None):
+        sh = self.gcreds.open_by_key(self.sheet_id)
+
+        sheet = None
+        try:
+            sheet = sh.worksheet(self.sheet_tab)
+        except gspread.WorksheetNotFound:
+            sheet = sh.add_worksheet(self.sheet_tab, rows=1000, cols=200)
+
+        updater = CoalescingSheetUpdater(sheet)
+        self.get_test_names(sheet)
+
+        suites = bazel_report.OTJUnitXML(junit_file)
+
+        # If no result date was supplied, we use the file's timestamp, or today.
+        if date:
+            date = date.date()
+        # FIX-ME: Use suites.timestamp instead.
+        elif list(suites.junitxml)[0].timestamp:
+            date = datetime.datetime.strptime(
+                list(suites.junitxml)[0].timestamp, "%Y-%m-%dT%H:%M:%S"
+            ).date()
+        else:
+            date = datetime.date.today()
+        start = self.start_date(sheet)
+        delta = date - start
+        column = max(0, delta.days)
+
+        # Update the date entry as a heading (if there is space)
+        if self.row_offset > 1:
+            updater.update_cell(
+                self.row_offset - 1, column + self.column_offset, date.isoformat()
+            )
+        for result in suites.results:
+            value = result.state.value
+            row = self.test_name_rows.get(result.name, None)
+            if not row:
+                # This test isn't known to the table, so we need to add it as a new row
+                self.test_name_last_row += 1
+                row = self.test_name_last_row
+                print("No test name found for %s, need to add to rows" % (result.name,))
+                updater.update_cell(row, self.test_name_column, result.name)
+
+            updater.update_cell(row, column + self.column_offset, value)
+
+        updater.commit()
+
+    def start_date(self, sheet):
+        """
+        Obtain the first date within the sheet; if none can be found, return today.
+        """
+        columns = self.get_column_names(sheet)
+        columns = [] if len(columns) == 0 else columns[0]
+        pattern = re.compile(r"\d{4}-\d{2}-\d{2}")
+        for column in columns:
+            matches = pattern.findall(column)
+            if matches:
+                return datetime.date.fromisoformat(matches[0])
+        return datetime.date.today()
+
+    def get_test_names(self, sheet):
+        """
+        Obtain a list of the test names and their rows within the sheet.
+        """
+        row_chunks = 100
+        row_base = self.row_offset
+        while True:
+            cell_range = "{}:{}".format(
+                cell_name(row_base, self.test_name_column),
+                cell_name(row_base + row_chunks - 1, self.test_name_column),
+            )
+            cells = sheet.get_values(cell_range)
+            done = len(cells) < row_chunks
+            for offset, cell in enumerate(cells):
+                row = row_base + offset
+                if len(cell) == 0:
+                    done = True
+                    continue
+                value = cell[0]
+                if not value:
+                    done = True
+                else:
+                    print("Test '%s' is row %i" % (value, row))
+                    if self.test_name_rows.get(value):
+                        print(
+                            "WARNING: The test '%s' is present multiple times in the sheet"
+                            % (value,)
+                        )
+                    self.test_name_rows[value] = row
+                    self.test_name_last_row = row
+            if done:
+                break
+            row_base += row_chunks
+
+    def get_column_names(self, sheet):
+        """
+        Obtain a list of column names within the sheet.
+        """
+        row_base = 1
+        cell_range = "{}:{}".format(
+            cell_name(row_base, self.test_name_column),
+            cell_name(row_base, self.test_name_column + 50),
+        )
+
+        return sheet.get_values(cell_range)
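+
+
+# Example end-to-end push (a sketch; the sheet ID, tab name and offsets are illustrative):
+#   gcreds = gspread.service_account(filename=SERVICE_FILE)
+#   pusher = TestResultPusher(gcreds, sheet_id="<sheet-id>", sheet_tab="nightly",
+#                             row_offset=2, column_offset=4, test_name_column=3)
+#   pusher.push("archive/2024-04-02/test.xml")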
+""" + +import datetime + +from dataclasses import dataclass, field +from enum import Enum + + +class State(Enum): + PASSED = "Passed" + FAILED = "Failed" + SKIPPED = "Skipped" + ERRORED = "Error" + + +@dataclass +class Result(object): + name: str + state: State + duration: float + output: str + + +@dataclass +class Results(object): + hostname: str = "" + tests: list[Result] = field(default_factory=list) + timestamp: datetime.datetime = None + + def __iter__(self): + return iter(self.tests) + + @property + def ntests(self): + return len(self.tests) + + @property + def npassed(self): + return len([test for test in self.tests if test.state == State.PASSED]) + + @property + def nskipped(self): + return len([test for test in self.tests if test.state == State.SKIPPED]) + + @property + def nfailed(self): + return len([test for test in self.tests if test.state == State.FAILED]) + + @property + def nerrored(self): + return len([test for test in self.tests if test.state == State.ERRORED]) + + @property + def duration(self): + return sum(test.duration for test in self.tests) diff --git a/util/silicon-nightly-runner/run_server.sh b/util/silicon-nightly-runner/run_server.sh new file mode 100755 index 00000000000000..6b75a1f98a8bae --- /dev/null +++ b/util/silicon-nightly-runner/run_server.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash +# Copyright lowRISC contributors. +# Licensed under the Apache License, Version 2.0, see LICENSE for details. +# SPDX-License-Identifier: Apache-2.0 + +## +# Run a simple HTTP server to display the results for individual runs. +# + +set -e + +PORT=3080 + +if [ ! -f venv/configured ] ; then + rm -rf venv + python3 -m venv venv + source venv/bin/activate + pip install -r requirements.txt + touch venv/configured +else + source venv/bin/activate +fi + +python3 -m http.server --directory reports/ $PORT & diff --git a/util/silicon-nightly-runner/run_tests.sh b/util/silicon-nightly-runner/run_tests.sh new file mode 100755 index 00000000000000..a7e64550f7b94e --- /dev/null +++ b/util/silicon-nightly-runner/run_tests.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash +# Copyright lowRISC contributors. +# Licensed under the Apache License, Version 2.0, see LICENSE for details. +# SPDX-License-Identifier: Apache-2.0 +## +# Run the tests. +# + +USER=$(whoami) +BRANCH=${BRANCH:-earlgrey_es_sival} +OT_HOME=../opentitan + +cd $OT_HOME || exit +git checkout "${BRANCH}" +git pull --autostash + +if [ -d bazel-out ] ; then + chmod -R +w bazel-out/ + rm -rf bazel-out/ +fi + +./bazelisk.sh clean +./bazelisk.sh test --//signing:token=//signing/tokens:cloud_kms \ + --build_tests_only \ + --test_tag_filters="silicon_owner_sival_rom_ext" \ + //sw/device/tests/... || true +cd - || exit diff --git a/util/silicon-nightly-runner/upload_results.sh b/util/silicon-nightly-runner/upload_results.sh new file mode 100755 index 00000000000000..c791f87faa43a5 --- /dev/null +++ b/util/silicon-nightly-runner/upload_results.sh @@ -0,0 +1,38 @@ +#!/usr/bin/env bash +# Copyright lowRISC contributors. +# Licensed under the Apache License, Version 2.0, see LICENSE for details. +# SPDX-License-Identifier: Apache-2.0 +## +# Submit test results to the google sheets. +# If a resport is not provided it scans the arquive dir and upload all. +# + +VARIANT="${VARIANT:-}" + +if [ ! 
diff --git a/util/silicon-nightly-runner/run_server.sh b/util/silicon-nightly-runner/run_server.sh
new file mode 100755
index 00000000000000..6b75a1f98a8bae
--- /dev/null
+++ b/util/silicon-nightly-runner/run_server.sh
@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+
+##
+# Run a simple HTTP server to display the results for individual runs.
+#
+
+set -e
+
+PORT=3080
+
+if [ ! -f venv/configured ] ; then
+    rm -rf venv
+    python3 -m venv venv
+    source venv/bin/activate
+    pip install -r requirements.txt
+    touch venv/configured
+else
+    source venv/bin/activate
+fi
+
+python3 -m http.server --directory reports/ $PORT &
diff --git a/util/silicon-nightly-runner/run_tests.sh b/util/silicon-nightly-runner/run_tests.sh
new file mode 100755
index 00000000000000..a7e64550f7b94e
--- /dev/null
+++ b/util/silicon-nightly-runner/run_tests.sh
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+##
+# Run the tests.
+#
+
+USER=$(whoami)
+BRANCH=${BRANCH:-earlgrey_es_sival}
+OT_HOME=../opentitan
+
+cd "$OT_HOME" || exit
+git checkout "${BRANCH}"
+git pull --autostash
+
+if [ -d bazel-out ] ; then
+    chmod -R +w bazel-out/
+    rm -rf bazel-out/
+fi
+
+./bazelisk.sh clean
+./bazelisk.sh test --//signing:token=//signing/tokens:cloud_kms \
+    --build_tests_only \
+    --test_tag_filters="silicon_owner_sival_rom_ext" \
+    //sw/device/tests/... || true
+cd - || exit
diff --git a/util/silicon-nightly-runner/upload_results.sh b/util/silicon-nightly-runner/upload_results.sh
new file mode 100755
index 00000000000000..c791f87faa43a5
--- /dev/null
+++ b/util/silicon-nightly-runner/upload_results.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+# Copyright lowRISC contributors.
+# Licensed under the Apache License, Version 2.0, see LICENSE for details.
+# SPDX-License-Identifier: Apache-2.0
+##
+# Submit test results to Google Sheets.
+# If a report is not provided, it scans the archive dir and uploads everything.
+#
+
+VARIANT="${VARIANT:-}"
+
+if [ ! -f venv/configured ] ; then
+    rm -rf venv
+    python3 -m venv venv
+    source "venv/bin/activate"
+    pip install -r requirements.txt
+    touch venv/configured
+else
+    source "venv/bin/activate"
+fi
+
+set -e
+
+if [ -n "$1" ]; then
+    echo "Uploading report $1"
+    ${PYTHON:-python3} ./ot_test_parser.py --upload-sheet --filename "$1" --runner-id "$VARIANT"
+else
+    archive_dir="archive"
+    dir_list=$(ls -1 -X "$archive_dir")
+    for dir in $dir_list
+    do
+        report="${archive_dir}/${dir}/test.xml"
+        echo "Uploading report $report"
+        ${PYTHON:-python3} ./ot_test_parser.py --upload-sheet --filename "$report" --runner-id "$VARIANT"
+        # Sleep is needed to avoid exceeding the Google API quota
+        sleep 10
+    done
+fi