build(deps): bump python-semantic-release/python-semantic-release from 9.12.0 to 9.14.0 #1058

Workflow file for this run

name: CI
on:
workflow_dispatch:
pull_request:
push:
branches:
- main
jobs:
tests:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
name: Python ${{ matrix.python-version }} Build & Tests
steps:
- name: Add apt repo
run: sudo add-apt-repository universe
# Setup Java & Python
- name: Setup Java
uses: actions/setup-java@b36c23c0d998641eff861008f374ee103c25ac73 # v4.4.0
with:
distribution: 'temurin'
java-version: 8
- name: Setup Python
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
with:
python-version: ${{ matrix.python-version }}
architecture: x64
- name: Checkout DataGateway API
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
# Install Nox, Poetry and API's dependencies
- name: Install Nox
run: pip install nox==2020.8.22
- name: Install Poetry
run: pip install poetry==1.1.9
# This fixes the issue with cachecontrol (https://github.com/psf/cachecontrol/issues/292).
# We will not face this issue once we upgrade to a newer Poetry version.
- name: Install Requests
run: pip install 'requests<2.30'
# This step is a workaround for getting Poetry working with Python 3.10. A fix was
# made in Poetry 1.2.0a2, but there is currently no official release of Poetry 1.2
# and we are wary of moving to a pre-release. Disabling the experimental installer
# is a workaround for Poetry 1.1.x
# See https://github.com/python-poetry/poetry/issues/4210 for more details
- name: Disable Poetry's experimental new installer
if: ${{ matrix.python-version == '3.10' }}
run: poetry config experimental.new-installer false
# Prep for using the API for tests
- name: Create log file
run: touch logs.log
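# yq sets log_location in the example config and envsubst expands ${GITHUB_WORKSPACE},
# pointing the log path at the checked-out workspace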
- name: Configure log file location
run: echo "`yq \
'.log_location="${GITHUB_WORKSPACE}/logs.log"' datagateway_api/config.yaml.example | envsubst`" > datagateway_api/config.yaml.example
- name: Configure datagateway extension
run: echo "`yq \
'.datagateway_api.extension="/datagateway_api"' datagateway_api/config.yaml.example`" > datagateway_api/config.yaml.example
- name: Create config.yaml
run: cp datagateway_api/config.yaml.example datagateway_api/config.yaml
- name: Create search_api_mapping.json
run: cp datagateway_api/search_api_mapping.json.example datagateway_api/search_api_mapping.json
# Run Unit tests
- name: Run Nox unit tests session
run: nox -p ${{ matrix.python-version }} -s unit_tests -- --cov=datagateway_api --cov-report=xml
- name: Upload unit test code coverage report
if: matrix.python-version == '3.6'
uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238 # v4.6.0
# ICAT Ansible clone and install dependencies
- name: Checkout icat-ansible
if: success()
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
with:
repository: icatproject-contrib/icat-ansible
ref: master
path: icat-ansible
- name: Install Ansible
run: pip install -r icat-ansible/requirements.txt
# Prep for running the playbook
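# The generated inventory targets localhost over a local connection, so the playbook
# installs the ICAT stack directly on the runner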
- name: Create hosts file
run: echo -e "[icatdb_minimal_hosts]\nlocalhost ansible_connection=local" > icat-ansible/hosts
- name: Prepare vault pass
run: echo -e "icattravispw" > icat-ansible/vault_pass.txt
- name: Move vault to directory where it'll be detected by Ansible
run: mv icat-ansible/vault.yml icat-ansible/group_vars/all
- name: Replace default payara user with Actions user
run: |
sed -i -e "s/^payara_user: \"glassfish\"/payara_user: \"runner\"/" icat-ansible/group_vars/all/vars.yml
# Force hostname to localhost - bug fix for previous ICAT Ansible issues on Actions
- name: Change hostname to localhost
run: sudo hostname -b localhost
# Remove existing MySQL installation so it doesn't interfere with GitHub Actions
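# apparmor_parser -R unloads the mysqld AppArmor profile so the purge and file removal
# below aren't blocked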
- name: Remove existing mysql
run: |
sudo apparmor_parser -R /etc/apparmor.d/usr.sbin.mysqld
sudo apt-get remove --purge "mysql*"
sudo rm -rf /var/lib/mysql* /etc/mysql
# Create local instance of ICAT
- name: Run ICAT Ansible Playbook
run: |
ansible-playbook icat-ansible/icatdb_minimal_hosts.yml -i icat-ansible/hosts --vault-password-file icat-ansible/vault_pass.txt -vv
# rootUserNames needs editing as anon/anon is used by the search API and is required to pass the endpoint tests
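# The awk command rewrites the rootUserNames entry to 'simple/root anon/anon' and copies
# every other line of run.properties unchanged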
- name: Add anon user to rootUserNames
run: |
awk -F" =" '/rootUserNames/{$2="= simple/root anon/anon";print;next}1' /home/runner/install/icat.server/run.properties > /home/runner/install/icat.server/run.properties.tmp
- name: Apply rootUserNames change
run: |
mv -f /home/runner/install/icat.server/run.properties.tmp /home/runner/install/icat.server/run.properties
- name: Reinstall ICAT Server
run: |
cd /home/runner/install/icat.server/ && ./setup -vv install
- name: Checkout DataGateway API
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
# Prep for using the API for tests
- name: Create log file
run: touch logs.log
- name: Configure log file location
run: echo "`yq \
'.log_location="${GITHUB_WORKSPACE}/logs.log"' datagateway_api/config.yaml.example | envsubst`" > datagateway_api/config.yaml.example
- name: Configure datagateway extension
run: echo "`yq \
'.datagateway_api.extension="/datagateway_api"' datagateway_api/config.yaml.example`" > datagateway_api/config.yaml.example
- name: Create config.yaml
run: cp datagateway_api/config.yaml.example datagateway_api/config.yaml
- name: Create search_api_mapping.json
run: cp datagateway_api/search_api_mapping.json.example datagateway_api/search_api_mapping.json
# See comment in noxfile.py for explanation regarding this step
- name: Downgrade setuptools
run: poetry run pip install --upgrade setuptools==70.0.0
if: matrix.python-version == '3.8' || matrix.python-version == '3.9' || matrix.python-version == '3.10'
- name: Install dependencies
run: poetry install
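# util.icat_db_generator seeds icatdb with generated test data for the integration tests below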
- name: Add dummy data to icatdb
run: |
poetry run python -m util.icat_db_generator
# Run the Nox integration tests session; save and upload a coverage report to Codecov
- name: Run Nox Integration tests session
if: success()
run: nox -p ${{ matrix.python-version }} -s integration_tests -- --cov=datagateway_api --cov-report=xml
- name: Upload integration test code coverage report
if: matrix.python-version == '3.6'
uses: codecov/codecov-action@b9fd7d16f6d7d1b5d2bec1a2887e65ceed900238 # v4.6.0
linting:
runs-on: ubuntu-20.04
name: Linting
steps:
- name: Setup Python
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
with:
python-version: "3.9.7"
architecture: x64
- name: Checkout DataGateway API
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
- name: Install Nox
run: pip install nox==2020.8.22
- name: Install Poetry
run: pip install poetry==1.1.9
- name: Run Nox lint session
run: nox -s lint
formatting:
runs-on: ubuntu-20.04
name: Code Formatting
steps:
- name: Setup Python
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
with:
python-version: "3.9.7"
architecture: x64
- name: Checkout DataGateway API
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
- name: Install Nox
run: pip install nox==2020.8.22
- name: Install Poetry
run: pip install poetry==1.1.9
- name: Run Nox black session
run: nox -s black
safety:
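# The safety session audits the project's dependencies for known vulnerabilities
# (assumed to wrap the `safety` tool; see noxfile.py for the exact command)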
runs-on: ubuntu-20.04
name: Dependency Safety
steps:
- name: Setup Python
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
with:
python-version: "3.9.7"
architecture: x64
- name: Checkout DataGateway API
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
- name: Install Nox
run: pip install nox==2020.8.22
- name: Install Poetry
run: pip install poetry==1.1.9
- name: Run Nox safety session
run: nox -s safety
generator-script-testing:
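# Runs the icatdb generator script twice on this branch and once on the default branch,
# diffing the resulting SQL dumps to check that the script produces consistent data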
runs-on: ubuntu-20.04
continue-on-error: true
name: icatdb Generator Script Consistency Test
steps:
- name: Add apt repo
run: sudo add-apt-repository universe
# Setup Java & Python
- name: Setup Java
uses: actions/setup-java@b36c23c0d998641eff861008f374ee103c25ac73 # v4.4.0
with:
distribution: 'temurin'
java-version: 8
- name: Setup Python
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
with:
python-version: "3.9.7"
architecture: x64
# ICAT Ansible clone and install dependencies
- name: Checkout icat-ansible
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
with:
repository: icatproject-contrib/icat-ansible
ref: master
path: icat-ansible
- name: Install Ansible
run: pip install -r icat-ansible/requirements.txt
# Prep for running the playbook
- name: Create hosts file
run: echo -e "[icatdb_minimal_hosts]\nlocalhost ansible_connection=local" > icat-ansible/hosts
- name: Prepare vault pass
run: echo -e "icattravispw" > icat-ansible/vault_pass.txt
- name: Move vault to directory where it'll be detected by Ansible
run: mv icat-ansible/vault.yml icat-ansible/group_vars/all
- name: Replace default payara user with Actions user
run: |
sed -i -e "s/^payara_user: \"glassfish\"/payara_user: \"runner\"/" icat-ansible/group_vars/all/vars.yml
# Force hostname to localhost - bug fix for previous ICAT Ansible issues on Actions
- name: Change hostname to localhost
run: sudo hostname -b localhost
# Remove existing MySQL installation so it doesn't interfere with GitHub Actions
- name: Remove existing mysql
run: |
sudo apparmor_parser -R /etc/apparmor.d/usr.sbin.mysqld
sudo apt-get remove --purge "mysql*"
sudo rm -rf /var/lib/mysql* /etc/mysql
# Create local instance of ICAT
- name: Run ICAT Ansible Playbook
run: |
ansible-playbook icat-ansible/icatdb_minimal_hosts.yml -i icat-ansible/hosts --vault-password-file icat-ansible/vault_pass.txt -vv
- name: Checkout DataGateway API
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
- name: Create config.yaml
run: cd /home/runner/work/datagateway-api/datagateway-api; cp datagateway_api/config.yaml.example datagateway_api/config.yaml
- name: Create search_api_mapping.json
run: cd /home/runner/work/datagateway-api/datagateway-api; cp datagateway_api/search_api_mapping.json.example datagateway_api/search_api_mapping.json
- name: Install Poetry
run: pip install poetry==1.1.9
# This fixes the issue with cachecontrol (https://github.com/psf/cachecontrol/issues/292).
# We will not face this issue once we upgrade to a newer Poetry version.
- name: Install Requests
run: pip install 'requests<2.30'
# See comment in noxfile.py for explanation regarding this step
- name: Downgrade setuptools
run: poetry run pip install --upgrade setuptools==70.0.0
- name: Install dependencies
run: poetry install
- name: Add dummy data to icatdb
run: poetry run python -m util.icat_db_generator
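# modTime and createTime are timestamp columns that differ between runs; dropping them
# before dumping keeps the SQL dump diffs below meaningful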
- name: Drop modTime and createTime
run: mysql -picatdbuserpw -uicatdbuser icatdb < /home/runner/work/datagateway-api/datagateway-api/util/columns_to_drop.sql
- name: Get SQL dump of dummy data
run: mysqldump -picatdbuserpw -uicatdbuser --skip-comments icatdb > ~/generator_script_dump_1.sql
# Drop and re-create icatdb to remove generated data
- name: Drop icatdb
run: mysql -picatdbuserpw -uicatdbuser -e 'DROP DATABASE icatdb;'
- name: Create icatdb
run: mysql -picatdbuserpw -uicatdbuser -e 'CREATE DATABASE icatdb;'
# Regenerate table structure of icatdb
- name: Reinstall ICAT Server
run: cd /home/runner/install/icat.server; ./setup -vv install
- name: Add ICAT 5 triggers
run: cd /home/runner/install/icat.server; sudo mysql -uroot -D icatdb < create_triggers_mysql_5_0.sql
- name: Add (new) dummy data to icatdb
run: |
cd /home/runner/work/datagateway-api/datagateway-api; poetry run python -m util.icat_db_generator
- name: Drop modTime and createTime
run: mysql -picatdbuserpw -uicatdbuser icatdb < /home/runner/work/datagateway-api/datagateway-api/util/columns_to_drop.sql
- name: Get SQL dump of new dummy data
run: mysqldump -picatdbuserpw -uicatdbuser --skip-comments icatdb > ~/generator_script_dump_2.sql
# Tests that the generator script produces consistent data over two separate runs
- name: Diff SQL dumps
run: diff -s ~/generator_script_dump_1.sql ~/generator_script_dump_2.sql
# Drop and re-create icatdb to remove generated data
- name: Drop icatdb
run: mysql -picatdbuserpw -uicatdbuser -e 'DROP DATABASE icatdb;'
- name: Create icatdb
run: mysql -picatdbuserpw -uicatdbuser -e 'CREATE DATABASE icatdb;'
# Regenerate table structure of icatdb
- name: Reinstall ICAT Server
run: cd /home/runner/install/icat.server; ./setup -vv install
- name: Add ICAT 5 triggers
run: cd /home/runner/install/icat.server; sudo mysql -uroot -D icatdb < create_triggers_mysql_5_0.sql
- name: Checkout DataGateway API (default branch)
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
with:
ref: ${{ github.event.repository.default_branch }}
- name: Create config.yaml
run: cd /home/runner/work/datagateway-api/datagateway-api; cp datagateway_api/config.yaml.example datagateway_api/config.yaml
- name: Create search_api_mapping.json
run: cd /home/runner/work/datagateway-api/datagateway-api; cp datagateway_api/search_api_mapping.json.example datagateway_api/search_api_mapping.json
# See comment in noxfile.py for explanation regarding this step
- name: Downgrade setuptools
run: poetry run pip install --upgrade setuptools==70.0.0
- name: Install dependencies
run: poetry install
- name: Add dummy data to icatdb
run: poetry run python -m util.icat_db_generator
- name: Drop modTime and createTime
run: mysql -picatdbuserpw -uicatdbuser icatdb < /home/runner/work/datagateway-api/datagateway-api/util/columns_to_drop.sql
- name: Get SQL dump of dummy data from main's generator script
run: mysqldump -picatdbuserpw -uicatdbuser --skip-comments icatdb > ~/generator_script_dump_main.sql
# Tests that the generator script produces the same data as is produced with main's version
# NOTE: If a deliberate change is made to the script that will change the data generated,
# the diff (and therefore this job) will fail. If this is the case, don't be alarmed.
# The `continue-on-error` keyword has been added to this job so the workflow should
# pass when the PR is merged in, even if this job fails.
# However, if you didn't mean to change the output of the script, there is likely a
# problem with the changes made that will make the generator script's data
# different from SciGateway preprod
- name: Diff SQL dumps
run: diff -s ~/generator_script_dump_main.sql ~/generator_script_dump_1.sql
pip-install-testing:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
python-version: ["3.6", "3.7", "3.8", "3.9", "3.10"]
name: Pip Install ${{ matrix.python-version }} Test
steps:
# Checkout DataGateway API and setup Python
- name: Check out repo
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
- name: Setup Python
uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0
with:
python-version: ${{ matrix.python-version }}
architecture: x64
# Create virtual environment and install DataGateway API
- name: Create and activate virtual environment
run: |
python3 -m venv dg-api-venv
source dg-api-venv/bin/activate
- name: Install DataGateway API via pip
run: pip install .
# Prep for using the API for tests
- name: Create log file
run: touch logs.log
- name: Configure log file location
run: echo "`yq \
'.log_location="${GITHUB_WORKSPACE}/logs.log"' datagateway_api/config.yaml.example | envsubst`" > datagateway_api/config.yaml.example
- name: Configure datagateway extension
run: echo "`yq \
'.datagateway_api.extension="/datagateway_api"' datagateway_api/config.yaml.example`" > datagateway_api/config.yaml.example
- name: Create config.yaml
run: cp datagateway_api/config.yaml.example datagateway_api/config.yaml
# These sections are removed so the API doesn't try (and fail) to connect to an ICAT stack on startup
- name: Remove DataGateway API and Search API sections from config
run: yq -i 'del(.datagateway_api, .search_api)' datagateway_api/config.yaml
- name: Create search_api_mapping.json
run: cp datagateway_api/search_api_mapping.json.example datagateway_api/search_api_mapping.json
# Launch API to see if it starts correctly or has a startup issue
# Code logic used from https://stackoverflow.com/a/63643845
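# `timeout` returns exit code 124 when the API is still running after 10 seconds, which is
# treated as a successful start; any other non-zero code means a startup failure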
- name: Start API
run: timeout 10 python -m datagateway_api.src.main || code=$?; if [[ $code -ne 124 && $code -ne 0 ]]; then exit $code; fi
docker:
# This job triggers only if all the other jobs succeed and does different things depending on the context.
# The job always builds the Docker image, and pushes it to Harbor only when something is
# pushed to the main branch.
needs: [tests, linting, formatting, safety, generator-script-testing, pip-install-testing]
name: Docker
runs-on: ubuntu-20.04
steps:
- name: Check out repo
uses: actions/checkout@eef61447b9ff4aafe5dcd4e0bbf5d482be7e7871 # v4.2.1
- name: Login to Harbor
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: harbor.stfc.ac.uk/datagateway
username: ${{ secrets.HARBOR_USERNAME }}
password: ${{ secrets.HARBOR_TOKEN }}
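# docker/metadata-action derives image tags and labels from the Git context; the build step below applies them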
- name: Extract metadata (tags, labels) for Docker
id: meta
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: harbor.stfc.ac.uk/datagateway/datagateway-api
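# The step name is picked with an expression ternary: pushes to main read 'Build and push
# Docker image to Harbor', everything else reads 'Build Docker image'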
- name: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' && 'Build and push Docker image to Harbor' || 'Build Docker image' }}
uses: docker/build-push-action@4f58ea79222b3b9dc2c8bbdd6debcef730109a75 # v6.9.0
with:
context: .
push: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}