WIP: VV stress under quench model review #2258
---
name: PROCESS main/develop testing
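
# Triggered on every pull request and on every push; the main-only jobs below
# (run-tracking-inputs, tracking, docs) additionally gate on refs/heads/main.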
on:
  pull_request:
  push:
permissions:
  contents: read
  pages: write
  id-token: write
jobs:
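  # make: configure and compile PROCESS with CMake, then archive the compiled
  # shared libraries, the Python/Fortran dictionary, the gcov .gcno coverage
  # files and the FORD project pickle for the downstream jobs.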
  make:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Build PROCESS
        run: |
          git config --global --add safe.directory '*'
          cmake -S. -Bbuild
          cmake --build build
      - name: Archive build artifacts in process
        uses: actions/upload-artifact@v3
        with:
          name: process-build-artifacts
          path: |
            process/**/*.so
            process/io/python_fortran_dicts.json
      - name: Archive coverage data
        uses: actions/upload-artifact@v3
        with:
          name: coverage-artifacts
          path: |
            build/CMakeFiles/process.dir/source/fortran/*.gcno
            lcov_results
      - name: Archive ford artifacts
        uses: actions/upload-artifact@v3
        with:
          name: ford-artifact
          path: build/ford_project.pickle
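
  # unit-test: reuse the compiled artifacts from make, install PROCESS in
  # editable mode and run the unit test suite with coverage; results are
  # uploaded to Codecov (upload errors only fail the job on main).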
  unit-test:
    runs-on: ubuntu-latest
    needs: make
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Download build artifacts
        uses: actions/download-artifact@v3
        with:
          name: process-build-artifacts
          path: process/
      - name: Install PROCESS
        # Editable install to match default install
        run: |
          pip install -e '.[test, examples]'
          pip install -r requirements_dev.txt
      - name: Run unit tests
        run: |
          pytest --cov=process tests/unit -v \
            --cov-report xml
      - name: Archive unit test coverage data
        uses: actions/upload-artifact@v3
        with:
          name: unit-coverage-artifacts
          path: .coverage
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v4
        env:
          CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
        with:
          fail_ci_if_error: ${{ github.ref_name == 'main' }}
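
  # integration-test: same editable install as unit-test, plus the
  # poppler-utils system package; the integration tests run in parallel
  # across workers (pytest -n auto, via pytest-xdist).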
  integration-test:
    runs-on: ubuntu-latest
    needs: make
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Download build artifacts
        uses: actions/download-artifact@v3
        with:
          name: process-build-artifacts
          path: process/
      - name: Install PROCESS
        # Editable install to match default install
        run: |
          pip install -e '.[test, examples]'
          pip install -r requirements_dev.txt
      - name: Install poppler
        run: |
          sudo apt update
          sudo apt install poppler-utils
      - name: Run integration tests
        run: pytest tests/integration -n auto -v
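
  # regression-test: runs the regression suite twice, once at 0% and once at
  # 5% tolerance (matrix below), with continue-on-error so a tolerance failure
  # does not fail the whole workflow.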
  regression-test:
    runs-on: ubuntu-latest
    needs: [make, tracking]
    # Depends on the make and tracking jobs and requires make to have
    # succeeded, but does not skip if tracking itself was skipped.
    if: always() && needs.make.result == 'success'
    continue-on-error: true
    strategy:
      matrix:
        tolerance: [0, 5]
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Download build artifacts
        uses: actions/download-artifact@v3
        with:
          name: process-build-artifacts
          path: process/
      - name: Install PROCESS
        # Editable install to match default install
        run: |
          pip install -e '.[test, examples]'
          pip install -r requirements_dev.txt
      - name: Allow git commands to be run
        run: git config --global --add safe.directory '*'
      - name: Run regression tests
        run: pytest tests/regression -sv --reg-tolerance=${{ matrix.tolerance }}
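
  # run-tracking-inputs: main only. Runs each regression input file through
  # PROCESS and archives the resulting MFILEs for the tracking job.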
  run-tracking-inputs:
    runs-on: ubuntu-latest
    needs: make
    if: github.ref == 'refs/heads/main'
    steps:
      - uses: actions/checkout@v3
      - name: Download build artifacts
        uses: actions/download-artifact@v3
        with:
          name: process-build-artifacts
          path: process/
      - name: Install PROCESS
        run: pip install -e .
      - name: Run regression input files
        run: |
          process -i tests/regression/input_files/large_tokamak.IN.DAT
          mv tests/regression/input_files/large_tokamak.MFILE.DAT tracking/large_tokamak_MFILE.DAT
          process -i tests/regression/input_files/st_regression.IN.DAT
          mv tests/regression/input_files/st_regression.MFILE.DAT tracking/st_regression_MFILE.DAT
          process -i tests/regression/input_files/large_tokamak_nof.IN.DAT
          mv tests/regression/input_files/large_tokamak_nof.MFILE.DAT tracking/large_tokamak_nof_MFILE.DAT
          process -i tests/regression/input_files/large_tokamak_once_through.IN.DAT
          mv tests/regression/input_files/large_tokamak_once_through.MFILE.DAT tracking/large_tokamak_once_through_MFILE.DAT
          process -i tests/regression/input_files/stellarator.IN.DAT
          mv tests/regression/input_files/stellarator.MFILE.DAT tracking/stellarator_MFILE.DAT
      - name: Archive tracked MFILEs
        uses: actions/upload-artifact@v3
        with:
          name: tracked-mfiles
          path: tracking/*_MFILE.DAT
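
  # pre-commit-quality-check: style and lint checks over the whole repository
  # via the project's pre-commit hooks.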
  pre-commit-quality-check:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.10'
      - name: Install dev dependencies
        run: pip install -r requirements_dev.txt
      - name: Run pre-commit
        run: pre-commit run --all-files
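
  # tracking: main only, serialised via the tracking-jobs concurrency group.
  # Appends the freshly generated MFILEs to the external tracking-data
  # repository, rebuilds the tracking dashboard and pushes the new data.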
  tracking:
    concurrency:
      group: tracking-jobs
      cancel-in-progress: false
    runs-on: ubuntu-latest
    needs: run-tracking-inputs
    if: github.ref == 'refs/heads/main'
    env:
      COMMIT_MESSAGE: ${{ github.event.head_commit.message }}
    steps:
      - uses: actions/checkout@v3
      - name: Download build artifacts
        uses: actions/download-artifact@v3
        with:
          name: process-build-artifacts
          path: process/
      - name: Install PROCESS
        run: pip install -e .
      - name: Install dev dependencies
        run: pip install -r requirements_dev.txt
      - name: Setup SSH identity
        uses: webfactory/[email protected]
        with:
          ssh-private-key: ${{ secrets.SSH_PRIVATE_KEY }}
      - name: Download tracking data
        run: git clone [email protected]:timothy-nunn/process-tracking-data.git process-tracking-data
      - name: Download large tokamak MFILE
        uses: actions/download-artifact@v3
        with:
          name: tracked-mfiles
          path: tracking/
      - name: Create new tracking entries
        shell: bash
        run: |
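          # printf %q shell-escapes each word of the commit message so it can
          # be passed safely as a single quoted --commit argument below.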
          MSG=$(printf "%q " $COMMIT_MESSAGE)
          git config --global --add safe.directory '*'
          python tracking/tracking_data.py track process-tracking-data --mfile tracking/large_tokamak_MFILE.DAT --hash ${{ github.sha }} --commit "${MSG}"
          cp tracking/large_tokamak_MFILE.DAT process-tracking-data/large_tokamak_MFILE_$(git rev-parse HEAD).DAT
          python tracking/tracking_data.py track process-tracking-data --mfile tracking/st_regression_MFILE.DAT --hash ${{ github.sha }} --commit "${MSG}"
          cp tracking/st_regression_MFILE.DAT process-tracking-data/st_regression_MFILE_$(git rev-parse HEAD).DAT
          python tracking/tracking_data.py track process-tracking-data --mfile tracking/large_tokamak_nof_MFILE.DAT --hash ${{ github.sha }} --commit "${MSG}"
          cp tracking/large_tokamak_nof_MFILE.DAT process-tracking-data/large_tokamak_nof_MFILE_$(git rev-parse HEAD).DAT
          python tracking/tracking_data.py track process-tracking-data --mfile tracking/large_tokamak_once_through_MFILE.DAT --hash ${{ github.sha }} --commit "${MSG}"
          cp tracking/large_tokamak_once_through_MFILE.DAT process-tracking-data/large_tokamak_once_through_MFILE_$(git rev-parse HEAD).DAT
          python tracking/tracking_data.py track process-tracking-data --mfile tracking/stellarator_MFILE.DAT --hash ${{ github.sha }} --commit "${MSG}"
          cp tracking/stellarator_MFILE.DAT process-tracking-data/stellarator_MFILE_$(git rev-parse HEAD).DAT
      - name: Create the tracking dashboard
        run: python tracking/tracking_data.py plot process-tracking-data --out tracking.html
      - name: Archive tracking dashboard
        uses: actions/upload-artifact@v3
        with:
          name: tracking-html
          path: tracking.html
      - name: Setup Git identity
        run: |
          git config --global user.email "${{ github.triggering_actor }}@github.runner"
          git config --global user.name "${{ github.job }}"
      - name: Commit and push tracking data
        run: |
          cd process-tracking-data
          git add .
          git commit -m "${{ github.run_id }}-${{ github.run_number }}-${{ github.run_attempt }}"
          git push
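
  # docs: main only, serialised via the docs-jobs concurrency group. Builds
  # the FORD API documentation and the MkDocs site, bundles the tracking
  # dashboard (falling back to the 404 page if it is missing) and deploys the
  # result to GitHub Pages.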
  docs:
    concurrency:
      group: docs-jobs
      cancel-in-progress: false
    runs-on: ubuntu-latest
    needs: tracking
    if: github.ref == 'refs/heads/main'
    steps:
      - uses: actions/checkout@v3
        with:
          fetch-depth: 0
      - name: Download build artifacts
        uses: actions/download-artifact@v3
        with:
          name: process-build-artifacts
          path: process/
      - name: Install PROCESS
        run: pip install -e .
      - name: Install dev dependencies
        run: pip install -r requirements_dev.txt
      - name: Install ford
        run: pip install git+https://github.com/jonmaddock/ford
      - run: ford documentation/ford/index.md
      - name: Download ford project
        uses: actions/download-artifact@v3
        with:
          name: ford-artifact
          path: build/
      - run: python scripts/document_fortran_interface.py
      - run: python scripts/vardes.py
      - run: git config --global --add safe.directory '*'
      - run: mkdocs build
      - run: mv ford_site site
      - name: Download tracking html
        uses: actions/download-artifact@v3
        with:
          name: tracking-html
      - run: mv tracking.html site || cp site/404.html site/tracking.html
      - name: Upload documentation page
        uses: actions/upload-pages-artifact@v1
        with:
          path: site/
      - name: Deploy GitHub pages
        id: deployment
        uses: actions/deploy-pages@v2