[Feature] Assign epochs dynamically #125

Workflow file for this run

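# CI for DFAnalyzer: build the package and run the Python and C++ test suites
# across a Python version matrix on Ubuntu 22.04.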
name: Build and Test
on:
  pull_request:
    branches: [main, develop]
  push:
    branches: [main, develop]
    tags:
      - "v*.*.*" # Match version tags like v1.0.0, v0.1.2, etc.
  workflow_dispatch:
    inputs:
      run_full_tests:
        description: "Run full test suite"
        type: boolean
        default: false
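# The run_full_tests input applies to manual runs; one way to dispatch it is the
# GitHub CLI (illustrative invocation, assuming the CLI is installed and authenticated):
#   gh workflow run "Build and Test" -f run_full_tests=true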
jobs:
  build-and-test:
    strategy:
      fail-fast: false
      matrix:
        os: ["ubuntu-22.04"]
        python: ["3.9", "3.10", "3.11"]
    name: ${{ matrix.os }}-${{ matrix.python }}
    runs-on: ${{ matrix.os }}
    timeout-minutes: 30
    env:
      DFANALYZER_CLUSTER_RESTART_TIMEOUT_SECONDS: 600
      # Allow Open MPI to run when invoked as root (a no-op otherwise).
      OMPI_ALLOW_RUN_AS_ROOT: 1
      OMPI_ALLOW_RUN_AS_ROOT_CONFIRM: 1
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          submodules: recursive
          fetch-depth: 0
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python }}
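      # The Apache Arrow packages (libarrow-dev, libparquet-dev) are not in the
      # stock Ubuntu archive, so the step below first registers Apache's APT
      # repository for this distro/codename, then installs the build toolchain
      # and libraries.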
      - name: Install system dependencies
        run: |
          sudo apt update
          sudo apt install -y \
            ca-certificates \
            lsb-release \
            wget
          wget https://apache.jfrog.io/artifactory/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt install -y ./apache-arrow-apt-source-latest-$(lsb_release --codename --short).deb
          sudo apt update
          sudo apt install -y \
            build-essential \
            cmake \
            git \
            libarrow-dev \
            libhdf5-dev \
            libncurses-dev \
            libopenmpi-dev \
            libparquet-dev \
            libreadline-dev \
            meson \
            ninja-build \
            nlohmann-json3-dev \
            openmpi-bin \
            openmpi-common
      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install --upgrade meson-python setuptools wheel
          pip install -r tests/requirements.txt
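      # pip's -C is shorthand for --config-settings; meson-python forwards each
      # setup-args value to meson, enabling the test and tool build targets.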
      - name: Install DFAnalyzer
        run: |
          pip install .[darshan] \
            -Csetup-args="--prefix=$HOME/.local" \
            -Csetup-args="-Denable_tests=true" \
            -Csetup-args="-Denable_tools=true"
      - name: Determine test type
        id: test-type
        run: |
          # Check if this is a tagged release (v*.*.*)
          if [[ "${{ github.ref }}" == refs/tags/v* ]]; then
            echo "Run full test suite for release tag"
            echo "run_full=true" >> "$GITHUB_OUTPUT"
          # Check if this is a manual run requesting full tests
          elif [[ "${{ github.event_name }}" == "workflow_dispatch" && "${{ github.event.inputs.run_full_tests }}" == "true" ]]; then
            echo "Run full test suite due to manual request"
            echo "run_full=true" >> "$GITHUB_OUTPUT"
          else
            echo "Run smoke tests for regular development"
            echo "run_full=false" >> "$GITHUB_OUTPUT"
          fi
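      # "full" and "smoke" are assumed to be pytest markers declared in the
      # project's pytest configuration; -m selects only tests carrying the
      # given marker.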
      - name: Run Python tests with coverage
        run: |
          if [[ "${{ steps.test-type.outputs.run_full }}" == "true" ]]; then
            echo "Running FULL test suite"
            pytest -m full --verbose --cov=dftracer.analyzer --cov-report=xml
          else
            echo "Running SMOKE test suite"
            pytest -m smoke --verbose --cov=dftracer.analyzer --cov-report=xml
          fi
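      # The cluster helper prints its scheduler address (tcp://host:port) to
      # stdout; the key=value/+key=value overrides follow Hydra conventions,
      # where the + prefix adds a key absent from the base config (an
      # assumption based on the CLI syntax, not confirmed here).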
      - name: Run DFAnalyzer with external cluster
        run: |
          # Start the cluster in the background and capture the scheduler address
          dfanalyzer-cluster cluster=local cluster.processes=False \
            +cluster.protocol=tcp +cluster.worker_class=distributed.nanny.Nanny > cluster.log 2>&1 &
          cluster_pid=$!
          # Wait for the scheduler address to appear in the log
          for i in {1..30}; do
            if grep -q '^tcp://' cluster.log; then break; fi
            sleep 1
          done
          scheduler_address=$(grep '^tcp://' cluster.log | tail -1)
          # Fail fast with the cluster log if the scheduler never came up
          if [[ -z "$scheduler_address" ]]; then
            echo "Scheduler address never appeared in cluster.log:"
            cat cluster.log
            exit 1
          fi
          echo "Scheduler address: $scheduler_address"
          # Run analysis commands against the external cluster
          if [[ "${{ steps.test-type.outputs.run_full }}" == "true" ]]; then
            dfanalyzer analyzer=darshan trace_path=tests/data/extracted/darshan-posix-dxt \
              cluster=external cluster.restart_on_connect=True cluster.scheduler_address=$scheduler_address
            dfanalyzer analyzer=recorder trace_path=tests/data/extracted/recorder-posix-parquet \
              cluster=external cluster.restart_on_connect=True cluster.scheduler_address=$scheduler_address
            dfanalyzer analyzer=dftracer analyzer/preset=dlio trace_path=tests/data/extracted/dftracer-dlio \
              cluster=external cluster.restart_on_connect=True cluster.scheduler_address=$scheduler_address
          else
            dfanalyzer analyzer=dftracer analyzer/preset=dlio trace_path=tests/data/extracted/dftracer-dlio \
              cluster=external cluster.restart_on_connect=False cluster.scheduler_address=$scheduler_address
          fi
          # Shut down the background cluster process
          kill $cluster_pid || true
          wait $cluster_pid 2>/dev/null || true
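      # Rebuild out-of-tree with meson and run the C++ suite twice: once plain
      # and once under the "mpi" test setup, which is assumed to be defined in
      # the project's meson.build (e.g. to wrap tests in mpirun).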
      - name: Run C++ tests
        run: |
          rm -rf build
          meson setup build --prefix=$HOME/.local -Denable_tests=true -Denable_tools=true
          meson compile -C build --verbose
          meson test -C build --verbose
          meson test -C build --verbose --setup=mpi
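      # fail_ci_if_error: false keeps transient Codecov upload failures from
      # failing the whole job.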
      - name: Upload test coverage
        uses: codecov/codecov-action@v3
        with:
          files: ./coverage.xml
          fail_ci_if_error: false
      - name: Upload test artifacts
        if: always()
        uses: actions/upload-artifact@v4
        with:
          name: test-results-${{ matrix.os }}-py${{ matrix.python }}
          path: |
            build/meson-logs/
            .coverage
            coverage.xml
          retention-days: 7