---
# Workflow: "Fuzz Roc" — run #277 ("Fuzz main for 3600 seconds").
# NOTE(review): lines above the YAML were GitHub web-page chrome from the
# "Workflow file for this run" view; converted to comments so the file parses.

on:
  schedule:
    # Every 4 hours at minute 22. Quoted: cron strings contain "*" and should
    # never be left as plain YAML scalars (GitHub's docs always quote them).
    - cron: "22 0/4 * * *"
  workflow_dispatch:
    inputs:
      fuzz-time:
        description: Number of seconds to run the fuzzer for
        type: number
        default: 600
      roc-branch:
        description: Roc branch to fuzz
        type: string
        default: main
env:
  # Scheduled runs have no inputs, so fall back to main / 3600 seconds.
  roc-branch: ${{ inputs.roc-branch || 'main' }}
  fuzz-time: ${{ inputs.fuzz-time || 3600 }}
name: Fuzz Roc
run-name: Fuzz ${{ inputs.roc-branch || 'main' }} for ${{ inputs.fuzz-time || 3600 }} seconds
# Only allow one copy of this job to run at a time.
# This ensures no merge or cache conflicts.
concurrency:
  group: ${{ github.workflow }}
  cancel-in-progress: false
permissions:
  actions: write      # used by the "delete previous cache" step (gh actions-cache)
  contents: write     # used by "record results" to push data.json
  deployments: write  # presumably for the Pages deployment — TODO confirm
  id-token: write     # required by actions/deploy-pages
  pages: write        # required by actions/deploy-pages
jobs:
  fuzz:
    # Skip entirely when a manual dispatch asks for a zero-second run.
    if: ${{ (inputs.fuzz-time || 3600) != 0 }}
    strategy:
      matrix:
        fuzzer:
          - name: tokenize
            use-snapshot-corpus: true
          - name: parse
            use-snapshot-corpus: true
      fail-fast: false
    env:
      # Updating the version is a trick to help deal with some github cache issues.
      # Github cache is not meant to be deleted and overwritten.
      # As such, this flow can sometimes break the cache leading to only save failures.
      # Updating the key generates a new cache but gets around save failures.
      cache-key: ${{ matrix.fuzzer.name }}-corpus-v3
      fuzzer-exe: fuzz-${{ matrix.fuzzer.name }}
    runs-on: [ubuntu-24.04]
    steps:
      - name: checkout roc-compiler-fuzz
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4
        with:
          path: roc-compiler-fuzz
      - name: checkout roc
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4
        with:
          path: roc
          repository: roc-lang/roc
          ref: ${{ env.roc-branch }}
      - name: install zig
        uses: mlugg/setup-zig@a67e68dc5c8281d9608136d3d7ca1b282213e4ac # ratchet:mlugg/setup-zig@v1
        with:
          # Quoted so the version can never be re-typed by a YAML parser.
          version: "0.14.0"
      - name: install afl++
        run: |
          sudo apt update
          sudo apt install -y afl++
          afl-fuzz --version
      - name: build roc fuzzer (base)
        env:
          AFL_CC_COMPILER: LTO
        run: |
          cd roc
          rm -rf .zig-cache zig-out $ZIG_LOCAL_CACHE_DIR
          zig build -Dfuzz -Dtarget=native-native -Dsystem-afl
          mv zig-out/bin/${{ env.fuzzer-exe }} ../${{ env.fuzzer-exe }}.afl
      # cmplog enables extracting comparison info to get better fuzz results.
      - name: build roc fuzzer (cmplog)
        env:
          AFL_LLVM_CMPLOG: 1
          AFL_CC_COMPILER: LTO
        run: |
          cd roc
          rm -rf .zig-cache zig-out $ZIG_LOCAL_CACHE_DIR
          zig build -Dfuzz -Dtarget=native-native -Dsystem-afl
          mv zig-out/bin/${{ env.fuzzer-exe }} ../${{ env.fuzzer-exe }}.cmplog
      - name: load cached corpus
        id: restore-cache-corpus
        uses: actions/cache/restore@d4323d4df104b026a6aa633fdb11d772146be0bf # ratchet:actions/cache/restore@v4
        with:
          key: ${{ env.cache-key }}
          path: corpus
      # No matter what we reload examples from the repo.
      # They might get pruned, but we want to make sure we don't miss any new examples.
      - name: copy over initial corpus
        if: ${{ !matrix.fuzzer.use-snapshot-corpus }}
        run: |
          mkdir -p corpus/
          cp roc/src/fuzz-corpus/${{ matrix.fuzzer.name }}/* corpus/
      - name: generate initial corpus
        if: ${{ matrix.fuzzer.use-snapshot-corpus }}
        run: |
          mkdir -p corpus/
          cd roc
          zig build snapshot -- --fuzz-corpus ../corpus --verbose
      - name: print corpus
        run: |
          ls corpus
      - name: configure system for fuzzing
        run: |
          sudo afl-system-config
      - name: run fuzz jobs
        env:
          AFL_TESTCACHE_SIZE: 500
          AFL_IGNORE_SEED_PROBLEMS: 1
          AFL_IMPORT_FIRST: 1
          AFL_FINAL_SYNC: 1
        run: |
          # This is a rough attempt to follow best practices from: https://aflplus.plus/docs/fuzzing_in_depth/#c-using-multiple-cores
          afl-fuzz \
            -i corpus/ \
            -o fuzz-out/ \
            -V ${{ env.fuzz-time }} \
            -M main \
            -c ./${{ env.fuzzer-exe }}.cmplog \
            -l 2AT \
            -p explore \
            -- ./${{ env.fuzzer-exe }}.afl &
          afl-fuzz \
            -i corpus/ \
            -o fuzz-out/ \
            -V ${{ env.fuzz-time }} \
            -S s1 \
            -p fast \
            -c ./${{ env.fuzzer-exe }}.cmplog \
            -- ./${{ env.fuzzer-exe }}.afl &
          AFL_DISABLE_TRIM=1 afl-fuzz \
            -i corpus/ \
            -o fuzz-out/ \
            -V ${{ env.fuzz-time }} \
            -S s2 \
            -p explore \
            -- ./${{ env.fuzzer-exe }}.afl &
          afl-fuzz \
            -i corpus/ \
            -o fuzz-out/ \
            -V ${{ env.fuzz-time }} \
            -S s3 \
            -p exploit \
            -- ./${{ env.fuzzer-exe }}.afl &
          wait
      - name: fuzz stats
        run: |
          afl-whatsup -d fuzz-out/
      - name: minimize corpus - cmin
        id: minimize-corpus-cmin
        continue-on-error: true
        timeout-minutes: 120
        env:
          # Some reason this is needed.
          # Afl doesn't trust the binary otherwise on these CI machines.
          AFL_SKIP_BIN_CHECK: 1
        run: |
          # No matter what, replace the corpus with the new queue.
          rm -rf corpus
          mv fuzz-out/main/queue corpus
          # Minimize corpus.
          afl-cmin \
            -i corpus/ \
            -o fuzz-cmin/ \
            -T all \
            -- ./${{ env.fuzzer-exe }}.afl
      - name: minimize corpus - cmin (copy over)
        if: steps.minimize-corpus-cmin.outcome == 'success'
        run: |
          # Overwrite corpus with minimized version.
          rm -rf corpus
          mv fuzz-cmin corpus
      # This is slow and doesn't save many bytes.
      # Maybe reconsider if corpus's get big, but they are pretty small currently.
      # - name: minimize corpus - tmin
      #   continue-on-error: true
      #   run: |
      #     # Minimize a random subset of the corpus.
      #     # Allow up to an hour of minimization before killing.
      #     # Eventually, this should minimize all files in the corpus.
      #     mkdir fuzz-tmin
      #     cd corpus
      #     ls | shuf | timeout 3600 parallel \
      #       --memfree 1G \
      #       --memsuspend 2G \
      #       afl-tmin \
      #         -i {} \
      #         -o ../fuzz-tmin/{} \
      #         -- ../${{ env.fuzzer-exe }}.afl \
      #       || true
      #     cd ..
      #     # Overwrite any files with their minimized version.
      #     mv fuzz-tmin/* corpus
      - name: print corpus
        run: |
          ls corpus
      # delete previous cache to enable overwriting it.
      - name: delete previous cache
        if: ${{ steps.restore-cache-corpus.outputs.cache-hit }}
        continue-on-error: true
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          cd roc-compiler-fuzz
          gh extension install actions/gh-actions-cache
          gh actions-cache delete "${{ env.cache-key }}" --confirm
      - name: save corpus
        id: save-cache-corpus
        uses: actions/cache/save@d4323d4df104b026a6aa633fdb11d772146be0bf # ratchet:actions/cache/save@v4
        with:
          key: ${{ env.cache-key }}
          path: corpus
      - name: minimize crashes
        continue-on-error: true
        env:
          # Limit the number of crashes we minimize to not waste time.
          # Still minimize a lot to hopefully find a really small sample to report.
          # Crashes are ultimately quite cheap to minimize.
          MAX_TMIN: 200
        run: |
          mv fuzz-out/main/crashes/ fuzz-crashes
          mkdir fuzz-out/main/crashes/
          cd fuzz-crashes
          ls | shuf | head -n $MAX_TMIN | timeout 3600 parallel \
            --memfree 1G \
            --memsuspend 2G \
            afl-tmin \
              -i {} \
              -o ../fuzz-out/main/crashes/{} \
              -- ../${{ env.fuzzer-exe }}.afl \
            || true
      - name: minimize hangs
        continue-on-error: true
        env:
          # Limit the number of hangs we minimize to not waste time.
          # Hangs are expensive to minimize, so only minimize a few.
          MAX_TMIN: 8
        run: |
          mv fuzz-out/main/hangs/ fuzz-hangs
          mkdir fuzz-out/main/hangs/
          cd fuzz-hangs
          ls | shuf | head -n $MAX_TMIN | timeout 3600 parallel \
            --memfree 1G \
            --memsuspend 2G \
            afl-tmin \
              -i {} \
              -o ../fuzz-out/main/hangs/{} \
              -H \
              -- ../${{ env.fuzzer-exe }}.afl \
            || true
      - name: list failures
        run: |
          echo "Crashes:"
          ls fuzz-out/main/crashes/
          echo -e "\nHangs:"
          ls fuzz-out/main/hangs/
      # calculate list of crashes/hangs to report
      - name: record results
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          cd roc-compiler-fuzz
          git config --global user.name "${{ github.actor }}"
          git config --global user.email "${{ github.actor }}@users.noreply.github.com"
          # allow for 10 tries to update the database.
          for i in {1..10}; do
            git fetch origin main
            git reset --hard origin/main
            zig build update-database -- ../roc ${{ matrix.fuzzer.name }} ../fuzz-out
            git add data.json
            git commit -m "update fuzzing database (${{ matrix.fuzzer.name }})"
            if git push; then
              break
            fi
            sleep 10
          done
  deploy:
    # deploy site even if one of the fuzzers fails.
    if: ${{ success() || failure() || inputs.fuzz-time == 0 }}
    needs: [fuzz]
    runs-on: ubuntu-24.04
    steps:
      - name: checkout roc-compiler-fuzz
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # ratchet:actions/checkout@v4
      - name: install zig
        uses: mlugg/setup-zig@a67e68dc5c8281d9608136d3d7ca1b282213e4ac # ratchet:mlugg/setup-zig@v1
        with:
          version: "0.14.0"
      - name: generate site
        run: |
          # Ensure we pull in the changes pushed above.
          git pull
          zig build generate-website
      - name: upload website artifacts
        uses: actions/upload-pages-artifact@56afc609e74202658d3ffba0e8f6dda462b719fa # ratchet:actions/upload-pages-artifact@v3
        with:
          path: 'www'
      - name: deploy site
        uses: actions/deploy-pages@d6db90164ac5ed86f2b6aed7e0febac5b3c0c03e # ratchet:actions/deploy-pages@v4