Execute ZKVM-Perf #21
Workflow file for this run

name: Execute ZKVM-Perf

on:
  workflow_dispatch:
    inputs:
      ami_id:
        description: 'AMI ID'
        required: true
        type: string
        default: 'ami-079a6a210557ef0e4'
      provers:
        description: 'Provers to use (comma-separated)'
        required: true
        type: string
        default: 'sp1,risc0'
      programs:
        description: 'Programs to benchmark (comma-separated, leave empty for all)'
        required: false
        type: string
      filename:
        description: 'Filename for the benchmark'
        required: true
        default: 'benchmark'
        type: string
      trials:
        description: 'Number of trials to run'
        required: true
        default: '1'
        type: number
      hashfns:
        description: 'Hash functions to use (comma-separated)'
        required: true
        type: string
        default: 'poseidon'
      shard_sizes:
        description: 'Shard sizes to use (comma-separated)'
        required: true
        default: '22'
        type: string
      sp1_ref:
        description: 'SP1 reference (commit hash or branch name)'
        required: false
        type: string
        default: '2e8b0a8'
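
# Two jobs: `run-benchmarks` provisions an on-demand EC2 self-hosted runner, dispatches
# the benchmark workflow onto it, and tears it down afterwards; `run-on-ec2` is the job
# that actually executes the benchmarks inside Docker on that runner.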
jobs:
  run-benchmarks:
    name: Run ZKVM-Perf
    runs-on: ubuntu-latest
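
    # One matrix entry per instance type: a GPU instance (g6.16xlarge) and a CPU-only
    # instance (r7i.16xlarge). fail-fast is disabled so one failing configuration does
    # not cancel the other.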
    strategy:
      matrix:
        instance_config:
          - {type: "g6.16xlarge", gpu: true}
          - {type: "r7i.16xlarge", gpu: false}
      fail-fast: false

    steps:
      - name: Checkout sources
        uses: actions/checkout@v4

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}
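
      # Launch a self-hosted GitHub runner on a fresh EC2 instance built from the
      # supplied AMI; the instance type (GPU or CPU) comes from the matrix entry.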
      - name: Start EC2 runner
        id: "start-ec2-runner"
        uses: "xJonathanLEI/ec2-github-runner@main"
        with:
          mode: start
          github-token: ${{ secrets.GH_PAT }}
          ec2-image-id: ${{ inputs.ami_id }}
          ec2-instance-type: ${{ matrix.instance_config.type }}
          subnet-id: ${{ secrets.AWS_SUBNET_ID }}
          security-group-id: ${{ secrets.AWS_SG_ID }}
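
      # Dispatch the `adhoc.yml` workflow, forwarding the benchmark inputs together
      # with the new runner's label so the benchmark job runs on that instance.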
      - name: Run benchmarks
        uses: actions/github-script@v6
        env:
          RUNNER_NAME: ${{ steps.start-ec2-runner.outputs.label }}
          USE_GPU: ${{ matrix.instance_config.gpu }}
        with:
          script: |
            const runnerName = process.env.RUNNER_NAME;
            const useGpu = process.env.USE_GPU === 'true';
            await github.rest.actions.createWorkflowDispatch({
              owner: context.repo.owner,
              repo: context.repo.repo,
              workflow_id: 'adhoc.yml',
              ref: context.ref,
              inputs: {
                runner_name: runnerName,
                sp1_ref: '${{ inputs.sp1_ref }}',
                use_gpu: useGpu.toString(),
                provers: '${{ inputs.provers }}',
                programs: '${{ inputs.programs }}',
                filename: '${{ inputs.filename }}',
                trials: '${{ inputs.trials }}',
                hashfns: '${{ inputs.hashfns }}',
                shard_sizes: '${{ inputs.shard_sizes }}',
              },
            });
            core.info('Triggered benchmark run on EC2 runner');
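
      # Poll the most recent `adhoc.yml` run every 60 seconds until it reports a
      # terminal conclusion. Note this reads the latest run's conclusion rather than
      # tracking a specific run ID.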
      - name: Wait for benchmarks to complete
        run: |
          while true; do
            status=$(gh run list --workflow=adhoc.yml --json conclusion -q '.[0].conclusion')
            if [ "$status" = "success" ] || [ "$status" = "failure" ]; then
              break
            fi
            sleep 60
          done
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
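
      # Always stop and terminate the EC2 runner, even if earlier steps failed, so
      # instances are not left running.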
      - name: Stop EC2 runner
        if: always()
        uses: machulav/ec2-github-runner@v2
        with:
          mode: stop
          github-token: ${{ secrets.GH_PAT }}
          label: ${{ steps.start-ec2-runner.outputs.label }}
          ec2-instance-id: ${{ steps.start-ec2-runner.outputs.ec2-instance-id }}
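
  # This job targets the self-hosted runner label passed in as `runner_name`. That
  # input (like `use_gpu`) is not declared in this workflow's `workflow_dispatch`
  # inputs; it is supplied via the `adhoc.yml` dispatch above, and the job is skipped
  # when `runner_name` is empty.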
  run-on-ec2:
    name: Run Benchmarks on EC2
    runs-on: ${{ github.event.inputs.runner_name }}
    if: github.event.inputs.runner_name != ''
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4

      - name: Update SP1 reference
        run: |
          chmod +x update_sp1_and_build.sh
          SP1_REF=${{ github.event.inputs.sp1_ref }} RUN_BUILD=false ./update_sp1_and_build.sh
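
      # Build the benchmark image from the GPU Dockerfile, pinning SP1 to the
      # requested reference via the SP1_REF build argument.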
      - name: Run docker build
        run: |
          docker build -t zkvm-perf --platform linux/amd64 -f Dockerfile.gpu --build-arg SP1_REF=${{ github.event.inputs.sp1_ref }} .
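
      # Run the benchmark sweep inside the container: the Docker socket and the
      # benchmarks directory are mounted in, `--gpus all` is added only when
      # `use_gpu` is true, and `--programs` is passed only when a program list
      # was provided.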
      - name: Run Tests (docker)
        run: |
          docker run ${{ github.event.inputs.use_gpu == 'true' && '--gpus all' || '' }} --platform linux/amd64 \
            -v /var/run/docker.sock:/var/run/docker.sock \
            -v ./benchmarks:/usr/src/app/benchmarks \
            -e RUST_BACKTRACE=full \
            --network host \
            zkvm-perf \
            "python3 sweep.py --filename ${{ github.event.inputs.filename }} \
            --trials ${{ github.event.inputs.trials }} \
            ${{ github.event.inputs.programs && format('--programs {0}', github.event.inputs.programs) || '' }} \
            --provers ${{ github.event.inputs.provers }} \
            --hashfns ${{ github.event.inputs.hashfns }} \
            --shard-sizes ${{ github.event.inputs.shard_sizes }}"
      - name: Print Results
        run: |
          cat benchmarks/*.csv
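
# A minimal sketch of dispatching this workflow from the GitHub CLI (assumes `gh` is
# authenticated against the repository; the field values below are illustrative and
# mirror the input defaults):
#
#   gh workflow run "Execute ZKVM-Perf" \
#     -f ami_id=ami-079a6a210557ef0e4 \
#     -f provers=sp1,risc0 \
#     -f filename=benchmark \
#     -f trials=1 \
#     -f hashfns=poseidon \
#     -f shard_sizes=22 \
#     -f sp1_ref=2e8b0a8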