name: Benchmarks

on:
  push:
    branches: [main]
  workflow_dispatch: # Allow manual runs

# Only allow one benchmark run at a time (they're slow and resource-intensive)
concurrency:
  group: benchmarks
  cancel-in-progress: false

jobs:
  benchmark:
    name: Run Benchmarks
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Need full history for commit info

      - uses: pnpm/action-setup@v4

      - uses: actions/setup-node@v4
        with:
          # Quoted so YAML never retypes the version (e.g. "3.10"-style traps)
          node-version: "24"
          cache: pnpm

      - name: Install hyperfine
        run: |
          wget https://github.com/sharkdp/hyperfine/releases/download/v1.19.0/hyperfine_1.19.0_amd64.deb
          echo "5ffe6996aefbbf547f383116eea53569263cc7cf6598ee53f0081e37b702055d hyperfine_1.19.0_amd64.deb" | sha256sum -c
          sudo dpkg -i hyperfine_1.19.0_amd64.deb

      # Install monorepo deps (builds vinext plugin via pnpm run build)
      - run: pnpm install --frozen-lockfile

      - name: Build vinext plugin
        run: pnpm run build

      # Generate the shared benchmark app (copies app/ into each project dir)
      - name: Generate benchmark app
        run: node benchmarks/generate-app.mjs

      # Install benchmark-specific deps (outside pnpm workspace)
      - name: Install benchmark dependencies
        run: |
          cd benchmarks/nextjs && npm install
          cd ../vinext && npm install
          cd ../vinext-rolldown && npm install

      # Run the benchmarks
      - name: Run benchmarks
        run: node benchmarks/run.mjs --runs=5 --dev-runs=10

      # Find the latest results JSON
      - name: Locate results
        id: results
        run: |
          RESULTS_FILE=$(ls -t benchmarks/results/bench-*.json | head -1)
          echo "file=$RESULTS_FILE" >> "$GITHUB_OUTPUT"
          echo "Found results: $RESULTS_FILE"

      # Upload results as artifact (backup)
      - name: Upload results artifact
        uses: actions/upload-artifact@v4
        with:
          name: benchmark-results-${{ github.sha }}
          path: benchmarks/results/bench-*.json
          retention-days: 90

      # Upload results to the benchmarks dashboard
      - name: Upload to dashboard
        if: github.event_name == 'push' && github.ref == 'refs/heads/main'
        env:
          BENCHMARK_UPLOAD_TOKEN: ${{ secrets.BENCHMARK_UPLOAD_TOKEN }}
        run: |
          RESULTS_FILE="${{ steps.results.outputs.file }}"
          COMMIT_SHA="${{ github.sha }}"
          COMMIT_SHORT="${COMMIT_SHA:0:7}"
          COMMIT_MSG=$(git log -1 --format='%s' "$COMMIT_SHA")
          COMMIT_DATE=$(git log -1 --format='%aI' "$COMMIT_SHA")
          # Read the benchmark results JSON and wrap with commit metadata
          RESULTS_JSON=$(cat "$RESULTS_FILE")
          jq -n \
            --arg sha "$COMMIT_SHA" \
            --arg short "$COMMIT_SHORT" \
            --arg msg "$COMMIT_MSG" \
            --arg date "$COMMIT_DATE" \
            --argjson results "$RESULTS_JSON" \
            '{
              commitSha: $sha,
              commitShort: $short,
              commitMessage: $msg,
              commitDate: $date,
              results: $results
            }' | curl -f -X POST \
            -H "Authorization: Bearer $BENCHMARK_UPLOAD_TOKEN" \
            -H "Content-Type: application/json" \
            -d @- \
            "https://benchmarks.vinext.workers.dev/api/upload"

      # Post summary as a GitHub Actions summary
      - name: Post summary
        run: |
          MD_FILE=$(ls -t benchmarks/results/bench-*.md | head -1)
          cat "$MD_FILE" >> "$GITHUB_STEP_SUMMARY"