chore: generate CPU profiles when running benchmarks (#18043)

This causes `pnpm bench` and `pnpm bench:compare` to generate
`.cpuprofile` files for each benchmark, which should in theory make it
easier to understand how different branches affect performance (and find
opportunities for optimisation). These files can be opened directly in
VS Code and other editors, or in
[profiler.firefox.com](https://profiler.firefox.com),
[speedscope.app](https://www.speedscope.app), Chrome's performance
DevTools, and so on.
pull/18045/head
Rich Harris 3 weeks ago committed by GitHub
parent edcbb0e640
commit 99b1467ba4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194

@ -107,7 +107,7 @@ jobs:
- uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v4
- uses: actions/setup-node@v6
with:
node-version: 18
node-version: 24
cache: pnpm
- run: pnpm install --frozen-lockfile
- run: pnpm bench

2
.gitignore vendored

@ -23,4 +23,6 @@ coverage
tmp
benchmarking/.profiles
benchmarking/compare/.results
benchmarking/compare/.profiles

@ -17,6 +17,10 @@ fs.mkdirSync(outdir);
const branches = [];
let PROFILE_DIR = path.resolve(filename, '../.profiles');
if (fs.existsSync(PROFILE_DIR)) fs.rmSync(PROFILE_DIR, { recursive: true });
fs.mkdirSync(PROFILE_DIR, { recursive: true });
for (const arg of process.argv.slice(2)) {
if (arg.startsWith('--')) continue;
if (arg === filename) continue;
@ -44,7 +48,12 @@ for (const branch of branches) {
execSync(`git checkout ${branch}`);
await new Promise((fulfil, reject) => {
const child = fork(runner);
const child = fork(runner, [], {
env: {
...process.env,
BENCH_PROFILE_DIR: `${PROFILE_DIR}/${safe(branch)}`
}
});
child.on('message', (results) => {
fs.writeFileSync(`${outdir}/${branch}.json`, JSON.stringify(results, null, ' '));
@ -57,6 +66,10 @@ for (const branch of branches) {
console.groupEnd();
}
if (PROFILE_DIR !== null) {
console.log(`\nCPU profiles written to ${PROFILE_DIR}`);
}
const results = branches.map((branch) => {
return JSON.parse(fs.readFileSync(`${outdir}/${branch}.json`, 'utf-8'));
});
@ -101,3 +114,7 @@ for (let i = 0; i < results[0].length; i += 1) {
/**
 * Maps a zero-based index to its lowercase Latin letter (0 → 'a', 1 → 'b', …).
 * @param {number} i
 * @returns {string}
 */
function char(i) {
	return String.fromCodePoint(0x61 + i);
}
/**
 * Turns an arbitrary name (e.g. a git branch name) into a filesystem-safe
 * string by collapsing each run of disallowed characters into a single `_`.
 * @param {string} name
 * @returns {string}
 */
function safe(name) {
	const disallowed = /[^a-z0-9._-]+/gi;
	return name.replaceAll(disallowed, '_');
}

@ -1,12 +1,17 @@
import { reactivity_benchmarks } from '../benchmarks/reactivity/index.js';
import { with_cpu_profile } from '../utils.js';
const results = [];
const PROFILE_DIR = process.env.BENCH_PROFILE_DIR;
for (let i = 0; i < reactivity_benchmarks.length; i += 1) {
const benchmark = reactivity_benchmarks[i];
process.stderr.write(`Running ${i + 1}/${reactivity_benchmarks.length} ${benchmark.label} `);
results.push({ benchmark: benchmark.label, ...(await benchmark.fn()) });
results.push({
benchmark: benchmark.label,
...(await with_cpu_profile(PROFILE_DIR, benchmark.label, () => benchmark.fn()))
});
process.stderr.write('\x1b[2K\r');
}

@ -1,10 +1,13 @@
import * as $ from '../packages/svelte/src/internal/client/index.js';
import { reactivity_benchmarks } from './benchmarks/reactivity/index.js';
import { ssr_benchmarks } from './benchmarks/ssr/index.js';
import { with_cpu_profile } from './utils.js';
// e.g. `pnpm bench kairo` to only run the kairo benchmarks
const filters = process.argv.slice(2);
const PROFILE_DIR = './benchmarking/.profiles';
const suites = [
{
benchmarks: reactivity_benchmarks.filter(
@ -50,7 +53,7 @@ try {
console.log('='.repeat(TOTAL_WIDTH));
for (const benchmark of benchmarks) {
const results = await benchmark.fn();
const results = await with_cpu_profile(PROFILE_DIR, benchmark.label, () => benchmark.fn());
console.log(
pad_right(benchmark.label, COLUMN_WIDTHS[0]) +
pad_left(results.time.toFixed(2), COLUMN_WIDTHS[1]) +
@ -70,6 +73,10 @@ try {
);
console.log('='.repeat(TOTAL_WIDTH));
}
if (PROFILE_DIR !== null) {
console.log(`\nCPU profiles written to ${PROFILE_DIR}`);
}
} catch (e) {
// eslint-disable-next-line no-console
console.error(e);

@ -1,4 +1,7 @@
import { performance, PerformanceObserver } from 'node:perf_hooks';
import fs from 'node:fs';
import path from 'node:path';
import inspector from 'node:inspector/promises';
import v8 from 'v8-natives';
// Credit to https://github.com/milomg/js-reactivity-benchmark for the logic for timing + GC tracking.
@ -41,3 +44,37 @@ export async function fastest_test(times, fn) {
return results.reduce((a, b) => (a.time < b.time ? a : b));
}
function safe(name) {
return name.replace(/[^a-z0-9._-]+/gi, '_');
}
/**
* @template T
* @param {string | null} profile_dir
* @param {string} profile_name
* @param {() => T | Promise<T>} fn
* @returns {Promise<T>}
*/
export async function with_cpu_profile(profile_dir, profile_name, fn) {
if (profile_dir === null) {
return await fn();
}
fs.mkdirSync(profile_dir, { recursive: true });
const session = new inspector.Session();
session.connect();
await session.post('Profiler.enable');
await session.post('Profiler.start');
try {
return await fn();
} finally {
const { profile } = /** @type {{ profile: object }} */ (await session.post('Profiler.stop'));
const file = path.join(profile_dir, `${safe(profile_name)}.cpuprofile`);
fs.writeFileSync(file, JSON.stringify(profile));
session.disconnect();
}
}

Loading…
Cancel
Save