perf: add FPS, p95 frame time, and target thresholds to CI perf report (#10516)

## Summary

Enhances the CI performance report with explicit FPS metrics, percentile
frame times, and milestone target thresholds.

### Changes

**PerformanceHelper** (data collection):
- `measureFrameDurations()` now returns individual frame durations
instead of just the average, enabling percentile computation
- Computes `p95FrameDurationMs` from sorted frame durations
- Strips `allFrameDurationsMs` from serialized JSON to avoid bloating
artifacts

**perf-report.ts** (report rendering):
- **Headline summary** at top of report with key metrics per test
scenario
- **FPS display**: derives avg FPS and P5 FPS from frame duration
metrics
- **Target thresholds**: shows P5 FPS ≥ 52 target with a ✅ / ❌ pass/fail
indicator
- **p95 frame time**: added as a tracked metric in the comparison table
- Metrics reordered to show frame time/FPS first (what people look for)

### Target

From the Nodes 2.0 Perf milestone: **P5 ≥ 52 FPS** on 245-node workflow
(equivalent to P95 frame time ≤ 19.2ms).

### Example headline output

```
> **vue-large-graph-pan**: 60 avg FPS · 58 P5 FPS ✅ (target: ≥52) · 12ms TBT · 45.2 MB heap
> **canvas-zoom-sweep**: 45 avg FPS · 38 P5 FPS ❌ (target: ≥52) · 85ms TBT · 52.1 MB heap
```

Follow-up to #10477 (merged).

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-10516-perf-add-FPS-p95-frame-time-and-target-thresholds-to-CI-perf-report-32e6d73d365081a2a2a6ceae7d6e9be5)
by [Unito](https://www.unito.io)

---------

Co-authored-by: GitHub Action <action@github.com>
This commit is contained in:
Christian Byrne
2026-03-28 23:29:19 -07:00
committed by GitHub
parent 391a6db056
commit e7c2cd04f4
3 changed files with 87 additions and 14 deletions

View File

@@ -30,6 +30,8 @@ export interface PerfMeasurement {
eventListeners: number
totalBlockingTimeMs: number
frameDurationMs: number
p95FrameDurationMs: number
allFrameDurationsMs: number[]
}
export class PerformanceHelper {
@@ -101,13 +103,13 @@ export class PerformanceHelper {
}
/**
* Measure average frame duration via rAF timing over a sample window.
* Returns average ms per frame (lower = better, 16.67 = 60fps).
* Measure individual frame durations via rAF timing over a sample window.
* Returns all per-frame durations so callers can compute avg, p95, etc.
*/
private async measureFrameDuration(sampleFrames = 10): Promise<number> {
private async measureFrameDurations(sampleFrames = 30): Promise<number[]> {
return this.page.evaluate((frames) => {
return new Promise<number>((resolve) => {
const timeout = setTimeout(() => resolve(0), 5000)
return new Promise<number[]>((resolve) => {
const timeout = setTimeout(() => resolve([]), 5000)
const timestamps: number[] = []
let count = 0
function tick(ts: number) {
@@ -118,11 +120,14 @@ export class PerformanceHelper {
} else {
clearTimeout(timeout)
if (timestamps.length < 2) {
resolve(0)
resolve([])
return
}
const total = timestamps[timestamps.length - 1] - timestamps[0]
resolve(total / (timestamps.length - 1))
const durations: number[] = []
for (let i = 1; i < timestamps.length; i++) {
durations.push(timestamps[i] - timestamps[i - 1])
}
resolve(durations)
}
}
requestAnimationFrame(tick)
@@ -177,11 +182,21 @@ export class PerformanceHelper {
return after[key] - before[key]
}
const [totalBlockingTimeMs, frameDurationMs] = await Promise.all([
const [totalBlockingTimeMs, allFrameDurationsMs] = await Promise.all([
this.collectTBT(),
this.measureFrameDuration()
this.measureFrameDurations()
])
const frameDurationMs =
allFrameDurationsMs.length > 0
? allFrameDurationsMs.reduce((a, b) => a + b, 0) /
allFrameDurationsMs.length
: 0
const sorted = [...allFrameDurationsMs].sort((a, b) => a - b)
const p95FrameDurationMs =
sorted.length > 0 ? sorted[Math.ceil(sorted.length * 0.95) - 1] : 0
return {
name,
durationMs: delta('Timestamp') * 1000,
@@ -197,7 +212,9 @@ export class PerformanceHelper {
scriptDurationMs: delta('ScriptDuration') * 1000,
eventListeners: delta('JSEventListeners'),
totalBlockingTimeMs,
frameDurationMs
frameDurationMs,
p95FrameDurationMs,
allFrameDurationsMs
}
}
}

View File

@@ -47,7 +47,8 @@ export function logMeasurement(
export function recordMeasurement(m: PerfMeasurement) {
mkdirSync(TEMP_DIR, { recursive: true })
const filename = `${m.name}-${Date.now()}.json`
writeFileSync(join(TEMP_DIR, filename), JSON.stringify(m))
const { allFrameDurationsMs: _, ...serializable } = m
writeFileSync(join(TEMP_DIR, filename), JSON.stringify(serializable))
}
export function writePerfReport(

View File

@@ -29,6 +29,8 @@ interface PerfMeasurement {
eventListeners: number
totalBlockingTimeMs: number
frameDurationMs: number
p95FrameDurationMs: number
allFrameDurationsMs?: number[]
}
interface PerfReport {
@@ -53,6 +55,7 @@ type MetricKey =
| 'eventListeners'
| 'totalBlockingTimeMs'
| 'frameDurationMs'
| 'p95FrameDurationMs'
| 'heapUsedBytes'
interface MetricDef {
@@ -64,6 +67,8 @@ interface MetricDef {
}
const REPORTED_METRICS: MetricDef[] = [
{ key: 'frameDurationMs', label: 'avg frame time', unit: 'ms' },
{ key: 'p95FrameDurationMs', label: 'p95 frame time', unit: 'ms' },
{ key: 'layoutDurationMs', label: 'layout duration', unit: 'ms' },
{
key: 'styleRecalcDurationMs',
@@ -80,12 +85,15 @@ const REPORTED_METRICS: MetricDef[] = [
{ key: 'taskDurationMs', label: 'task duration', unit: 'ms' },
{ key: 'scriptDurationMs', label: 'script duration', unit: 'ms' },
{ key: 'totalBlockingTimeMs', label: 'TBT', unit: 'ms' },
{ key: 'frameDurationMs', label: 'frame duration', unit: 'ms' },
{ key: 'heapUsedBytes', label: 'heap used', unit: 'bytes' },
{ key: 'domNodes', label: 'DOM nodes', unit: '', minAbsDelta: 5 },
{ key: 'eventListeners', label: 'event listeners', unit: '', minAbsDelta: 5 }
]
/** Target: P5 FPS ≥ 52 → P95 frame time ≤ 19.2ms */
const TARGET_P95_FRAME_MS = 19.2
const TARGET_P5_FPS = 52
function groupByName(
measurements: PerfMeasurement[]
): Map<string, PerfMeasurement[]> {
@@ -207,6 +215,46 @@ function formatBytes(bytes: number): string {
return `${(bytes / (1024 * 1024)).toFixed(1)} MB`
}
/**
 * Convert a frame duration in milliseconds to frames per second.
 *
 * @param ms - Average (or percentile) frame duration in ms.
 * @returns The equivalent FPS, or 0 when the duration is zero or negative
 *   (avoids division by zero for empty/failed measurements).
 */
function frameTimeToFps(ms: number): number {
  if (ms <= 0) return 0
  return 1000 / ms
}
/**
 * Build the headline summary for the perf report: one markdown blockquote
 * line per test scenario showing the key metrics people look for first —
 * avg FPS, P5 FPS against the milestone target, TBT, and heap usage.
 *
 * Scenarios missing a metric simply omit that segment; scenarios with no
 * metrics at all are skipped. Returns an empty array when nothing could be
 * summarized, so callers can splice the result in unconditionally.
 *
 * @param prGroups - PR measurements grouped by test scenario name.
 * @returns Markdown lines (blockquote + trailing blank line), or [].
 */
function renderHeadlineSummary(
  prGroups: Map<string, PerfMeasurement[]>
): string[] {
  const summaries: string[] = []
  for (const [testName, samples] of prGroups) {
    const segments: string[] = [`**${testName}**:`]

    const avgFrame = medianMetric(samples, 'frameDurationMs')
    if (avgFrame !== null) {
      segments.push(`${frameTimeToFps(avgFrame).toFixed(1)} avg FPS`)
    }

    // The 95th-percentile slowest frame corresponds to the 5th-percentile FPS.
    const p95Frame = medianMetric(samples, 'p95FrameDurationMs')
    if (p95Frame !== null) {
      const p5Fps = frameTimeToFps(p95Frame)
      const indicator = p5Fps >= TARGET_P5_FPS ? '✅' : '❌'
      segments.push(
        `${p5Fps.toFixed(1)} P5 FPS ${indicator} (target: ≥${TARGET_P5_FPS})`
      )
    }

    const tbt = medianMetric(samples, 'totalBlockingTimeMs')
    if (tbt !== null) segments.push(`${tbt.toFixed(0)}ms TBT`)

    const heap = medianMetric(samples, 'heapUsedBytes')
    if (heap !== null) segments.push(`${formatBytes(heap)} heap`)

    // Only include scenarios that produced at least one metric segment.
    if (segments.length > 1) summaries.push(segments.join(' · '))
  }
  return summaries.length > 0 ? ['> ' + summaries.join('\n> '), ''] : []
}
function renderFullReport(
prGroups: Map<string, PerfMeasurement[]>,
baseline: PerfReport,
@@ -423,6 +471,7 @@ function main() {
const lines: string[] = []
lines.push('## ⚡ Performance Report\n')
lines.push(...renderHeadlineSummary(prGroups))
if (baseline && historical.length >= 2) {
lines.push(...renderFullReport(prGroups, baseline, historical))
@@ -432,9 +481,15 @@ function main() {
lines.push(...renderNoBaselineReport(prGroups))
}
const rawData = {
...current,
measurements: current.measurements.map(
({ allFrameDurationsMs: _, ...rest }) => rest
)
}
lines.push('\n<details><summary>Raw data</summary>\n')
lines.push('```json')
lines.push(JSON.stringify(current, null, 2))
lines.push(JSON.stringify(rawData, null, 2))
lines.push('```')
lines.push('\n</details>')