mirror of
https://github.com/Comfy-Org/ComfyUI_frontend.git
synced 2026-02-27 18:24:11 +00:00
## Summary Add a permanent, non-failing performance regression detection system using Chrome DevTools Protocol metrics, with automatic PR commenting. ## Changes - **What**: Performance testing infrastructure — `PerformanceHelper` fixture class using CDP `Performance.getMetrics` to collect `RecalcStyleCount`, `LayoutCount`, `LayoutDuration`, `TaskDuration`, `JSHeapUsedSize`. Adds `@perf` Playwright project (Chromium-only, single-threaded, 60s timeout), 4 baseline perf tests, CI workflow with sticky PR comment reporting, and `perf-report.js` script for generating markdown comparison tables. ## Review Focus - `PerformanceHelper` uses `page.context().newCDPSession(page)` — CDP is Chromium-only, so perf metrics are not collected on Firefox. This is intentional since CDP gives us browser-level style recalc/layout counts that `performance.mark/measure` cannot capture. - The CI workflow uses `continue-on-error: true` so perf tests never block merging. - Baseline comparison uses `dawidd6/action-download-artifact` to download metrics from the target branch, following the same pattern as `pr-size-report.yaml`. ## Stack This is the foundation PR for the Firefox performance fix stack: 1. **→ This PR: perf testing infrastructure** 2. `perf/fix-cursor-cache` — cursor style caching (depends on this) 3. `perf/fix-subgraph-svg` — SVG pre-rasterization (depends on this) 4. `perf/fix-clippath-raf` — RAF batching for clip-path (depends on this) PRs 2-4 are independent of each other. ┆Issue is synchronized with this [Notion page](https://www.notion.so/PR-9170-feat-add-performance-testing-infrastructure-with-CDP-metrics-3116d73d3650817cb43def6f8e9917f8) by [Unito](https://www.unito.io) --------- Co-authored-by: GitHub Action <action@github.com> Co-authored-by: Alexander Brown <drjkl@comfy.org>
50 lines · 1.3 KiB · TypeScript
import { mkdirSync, readdirSync, readFileSync, writeFileSync } from 'fs'
|
|
import { join } from 'path'
|
|
|
|
import type { PerfMeasurement } from '../fixtures/helpers/PerformanceHelper'
|
|
|
|
export interface PerfReport {
|
|
timestamp: string
|
|
gitSha: string
|
|
branch: string
|
|
measurements: PerfMeasurement[]
|
|
}
|
|
|
|
const TEMP_DIR = join('test-results', 'perf-temp')
|
|
|
|
export function recordMeasurement(m: PerfMeasurement) {
|
|
mkdirSync(TEMP_DIR, { recursive: true })
|
|
const filename = `${m.name}-${Date.now()}.json`
|
|
writeFileSync(join(TEMP_DIR, filename), JSON.stringify(m))
|
|
}
|
|
|
|
export function writePerfReport(
|
|
gitSha = process.env.GITHUB_SHA ?? 'local',
|
|
branch = process.env.GITHUB_HEAD_REF ?? 'local'
|
|
) {
|
|
if (!readdirSync('test-results', { withFileTypes: true }).length) return
|
|
|
|
let tempFiles: string[]
|
|
try {
|
|
tempFiles = readdirSync(TEMP_DIR).filter((f) => f.endsWith('.json'))
|
|
} catch {
|
|
return
|
|
}
|
|
if (tempFiles.length === 0) return
|
|
|
|
const measurements: PerfMeasurement[] = tempFiles.map((f) =>
|
|
JSON.parse(readFileSync(join(TEMP_DIR, f), 'utf-8'))
|
|
)
|
|
|
|
const report: PerfReport = {
|
|
timestamp: new Date().toISOString(),
|
|
gitSha,
|
|
branch,
|
|
measurements
|
|
}
|
|
writeFileSync(
|
|
join('test-results', 'perf-metrics.json'),
|
|
JSON.stringify(report, null, 2)
|
|
)
|
|
}
|