Mirror of https://github.com/Comfy-Org/ComfyUI_frontend.git, synced 2026-04-20 06:20:11 +00:00.
feat: expand CDP perf metrics — add DOM nodes, script duration, event listeners (#9887)
## Summary Expands the performance testing infrastructure to collect 4 additional CDP metrics that are already returned by `Performance.getMetrics` but were not being read. This is a zero-cost expansion — no additional CDP calls, just reading more fields from the existing response. ## New Metrics | Metric | CDP Source | What It Detects | |---|---|---| | `domNodes` | `Nodes` | DOM node count delta — widget DOM leaks during node create/destroy | | `jsHeapTotalBytes` | `JSHeapTotalSize` | Total heap delta — combined with `heapDeltaBytes` shows GC pressure | | `scriptDurationMs` | `ScriptDuration` | JS execution time vs total task time — script vs rendering balance | | `eventListeners` | `JSEventListeners` | Listener count delta — detects listener accumulation across lifecycle | ## Changes ### `browser_tests/fixtures/helpers/PerformanceHelper.ts` - Added 4 fields to `PerfSnapshot` interface - Added 4 fields to `PerfMeasurement` interface - Wired through `getSnapshot()` and `stopMeasuring()` ### `scripts/perf-report.ts` - Added 4 fields to `PerfMeasurement` interface - Expanded `MetricKey` type and `REPORTED_METRICS` array with 3 new reported metrics (`domNodes`, `scriptDurationMs`, `eventListeners`) - `jsHeapTotalBytes` is collected but not in `REPORTED_METRICS` — it's used alongside `heapDeltaBytes` for GC pressure ratio analysis ## Why These 4 From a gap analysis of all ~30 CDP metrics, these were identified as highest priority for ComfyUI: - **`Nodes`** (P0): ComfyUI dynamically creates/destroys widget DOM. DOM bloat from leaked widgets is a key performance risk, especially for Vue Nodes 2.0. - **`ScriptDuration`** (P1): Separates JS execution from layout/paint. Reveals whether perf issues are script-heavy or rendering-heavy. - **`JSEventListeners`** (P1): Widget lifecycle can leak listeners across node add/remove cycles. - **`JSHeapTotalSize`** (P1): With `JSHeapUsedSize`, the ratio shows GC fragmentation pressure. 
## Backward Compatibility The `PerfMeasurement` interface is extended (not changed). Old baseline `perf-metrics.json` files without these fields will have `undefined` values, which the report script handles gracefully (shows `—` for missing data). ┆Issue is synchronized with this [Notion page](https://www.notion.so/PR-9887-feat-expand-CDP-perf-metrics-add-DOM-nodes-script-duration-event-listeners-3226d73d3650818abea1d4a441667c38) by [Unito](https://www.unito.io) --------- Co-authored-by: GitHub Action <action@github.com> Co-authored-by: Alexander Brown <drjkl@comfy.org>
This commit is contained in:
@@ -8,6 +8,10 @@ interface PerfSnapshot {
|
||||
TaskDuration: number
|
||||
JSHeapUsedSize: number
|
||||
Timestamp: number
|
||||
Nodes: number
|
||||
JSHeapTotalSize: number
|
||||
ScriptDuration: number
|
||||
JSEventListeners: number
|
||||
}
|
||||
|
||||
export interface PerfMeasurement {
|
||||
@@ -19,6 +23,10 @@ export interface PerfMeasurement {
|
||||
layoutDurationMs: number
|
||||
taskDurationMs: number
|
||||
heapDeltaBytes: number
|
||||
domNodes: number
|
||||
jsHeapTotalBytes: number
|
||||
scriptDurationMs: number
|
||||
eventListeners: number
|
||||
}
|
||||
|
||||
export class PerformanceHelper {
|
||||
@@ -59,7 +67,11 @@ export class PerformanceHelper {
|
||||
LayoutDuration: get('LayoutDuration'),
|
||||
TaskDuration: get('TaskDuration'),
|
||||
JSHeapUsedSize: get('JSHeapUsedSize'),
|
||||
Timestamp: get('Timestamp')
|
||||
Timestamp: get('Timestamp'),
|
||||
Nodes: get('Nodes'),
|
||||
JSHeapTotalSize: get('JSHeapTotalSize'),
|
||||
ScriptDuration: get('ScriptDuration'),
|
||||
JSEventListeners: get('JSEventListeners')
|
||||
}
|
||||
}
|
||||
|
||||
@@ -90,7 +102,11 @@ export class PerformanceHelper {
|
||||
layouts: delta('LayoutCount'),
|
||||
layoutDurationMs: delta('LayoutDuration') * 1000,
|
||||
taskDurationMs: delta('TaskDuration') * 1000,
|
||||
heapDeltaBytes: delta('JSHeapUsedSize')
|
||||
heapDeltaBytes: delta('JSHeapUsedSize'),
|
||||
domNodes: delta('Nodes'),
|
||||
jsHeapTotalBytes: delta('JSHeapTotalSize'),
|
||||
scriptDurationMs: delta('ScriptDuration') * 1000,
|
||||
eventListeners: delta('JSEventListeners')
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,6 +19,10 @@ interface PerfMeasurement {
|
||||
layoutDurationMs: number
|
||||
taskDurationMs: number
|
||||
heapDeltaBytes: number
|
||||
domNodes: number
|
||||
jsHeapTotalBytes: number
|
||||
scriptDurationMs: number
|
||||
eventListeners: number
|
||||
}
|
||||
|
||||
interface PerfReport {
|
||||
@@ -32,11 +36,20 @@ const CURRENT_PATH = 'test-results/perf-metrics.json'
|
||||
const BASELINE_PATH = 'temp/perf-baseline/perf-metrics.json'
|
||||
const HISTORY_DIR = 'temp/perf-history'
|
||||
|
||||
type MetricKey = 'styleRecalcs' | 'layouts' | 'taskDurationMs'
|
||||
type MetricKey =
|
||||
| 'styleRecalcs'
|
||||
| 'layouts'
|
||||
| 'taskDurationMs'
|
||||
| 'domNodes'
|
||||
| 'scriptDurationMs'
|
||||
| 'eventListeners'
|
||||
const REPORTED_METRICS: { key: MetricKey; label: string; unit: string }[] = [
|
||||
{ key: 'styleRecalcs', label: 'style recalcs', unit: '' },
|
||||
{ key: 'layouts', label: 'layouts', unit: '' },
|
||||
{ key: 'taskDurationMs', label: 'task duration', unit: 'ms' }
|
||||
{ key: 'taskDurationMs', label: 'task duration', unit: 'ms' },
|
||||
{ key: 'domNodes', label: 'DOM nodes', unit: '' },
|
||||
{ key: 'scriptDurationMs', label: 'script duration', unit: 'ms' },
|
||||
{ key: 'eventListeners', label: 'event listeners', unit: '' }
|
||||
]
|
||||
|
||||
function groupByName(
|
||||
@@ -76,9 +89,8 @@ function getHistoricalStats(
|
||||
const group = groupByName(r.measurements)
|
||||
const samples = group.get(testName)
|
||||
if (samples) {
|
||||
const mean =
|
||||
samples.reduce((sum, s) => sum + s[metric], 0) / samples.length
|
||||
values.push(mean)
|
||||
const mean = meanMetric(samples, metric)
|
||||
if (mean !== null) values.push(mean)
|
||||
}
|
||||
}
|
||||
return computeStats(values)
|
||||
@@ -98,8 +110,20 @@ function formatDelta(pct: number | null): string {
|
||||
return `${sign}${pct.toFixed(0)}%`
|
||||
}
|
||||
|
||||
function meanMetric(samples: PerfMeasurement[], key: MetricKey): number {
|
||||
return samples.reduce((sum, s) => sum + s[key], 0) / samples.length
|
||||
function getMetricValue(
|
||||
sample: PerfMeasurement,
|
||||
key: MetricKey
|
||||
): number | null {
|
||||
const value = sample[key]
|
||||
return Number.isFinite(value) ? value : null
|
||||
}
|
||||
|
||||
function meanMetric(samples: PerfMeasurement[], key: MetricKey): number | null {
|
||||
const values = samples
|
||||
.map((s) => getMetricValue(s, key))
|
||||
.filter((v): v is number => v !== null)
|
||||
if (values.length === 0) return null
|
||||
return values.reduce((sum, v) => sum + v, 0) / values.length
|
||||
}
|
||||
|
||||
function formatBytes(bytes: number): string {
|
||||
@@ -127,8 +151,8 @@ function renderFullReport(
|
||||
const baseSamples = baselineGroups.get(testName)
|
||||
|
||||
for (const { key, label, unit } of REPORTED_METRICS) {
|
||||
const prValues = prSamples.map((s) => s[key])
|
||||
const prMean = prValues.reduce((a, b) => a + b, 0) / prValues.length
|
||||
const prMean = meanMetric(prSamples, key)
|
||||
if (prMean === null) continue
|
||||
const histStats = getHistoricalStats(historical, testName, key)
|
||||
const cv = computeCV(histStats)
|
||||
|
||||
@@ -140,6 +164,12 @@ function renderFullReport(
|
||||
}
|
||||
|
||||
const baseVal = meanMetric(baseSamples, key)
|
||||
if (baseVal === null) {
|
||||
allRows.push(
|
||||
`| ${testName}: ${label} | — | ${formatValue(prMean, unit)} | new | — |`
|
||||
)
|
||||
continue
|
||||
}
|
||||
const deltaPct =
|
||||
baseVal === 0
|
||||
? prMean === 0
|
||||
@@ -218,8 +248,8 @@ function renderColdStartReport(
|
||||
const baseSamples = baselineGroups.get(testName)
|
||||
|
||||
for (const { key, label, unit } of REPORTED_METRICS) {
|
||||
const prValues = prSamples.map((s) => s[key])
|
||||
const prMean = prValues.reduce((a, b) => a + b, 0) / prValues.length
|
||||
const prMean = meanMetric(prSamples, key)
|
||||
if (prMean === null) continue
|
||||
|
||||
if (!baseSamples?.length) {
|
||||
lines.push(
|
||||
@@ -229,6 +259,12 @@ function renderColdStartReport(
|
||||
}
|
||||
|
||||
const baseVal = meanMetric(baseSamples, key)
|
||||
if (baseVal === null) {
|
||||
lines.push(
|
||||
`| ${testName}: ${label} | — | ${formatValue(prMean, unit)} | new |`
|
||||
)
|
||||
continue
|
||||
}
|
||||
const deltaPct =
|
||||
baseVal === 0
|
||||
? prMean === 0
|
||||
@@ -254,18 +290,14 @@ function renderNoBaselineReport(
|
||||
'|--------|-------|'
|
||||
)
|
||||
for (const [testName, prSamples] of prGroups) {
|
||||
const prMean = (key: MetricKey) =>
|
||||
prSamples.reduce((sum, s) => sum + s[key], 0) / prSamples.length
|
||||
|
||||
lines.push(
|
||||
`| ${testName}: style recalcs | ${prMean('styleRecalcs').toFixed(0)} |`
|
||||
)
|
||||
lines.push(`| ${testName}: layouts | ${prMean('layouts').toFixed(0)} |`)
|
||||
lines.push(
|
||||
`| ${testName}: task duration | ${prMean('taskDurationMs').toFixed(0)}ms |`
|
||||
)
|
||||
for (const { key, label, unit } of REPORTED_METRICS) {
|
||||
const prMean = meanMetric(prSamples, key)
|
||||
if (prMean === null) continue
|
||||
lines.push(`| ${testName}: ${label} | ${formatValue(prMean, unit)} |`)
|
||||
}
|
||||
const heapMean =
|
||||
prSamples.reduce((sum, s) => sum + s.heapDeltaBytes, 0) / prSamples.length
|
||||
prSamples.reduce((sum, s) => sum + (s.heapDeltaBytes ?? 0), 0) /
|
||||
prSamples.length
|
||||
lines.push(`| ${testName}: heap delta | ${formatBytes(heapMean)} |`)
|
||||
}
|
||||
return lines
|
||||
|
||||
Reference in New Issue
Block a user