feat(queue): introduce queue notification banners and remove completion summary flow (#8740)

## Summary
Replace the old completion-summary overlay path with queue notification
banners for queueing/completed/failed lifecycle feedback.

## Key changes
- Added `QueueNotificationBanner`, `QueueNotificationBannerHost`,
stories, and tests.
- Added `useQueueNotificationBanners` to handle:
  - immediate `queuedPending` on `promptQueueing`
  - transition to `queued` on `promptQueued` (request-id aware)
  - completed/failed notification sequencing from finished batch history
  - timed notification queueing/dismissal
- Removed completion-summary implementation:
  - `useCompletionSummary`
  - `CompletionSummaryBanner`
  - `QueueOverlayEmpty`
- Simplified `QueueProgressOverlay` to `hidden | active | expanded`
states.
- Top menu behavior:
  - restored `QueueInlineProgressSummary` as separate UI
  - ordering is inline summary first, notification banner below
  - notification banner remains under the top menu section (not
teleported to the floating actionbar target)
- Kept established API-event signaling pattern
(`promptQueueing`/`promptQueued`) instead of introducing a separate bus.
- Updated tests for top-menu visibility/ordering and notification
behavior across QPOV2 enabled/disabled.

## Notes
- Completion notifications now support stacked thumbnails (cap: 3).
- Figma design:
https://www.figma.com/design/LVilZgHGk5RwWOkVN6yCEK/Queue-Progress-Modal?node-id=3843-20314&m=dev

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-8740-feat-Queue-Notification-Toasts-3016d73d3650814c8a50d9567a40f44d)
by [Unito](https://www.unito.io)
This commit is contained in:
Benjamin Lu
2026-02-14 12:14:55 -08:00
committed by GitHub
parent 27da781029
commit fcb4341c98
20 changed files with 1267 additions and 884 deletions

View File

@@ -131,12 +131,16 @@ interface QueuePromptOptions {
/** Dictionary of Frontend-generated API calls */
interface FrontendApiCalls {
graphChanged: ComfyWorkflowJSON
promptQueued: { number: number; batchCount: number }
promptQueueing: { requestId: number; batchCount: number; number?: number }
promptQueued: { number: number; batchCount: number; requestId?: number }
graphCleared: never
reconnecting: never
reconnected: never
}
export type PromptQueueingEventPayload = FrontendApiCalls['promptQueueing']
export type PromptQueuedEventPayload = FrontendApiCalls['promptQueued']
/** Dictionary of calls originating from ComfyUI core */
interface BackendApiCalls {
progress: ProgressWsMessage

View File

@@ -146,8 +146,10 @@ export class ComfyApp {
private queueItems: {
number: number
batchCount: number
requestId: number
queueNodeIds?: NodeExecutionId[]
}[] = []
private nextQueueRequestId = 1
/**
* If the queue is currently being processed
*/
@@ -1379,7 +1381,12 @@ export class ComfyApp {
batchCount: number = 1,
queueNodeIds?: NodeExecutionId[]
): Promise<boolean> {
this.queueItems.push({ number, batchCount, queueNodeIds })
const requestId = this.nextQueueRequestId++
this.queueItems.push({ number, batchCount, queueNodeIds, requestId })
api.dispatchCustomEvent('promptQueueing', {
requestId,
batchCount
})
// Only have one action process the items so each one gets a unique seed correctly
if (this.processingQueue) {
@@ -1396,7 +1403,9 @@ export class ComfyApp {
try {
while (this.queueItems.length) {
const { number, batchCount, queueNodeIds } = this.queueItems.pop()!
const { number, batchCount, queueNodeIds, requestId } =
this.queueItems.pop()!
let queuedCount = 0
const previewMethod = useSettingStore().get(
'Comfy.Execution.PreviewMethod'
)
@@ -1462,6 +1471,8 @@ export class ComfyApp {
break
}
queuedCount++
// Allow widgets to run callbacks after a prompt has been queued
// e.g. random seed after every gen
executeWidgetsCallback(queuedNodes, 'afterQueued', {
@@ -1470,11 +1481,18 @@ export class ComfyApp {
this.canvas.draw(true, true)
await this.ui.queue.update()
}
if (queuedCount > 0) {
api.dispatchCustomEvent('promptQueued', {
number,
batchCount: queuedCount,
requestId
})
}
}
} finally {
this.processingQueue = false
}
api.dispatchCustomEvent('promptQueued', { number, batchCount })
return !executionStore.lastNodeErrors
}