mirror of
https://github.com/Comfy-Org/ComfyUI_frontend.git
synced 2026-02-08 17:10:07 +00:00
[feat] Move partial execution to the backend and make it work with subgraphs (#4624)
@@ -35,11 +35,13 @@ import type {
   NodeId
 } from '@/schemas/comfyWorkflowSchema'
 import type { ComfyNodeDef } from '@/schemas/nodeDefSchema'
+import type { NodeExecutionId } from '@/types/nodeIdentification'
 import { WorkflowTemplates } from '@/types/workflowTemplateTypes'
 
 interface QueuePromptRequestBody {
   client_id: string
   prompt: ComfyApiWorkflow
+  partial_execution_targets?: NodeExecutionId[]
   extra_data: {
     extra_pnginfo: {
       workflow: ComfyWorkflowJSON
@@ -80,6 +82,18 @@ interface QueuePromptRequestBody {
   number?: number
 }
 
+/**
+ * Options for queuePrompt method
+ */
+interface QueuePromptOptions {
+  /**
+   * Optional list of node execution IDs to execute (partial execution).
+   * Each ID represents a node's position in nested subgraphs.
+   * Format: Colon-separated path of node IDs (e.g., "123:456:789")
+   */
+  partialExecutionTargets?: NodeExecutionId[]
+}
+
 /** Dictionary of Frontend-generated API calls */
 interface FrontendApiCalls {
   graphChanged: ComfyWorkflowJSON
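
Note: the "123:456:789" format described in the new QueuePromptOptions doc comment is easiest to see with a small sketch. The helper and local type aliases below are illustrative only; the real NodeExecutionId type lives in '@/types/nodeIdentification'.

// Sketch: building a colon-separated execution ID for a node nested in
// subgraphs. toNodeExecutionId is hypothetical, not part of this change.
type NodeId = string | number
type NodeExecutionId = string

function toNodeExecutionId(
  subgraphPath: NodeId[],
  nodeId: NodeId
): NodeExecutionId {
  // Join the path of enclosing subgraph node IDs with the node's own ID.
  return [...subgraphPath, nodeId].join(':')
}

// Node 789 inside subgraph node 456, which itself sits inside node 123 of
// the root graph:
const target = toNodeExecutionId([123, 456], 789) // "123:456:789"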
@@ -610,18 +624,23 @@ export class ComfyApi extends EventTarget {
   /**
    * Queues a prompt to be executed
    * @param {number} number The index at which to queue the prompt, passing -1 will insert the prompt at the front of the queue
-   * @param {object} prompt The prompt data to queue
+   * @param {object} data The prompt data to queue
+   * @param {QueuePromptOptions} options Optional execution options
    * @throws {PromptExecutionError} If the prompt fails to execute
    */
   async queuePrompt(
     number: number,
-    data: { output: ComfyApiWorkflow; workflow: ComfyWorkflowJSON }
+    data: { output: ComfyApiWorkflow; workflow: ComfyWorkflowJSON },
+    options?: QueuePromptOptions
   ): Promise<PromptResponse> {
     const { output: prompt, workflow } = data
 
     const body: QueuePromptRequestBody = {
       client_id: this.clientId ?? '', // TODO: Unify clientId access
       prompt,
+      ...(options?.partialExecutionTargets && {
+        partial_execution_targets: options.partialExecutionTargets
+      }),
       extra_data: {
         auth_token_comfy_org: this.authToken,
         api_key_comfy_org: this.apiKey,
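
Note: a sketch of how the new options argument is expected to shape the request body. The call mirrors the updated call site in ComfyApp further below; the target ID and JSON values are placeholders.

// Caller side (number and p come from the surrounding call site):
const res = await api.queuePrompt(number, p, {
  partialExecutionTargets: ['123:456:789']
})

// Resulting POST body (shape follows QueuePromptRequestBody):
// {
//   "client_id": "...",
//   "prompt": { ... },
//   "partial_execution_targets": ["123:456:789"],
//   "extra_data": { "extra_pnginfo": { "workflow": { ... } } }
// }
//
// When no targets are passed, options?.partialExecutionTargets is undefined,
// the spread contributes nothing, and the field is omitted, so requests
// without partial execution are unchanged.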
@@ -59,6 +59,7 @@ import { useColorPaletteStore } from '@/stores/workspace/colorPaletteStore'
 import { useWorkspaceStore } from '@/stores/workspaceStore'
 import type { ComfyExtension, MissingNodeType } from '@/types/comfy'
 import { ExtensionManager } from '@/types/extensionTypes'
+import type { NodeExecutionId } from '@/types/nodeIdentification'
 import { ColorAdjustOptions, adjustColor } from '@/utils/colorUtil'
 import { graphToPrompt } from '@/utils/executionUtil'
 import {
@@ -127,7 +128,7 @@ export class ComfyApp {
   #queueItems: {
     number: number
     batchCount: number
-    queueNodeIds?: NodeId[]
+    queueNodeIds?: NodeExecutionId[]
   }[] = []
   /**
    * If the queue is currently being processed
@@ -1239,20 +1240,16 @@ export class ComfyApp {
     })
   }
 
-  async graphToPrompt(
-    graph = this.graph,
-    options: { queueNodeIds?: NodeId[] } = {}
-  ) {
+  async graphToPrompt(graph = this.graph) {
     return graphToPrompt(graph, {
-      sortNodes: useSettingStore().get('Comfy.Workflow.SortNodeIdOnSave'),
-      queueNodeIds: options.queueNodeIds
+      sortNodes: useSettingStore().get('Comfy.Workflow.SortNodeIdOnSave')
     })
   }
 
   async queuePrompt(
     number: number,
     batchCount: number = 1,
-    queueNodeIds?: NodeId[]
+    queueNodeIds?: NodeExecutionId[]
   ): Promise<boolean> {
     this.#queueItems.push({ number, batchCount, queueNodeIds })
 
@@ -1281,11 +1278,13 @@ export class ComfyApp {
       executeWidgetsCallback(subgraph.nodes, 'beforeQueued')
     }
 
-    const p = await this.graphToPrompt(this.graph, { queueNodeIds })
+    const p = await this.graphToPrompt(this.graph)
     try {
       api.authToken = comfyOrgAuthToken
       api.apiKey = comfyOrgApiKey ?? undefined
-      const res = await api.queuePrompt(number, p)
+      const res = await api.queuePrompt(number, p, {
+        partialExecutionTargets: queueNodeIds
+      })
       delete api.authToken
       delete api.apiKey
       executionStore.lastNodeErrors = res.node_errors ?? null
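
Note: taken together, the frontend no longer prunes the graph before queueing. graphToPrompt serializes the full workflow, and the selected targets travel to the backend as partial_execution_targets, which now decides what actually runs. A minimal caller sketch; the IDs are placeholders, and only the type of the third argument changed here.

// Queue one batch, asking the backend to execute only these nodes.
const targets: NodeExecutionId[] = ['123:456:789', '123:456:790']
await app.queuePrompt(0, 1, targets)

// Omitting the third argument keeps the previous behaviour of executing
// the whole graph.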