refactor: rename internal promptId/PromptId to jobId/JobId (#8730)

## Summary

Rename all internal TypeScript usage of the legacy `promptId`/`PromptId`
naming to `jobId`/`JobId` across ~38 files, for consistency with the
domain model.

## Changes

- **What**: Renamed internal variable names, type aliases, function
names, class getters, interface fields, and comments from
`promptId`/`PromptId` to `jobId`/`JobId`. Wire-protocol field names
(`prompt_id` in Zod schemas and `e.detail.prompt_id` accesses) are
intentionally preserved because they match the backend API contract;
see the sketch below for where that boundary sits.
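
A minimal TypeScript sketch of that boundary (illustrative only, not
code from this PR; the simplified schema and the `toExecutingEvent`
adapter are hypothetical): the Zod schema keeps the backend's
`prompt_id` wire field, while everything past the parse boundary uses
`jobId`.

```typescript
import { z } from 'zod'

// Wire-protocol shape: the backend still sends `prompt_id`, so the
// schema keeps the snake_case wire field name unchanged.
const zExecutingMessage = z.object({
  node: z.string(),
  prompt_id: z.string()
})
type ExecutingMessage = z.infer<typeof zExecutingMessage>

// Internal domain shape: everything past the parse boundary uses `jobId`.
interface ExecutingEvent {
  nodeId: string
  jobId: string
}

// Hypothetical adapter marking the rename boundary: wire-protocol names
// in, domain names out.
function toExecutingEvent(msg: ExecutingMessage): ExecutingEvent {
  return { nodeId: msg.node, jobId: msg.prompt_id }
}
```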

## Review Focus

- All changes are pure renames with no behavioral changes
- Wire-protocol fields (`prompt_id`) are deliberately unchanged to
maintain backend compatibility
- Test fixtures updated to use consistent `job-id` naming

Author: Christian Byrne
Date: 2026-02-20 02:10:53 -08:00
Committed by: GitHub
Parent: 541ad387b9
Commit: 473713cf02
39 changed files with 455 additions and 402 deletions

View File

@@ -339,7 +339,7 @@ describe('TopMenuSection', () => {
const pinia = createTestingPinia({ createSpy: vi.fn })
configureSettings(pinia, true)
const executionStore = useExecutionStore(pinia)
executionStore.activePromptId = 'prompt-1'
executionStore.activeJobId = 'job-1'
const ComfyActionbarStub = createComfyActionbarStub(actionbarTarget)
@@ -429,7 +429,7 @@ describe('TopMenuSection', () => {
const pinia = createTestingPinia({ createSpy: vi.fn })
configureSettings(pinia, true)
const executionStore = useExecutionStore(pinia)
executionStore.activePromptId = 'prompt-1'
executionStore.activeJobId = 'job-1'
const ComfyActionbarStub = createComfyActionbarStub(actionbarTarget)

View File

@@ -290,12 +290,12 @@ const showQueueContextMenu = (event: MouseEvent) => {
}
const handleClearQueue = async () => {
const pendingPromptIds = queueStore.pendingTasks
.map((task) => task.promptId)
const pendingJobIds = queueStore.pendingTasks
.map((task) => task.jobId)
.filter((id): id is string => typeof id === 'string' && id.length > 0)
await commandStore.execute('Comfy.ClearPendingTasks')
executionStore.clearInitializationByPromptIds(pendingPromptIds)
executionStore.clearInitializationByJobIds(pendingJobIds)
}
const openCustomNodeManager = async () => {

View File

@@ -204,22 +204,22 @@ const {
const displayedJobGroups = computed(() => groupedJobItems.value)
const onCancelItem = wrapWithErrorHandlingAsync(async (item: JobListItem) => {
const promptId = item.taskRef?.promptId
if (!promptId) return
const jobId = item.taskRef?.jobId
if (!jobId) return
if (item.state === 'running' || item.state === 'initialization') {
// Running/initializing jobs: interrupt execution
// Cloud backend uses deleteItem, local uses interrupt
if (isCloud) {
await api.deleteItem('queue', promptId)
await api.deleteItem('queue', jobId)
} else {
await api.interrupt(promptId)
await api.interrupt(jobId)
}
executionStore.clearInitializationByPromptId(promptId)
executionStore.clearInitializationByJobId(jobId)
await queueStore.update()
} else if (item.state === 'pending') {
// Pending jobs: remove from queue
await api.deleteItem('queue', promptId)
await api.deleteItem('queue', jobId)
await queueStore.update()
}
})
@@ -249,11 +249,11 @@ const openAssetsSidebar = () => {
const focusAssetInSidebar = async (item: JobListItem) => {
const task = item.taskRef
const promptId = task?.promptId
const jobId = task?.jobId
const preview = task?.previewOutput
if (!promptId || !preview) return
if (!jobId || !preview) return
const assetId = String(promptId)
const assetId = String(jobId)
openAssetsSidebar()
await nextTick()
await assetsStore.updateHistory()
@@ -275,37 +275,37 @@ const inspectJobAsset = wrapWithErrorHandlingAsync(
)
const cancelQueuedWorkflows = wrapWithErrorHandlingAsync(async () => {
// Capture pending promptIds before clearing
const pendingPromptIds = queueStore.pendingTasks
.map((task) => task.promptId)
// Capture pending jobIds before clearing
const pendingJobIds = queueStore.pendingTasks
.map((task) => task.jobId)
.filter((id): id is string => typeof id === 'string' && id.length > 0)
await commandStore.execute('Comfy.ClearPendingTasks')
// Clear initialization state for removed prompts
executionStore.clearInitializationByPromptIds(pendingPromptIds)
// Clear initialization state for removed jobs
executionStore.clearInitializationByJobIds(pendingJobIds)
})
const interruptAll = wrapWithErrorHandlingAsync(async () => {
const tasks = queueStore.runningTasks
const promptIds = tasks
.map((task) => task.promptId)
const jobIds = tasks
.map((task) => task.jobId)
.filter((id): id is string => typeof id === 'string' && id.length > 0)
if (!promptIds.length) return
if (!jobIds.length) return
// Cloud backend supports cancelling specific jobs via /queue delete,
// while /interrupt always targets the "first" job. Use the targeted API
// on cloud to ensure we cancel the workflow the user clicked.
if (isCloud) {
await Promise.all(promptIds.map((id) => api.deleteItem('queue', id)))
executionStore.clearInitializationByPromptIds(promptIds)
await Promise.all(jobIds.map((id) => api.deleteItem('queue', id)))
executionStore.clearInitializationByJobIds(jobIds)
await queueStore.update()
return
}
await Promise.all(promptIds.map((id) => api.interrupt(id)))
executionStore.clearInitializationByPromptIds(promptIds)
await Promise.all(jobIds.map((id) => api.interrupt(id)))
executionStore.clearInitializationByJobIds(jobIds)
await queueStore.update()
})

View File

@@ -37,7 +37,7 @@ function resetStores() {
queue.runningTasks = []
queue.historyTasks = []
exec.nodeProgressStatesByPrompt = {}
exec.nodeProgressStatesByJob = {}
}
function makeTask(
@@ -145,10 +145,10 @@ export const Queued: Story = {
makePendingTask('job-older-2', 101, Date.now() - 30_000)
)
// Queued at (in metadata on prompt[4])
// Queued at (in metadata on job tuple)
// One running workflow
exec.nodeProgressStatesByPrompt = {
exec.nodeProgressStatesByJob = {
p1: {
'1': {
value: 1,
@@ -198,7 +198,7 @@ export const QueuedParallel: Story = {
]
// Two parallel workflows running
exec.nodeProgressStatesByPrompt = {
exec.nodeProgressStatesByJob = {
p1: {
'1': {
value: 1,
@@ -248,7 +248,7 @@ export const Running: Story = {
makeHistoryTask('hist-r3', 252, 60, true)
]
exec.nodeProgressStatesByPrompt = {
exec.nodeProgressStatesByJob = {
p1: {
'1': {
value: 5,
@@ -293,7 +293,7 @@ export const QueuedZeroAheadSingleRunning: Story = {
queue.runningTasks = [makeRunningTaskWithStart('running-1', 505, 20)]
exec.nodeProgressStatesByPrompt = {
exec.nodeProgressStatesByJob = {
p1: {
'1': {
value: 1,
@@ -341,7 +341,7 @@ export const QueuedZeroAheadMultiRunning: Story = {
makeRunningTaskWithStart('running-b', 507, 10)
]
exec.nodeProgressStatesByPrompt = {
exec.nodeProgressStatesByJob = {
p1: {
'1': {
value: 2,

View File

@@ -139,7 +139,7 @@ const copyJobId = () => void copyToClipboard(jobIdValue.value)
const taskForJob = computed(() => {
const pid = props.jobId
const findIn = (arr: TaskItemImpl[]) =>
arr.find((t) => String(t.promptId ?? '') === String(pid))
arr.find((t) => String(t.jobId ?? '') === String(pid))
return (
findIn(queueStore.pendingTasks) ||
findIn(queueStore.runningTasks) ||
@@ -151,9 +151,7 @@ const taskForJob = computed(() => {
const jobState = computed(() => {
const task = taskForJob.value
if (!task) return null
const isInitializing = executionStore.isPromptInitializing(
String(task?.promptId)
)
const isInitializing = executionStore.isJobInitializing(String(task?.jobId))
return jobStateFromTask(task, isInitializing)
})

View File

@@ -8,13 +8,13 @@ import { useJobErrorReporting } from '@/components/queue/job/useJobErrorReportin
import type { ExecutionError } from '@/platform/remote/comfyui/jobs/jobTypes'
const createTaskWithError = (
promptId: string,
jobId: string,
errorMessage?: string,
executionError?: ExecutionError,
createTime?: number
): TaskItemImpl =>
({
promptId,
jobId,
errorMessage,
executionError,
createTime: createTime ?? Date.now()

View File

@@ -80,7 +80,7 @@ const sampleAssets: AssetItem[] = [
size: 1887437,
tags: [],
user_metadata: {
promptId: 'job-running-1',
jobId: 'job-running-1',
nodeId: 12,
executionTimeInSeconds: 1.84
}

View File

@@ -9,7 +9,7 @@
>
<div class="flex items-center gap-2">
<span class="font-bold">{{ $t('assetBrowser.jobId') }}:</span>
<span class="text-sm">{{ folderPromptId?.substring(0, 8) }}</span>
<span class="text-sm">{{ folderJobId?.substring(0, 8) }}</span>
<button
class="m-0 cursor-pointer border-0 bg-transparent p-0 outline-0"
role="button"
@@ -273,10 +273,10 @@ const executionStore = useExecutionStore()
const settingStore = useSettingStore()
const activeTab = ref<'input' | 'output'>('output')
const folderPromptId = ref<string | null>(null)
const folderJobId = ref<string | null>(null)
const folderExecutionTime = ref<number | undefined>(undefined)
const expectedFolderCount = ref(0)
const isInFolderView = computed(() => folderPromptId.value !== null)
const isInFolderView = computed(() => folderJobId.value !== null)
const viewMode = useStorage<'list' | 'grid'>(
'Comfy.Assets.Sidebar.ViewMode',
'grid'
@@ -559,13 +559,13 @@ const handleBulkDelete = async (assets: AssetItem[]) => {
}
const handleClearQueue = async () => {
const pendingPromptIds = queueStore.pendingTasks
.map((task) => task.promptId)
const pendingJobIds = queueStore.pendingTasks
.map((task) => task.jobId)
.filter((id): id is string => typeof id === 'string' && id.length > 0)
await commandStore.execute('Comfy.ClearPendingTasks')
executionStore.clearInitializationByPromptIds(pendingPromptIds)
executionStore.clearInitializationByJobIds(pendingJobIds)
}
const handleBulkAddToWorkflow = async (assets: AssetItem[]) => {
@@ -628,14 +628,14 @@ const enterFolderView = async (asset: AssetItem) => {
return
}
const { promptId, executionTimeInSeconds } = metadata
const { jobId, executionTimeInSeconds } = metadata
if (!promptId) {
if (!jobId) {
console.warn('Missing required folder view data')
return
}
folderPromptId.value = promptId
folderJobId.value = jobId
folderExecutionTime.value = executionTimeInSeconds
expectedFolderCount.value = metadata.outputCount ?? 0
@@ -653,7 +653,7 @@ const enterFolderView = async (asset: AssetItem) => {
}
const exitFolderView = () => {
folderPromptId.value = null
folderJobId.value = null
folderExecutionTime.value = undefined
expectedFolderCount.value = 0
folderAssets.value = []
@@ -679,9 +679,9 @@ const handleEmptySpaceClick = () => {
}
const copyJobId = async () => {
if (folderPromptId.value) {
if (folderJobId.value) {
try {
await navigator.clipboard.writeText(folderPromptId.value)
await navigator.clipboard.writeText(folderJobId.value)
toast.add({
severity: 'success',
summary: t('mediaAsset.jobIdToast.copied'),

View File

@@ -6,10 +6,11 @@ import type { Ref } from 'vue'
import { useJobList } from '@/composables/queue/useJobList'
import type { JobState } from '@/types/queue'
import type { BuildJobDisplayCtx } from '@/utils/queueDisplay'
import { buildJobDisplay } from '@/utils/queueDisplay'
import type { TaskItemImpl } from '@/stores/queueStore'
type TestTask = {
promptId: string
jobId: string
queueIndex: number
mockState: JobState
executionTime?: number
@@ -69,7 +70,7 @@ vi.mock('@/composables/queue/useQueueProgress', () => ({
vi.mock('@/utils/queueDisplay', () => ({
buildJobDisplay: vi.fn(
(task: TaskItemImpl, state: JobState, options: BuildJobDisplayCtx) => ({
primary: `Job ${task.promptId}`,
primary: `Job ${task.jobId}`,
secondary: `${state} meta`,
iconName: `${state}-icon`,
iconImageUrl: undefined,
@@ -108,21 +109,21 @@ vi.mock('@/stores/queueStore', () => ({
}))
let executionStoreMock: {
activePromptId: string | null
activeJobId: string | null
executingNode: null | { title?: string; type?: string }
isPromptInitializing: (promptId?: string | number) => boolean
isJobInitializing: (jobId?: string | number) => boolean
}
let isPromptInitializingMock: (promptId?: string | number) => boolean
let isJobInitializingMock: (jobId?: string | number) => boolean
const ensureExecutionStore = () => {
if (!isPromptInitializingMock) {
isPromptInitializingMock = vi.fn(() => false)
if (!isJobInitializingMock) {
isJobInitializingMock = vi.fn(() => false)
}
if (!executionStoreMock) {
executionStoreMock = reactive({
activePromptId: null as string | null,
activeJobId: null as string | null,
executingNode: null as null | { title?: string; type?: string },
isPromptInitializing: (promptId?: string | number) =>
isPromptInitializingMock(promptId)
isJobInitializing: (jobId?: string | number) =>
isJobInitializingMock(jobId)
})
}
return executionStoreMock
@@ -172,8 +173,7 @@ vi.mock('@/platform/workflow/management/stores/workflowStore', () => ({
const createTask = (
overrides: Partial<TestTask> & { mockState?: JobState } = {}
): TestTask => ({
promptId:
overrides.promptId ?? `task-${Math.random().toString(36).slice(2, 7)}`,
jobId: overrides.jobId ?? `task-${Math.random().toString(36).slice(2, 7)}`,
queueIndex: overrides.queueIndex ?? 0,
mockState: overrides.mockState ?? 'pending',
executionTime: overrides.executionTime,
@@ -201,7 +201,7 @@ const resetStores = () => {
queueStore.historyTasks = []
const executionStore = ensureExecutionStore()
executionStore.activePromptId = null
executionStore.activeJobId = null
executionStore.executingNode = null
const jobPreviewStore = ensureJobPreviewStore()
@@ -219,9 +219,9 @@ const resetStores = () => {
localeRef.value = 'en-US'
tMock.mockClear()
if (isPromptInitializingMock) {
vi.mocked(isPromptInitializingMock).mockReset()
vi.mocked(isPromptInitializingMock).mockReturnValue(false)
if (isJobInitializingMock) {
vi.mocked(isJobInitializingMock).mockReset()
vi.mocked(isJobInitializingMock).mockReturnValue(false)
}
}
@@ -255,10 +255,82 @@ describe('useJobList', () => {
return api!
}
it('tracks recently added pending jobs and clears the hint after expiry', async () => {
vi.useFakeTimers()
queueStoreMock.pendingTasks = [
createTask({ jobId: '1', queueIndex: 1, mockState: 'pending' })
]
const { jobItems } = initComposable()
await flush()
jobItems.value
expect(buildJobDisplay).toHaveBeenCalledWith(
expect.anything(),
'pending',
expect.objectContaining({ showAddedHint: true })
)
vi.mocked(buildJobDisplay).mockClear()
await vi.advanceTimersByTimeAsync(3000)
await flush()
jobItems.value
expect(buildJobDisplay).toHaveBeenCalledWith(
expect.anything(),
'pending',
expect.objectContaining({ showAddedHint: false })
)
})
it('removes pending hint immediately when the task leaves the queue', async () => {
vi.useFakeTimers()
const taskId = '2'
queueStoreMock.pendingTasks = [
createTask({ jobId: taskId, queueIndex: 1, mockState: 'pending' })
]
const { jobItems } = initComposable()
await flush()
jobItems.value
queueStoreMock.pendingTasks = []
await flush()
expect(vi.getTimerCount()).toBe(0)
vi.mocked(buildJobDisplay).mockClear()
queueStoreMock.pendingTasks = [
createTask({ jobId: taskId, queueIndex: 2, mockState: 'pending' })
]
await flush()
jobItems.value
expect(buildJobDisplay).toHaveBeenCalledWith(
expect.anything(),
'pending',
expect.objectContaining({ showAddedHint: true })
)
})
it('cleans up timeouts on unmount', async () => {
vi.useFakeTimers()
queueStoreMock.pendingTasks = [
createTask({ jobId: '3', queueIndex: 1, mockState: 'pending' })
]
initComposable()
await flush()
expect(vi.getTimerCount()).toBeGreaterThan(0)
wrapper?.unmount()
wrapper = null
await flush()
expect(vi.getTimerCount()).toBe(0)
})
it('sorts all tasks by create time', async () => {
queueStoreMock.pendingTasks = [
createTask({
promptId: 'p',
jobId: 'p',
queueIndex: 1,
mockState: 'pending',
createTime: 3000
@@ -266,7 +338,7 @@ describe('useJobList', () => {
]
queueStoreMock.runningTasks = [
createTask({
promptId: 'r',
jobId: 'r',
queueIndex: 5,
mockState: 'running',
createTime: 2000
@@ -274,7 +346,7 @@ describe('useJobList', () => {
]
queueStoreMock.historyTasks = [
createTask({
promptId: 'h',
jobId: 'h',
queueIndex: 3,
mockState: 'completed',
createTime: 1000,
@@ -285,7 +357,7 @@ describe('useJobList', () => {
const { allTasksSorted } = initComposable()
await flush()
expect(allTasksSorted.value.map((task) => task.promptId)).toEqual([
expect(allTasksSorted.value.map((task) => task.jobId)).toEqual([
'p',
'r',
'h'
@@ -294,9 +366,9 @@ describe('useJobList', () => {
it('filters by job tab and resets failed tab when failures disappear', async () => {
queueStoreMock.historyTasks = [
createTask({ promptId: 'c', queueIndex: 3, mockState: 'completed' }),
createTask({ promptId: 'f', queueIndex: 2, mockState: 'failed' }),
createTask({ promptId: 'p', queueIndex: 1, mockState: 'pending' })
createTask({ jobId: 'c', queueIndex: 3, mockState: 'completed' }),
createTask({ jobId: 'f', queueIndex: 2, mockState: 'failed' }),
createTask({ jobId: 'p', queueIndex: 1, mockState: 'pending' })
]
const instance = initComposable()
@@ -304,15 +376,15 @@ describe('useJobList', () => {
instance.selectedJobTab.value = 'Completed'
await flush()
expect(instance.filteredTasks.value.map((t) => t.promptId)).toEqual(['c'])
expect(instance.filteredTasks.value.map((t) => t.jobId)).toEqual(['c'])
instance.selectedJobTab.value = 'Failed'
await flush()
expect(instance.filteredTasks.value.map((t) => t.promptId)).toEqual(['f'])
expect(instance.filteredTasks.value.map((t) => t.jobId)).toEqual(['f'])
expect(instance.hasFailedJobs.value).toBe(true)
queueStoreMock.historyTasks = [
createTask({ promptId: 'c', queueIndex: 3, mockState: 'completed' })
createTask({ jobId: 'c', queueIndex: 3, mockState: 'completed' })
]
await flush()
@@ -323,13 +395,13 @@ describe('useJobList', () => {
it('filters by active workflow when requested', async () => {
queueStoreMock.pendingTasks = [
createTask({
promptId: 'wf-1',
jobId: 'wf-1',
queueIndex: 2,
mockState: 'pending',
workflowId: 'workflow-1'
}),
createTask({
promptId: 'wf-2',
jobId: 'wf-2',
queueIndex: 1,
mockState: 'pending',
workflowId: 'workflow-2'
@@ -346,28 +418,26 @@ describe('useJobList', () => {
workflowStoreMock.activeWorkflow = { activeState: { id: 'workflow-1' } }
await flush()
expect(instance.filteredTasks.value.map((t) => t.promptId)).toEqual([
'wf-1'
])
expect(instance.filteredTasks.value.map((t) => t.jobId)).toEqual(['wf-1'])
})
it('hydrates job items with active progress and compute hours', async () => {
queueStoreMock.runningTasks = [
createTask({
promptId: 'active',
jobId: 'active',
queueIndex: 3,
mockState: 'running',
executionTime: 7_200_000
}),
createTask({
promptId: 'other',
jobId: 'other',
queueIndex: 2,
mockState: 'running',
executionTime: 3_600_000
})
]
executionStoreMock.activePromptId = 'active'
executionStoreMock.activeJobId = 'active'
executionStoreMock.executingNode = { title: 'Render Node' }
totalPercent.value = 80
currentNodePercent.value = 40
@@ -390,7 +460,7 @@ describe('useJobList', () => {
it('assigns preview urls for running jobs when previews enabled', async () => {
queueStoreMock.runningTasks = [
createTask({
promptId: 'live-preview',
jobId: 'live-preview',
queueIndex: 1,
mockState: 'running'
})
@@ -409,7 +479,7 @@ describe('useJobList', () => {
it('omits preview urls when previews are disabled', async () => {
queueStoreMock.runningTasks = [
createTask({
promptId: 'disabled-preview',
jobId: 'disabled-preview',
queueIndex: 1,
mockState: 'running'
})
@@ -450,28 +520,28 @@ describe('useJobList', () => {
vi.setSystemTime(new Date('2024-01-10T12:00:00Z'))
queueStoreMock.historyTasks = [
createTask({
promptId: 'today-small',
jobId: 'today-small',
queueIndex: 4,
mockState: 'completed',
executionEndTimestamp: Date.now(),
executionTime: 2_000
}),
createTask({
promptId: 'today-large',
jobId: 'today-large',
queueIndex: 3,
mockState: 'completed',
executionEndTimestamp: Date.now(),
executionTime: 5_000
}),
createTask({
promptId: 'yesterday',
jobId: 'yesterday',
queueIndex: 2,
mockState: 'failed',
executionEndTimestamp: Date.now() - 86_400_000,
executionTime: 1_000
}),
createTask({
promptId: 'undated',
jobId: 'undated',
queueIndex: 1,
mockState: 'pending'
})

View File

@@ -127,7 +127,7 @@ export function useJobList() {
watch(
() =>
queueStore.pendingTasks
.map((task) => taskIdToKey(task.promptId))
.map((task) => taskIdToKey(task.jobId))
.filter((id): id is string => !!id),
(pendingIds) => {
const pendingSet = new Set(pendingIds)
@@ -158,7 +158,7 @@ export function useJobList() {
const shouldShowAddedHint = (task: TaskItemImpl, state: JobState) => {
if (state !== 'pending') return false
const id = taskIdToKey(task.promptId)
const id = taskIdToKey(task.jobId)
if (!id) return false
return recentlyAddedPendingIds.value.has(id)
}
@@ -183,8 +183,8 @@ export function useJobList() {
})
const undatedLabel = computed(() => t('queue.jobList.undated') || 'Undated')
const isJobInitializing = (promptId: string | number | undefined) =>
executionStore.isPromptInitializing(promptId)
const isJobInitializing = (jobId: string | number | undefined) =>
executionStore.isJobInitializing(jobId)
const currentNodeName = computed(() => {
return resolveNodeDisplayName(executionStore.executingNode, {
@@ -212,7 +212,7 @@ export function useJobList() {
const tasksWithJobState = computed<TaskWithState[]>(() =>
allTasksSorted.value.map((task) => ({
task,
state: jobStateFromTask(task, isJobInitializing(task?.promptId))
state: jobStateFromTask(task, isJobInitializing(task?.jobId))
}))
)
@@ -255,10 +255,9 @@ export function useJobList() {
const jobItems = computed<JobListItem[]>(() => {
return filteredTaskEntries.value.map(({ task, state }) => {
const isActive =
String(task.promptId ?? '') ===
String(executionStore.activePromptId ?? '')
String(task.jobId ?? '') === String(executionStore.activeJobId ?? '')
const showAddedHint = shouldShowAddedHint(task, state)
const promptKey = taskIdToKey(task.promptId)
const promptKey = taskIdToKey(task.jobId)
const promptPreviewUrl =
state === 'running' && jobPreviewStore.isPreviewEnabled && promptKey
? jobPreviewStore.previewsByPromptId[promptKey]
@@ -277,7 +276,7 @@ export function useJobList() {
})
return {
id: String(task.promptId),
id: String(task.jobId),
title: display.primary,
meta: display.secondary,
state,
@@ -334,7 +333,7 @@ export function useJobList() {
groupIdx = groups.length - 1
index.set(key, groupIdx)
}
const ji = jobItemById.value.get(String(task.promptId))
const ji = jobItemById.value.get(String(task.jobId))
if (ji) groups[groupIdx].items.push(ji)
}

View File

@@ -72,8 +72,7 @@ const interruptMock = vi.fn()
const deleteItemMock = vi.fn()
vi.mock('@/scripts/api', () => ({
api: {
interrupt: (runningPromptId: string | null) =>
interruptMock(runningPromptId),
interrupt: (runningJobId: string | null) => interruptMock(runningJobId),
deleteItem: (type: string, id: string) => deleteItemMock(type, id)
}
}))
@@ -120,7 +119,7 @@ vi.mock('@/stores/queueStore', () => ({
}))
const executionStoreMock = {
clearInitializationByPromptId: vi.fn()
clearInitializationByJobId: vi.fn()
}
vi.mock('@/stores/executionStore', () => ({
useExecutionStore: () => executionStoreMock

View File

@@ -84,7 +84,7 @@ export function useJobMenu(
} else if (target.state === 'pending') {
await api.deleteItem('queue', target.id)
}
executionStore.clearInitializationByPromptId(target.id)
executionStore.clearInitializationByJobId(target.id)
await queueStore.update()
}

View File

@@ -17,7 +17,7 @@ const executionStore = reactive<{
executingNode: unknown
executingNodeProgress: number
nodeProgressStates: Record<string, unknown>
activePrompt: {
activeJob: {
workflow: {
changeTracker: {
activeState: {
@@ -32,7 +32,7 @@ const executionStore = reactive<{
executingNode: null,
executingNodeProgress: 0,
nodeProgressStates: {},
activePrompt: null
activeJob: null
})
vi.mock('@/stores/executionStore', () => ({
useExecutionStore: () => executionStore
@@ -76,7 +76,7 @@ describe('useBrowserTabTitle', () => {
executionStore.executingNode = null
executionStore.executingNodeProgress = 0
executionStore.nodeProgressStates = {}
executionStore.activePrompt = null
executionStore.activeJob = null
// reset setting and workflow stores
vi.mocked(settingStore.get).mockReturnValue('Enabled')
@@ -187,7 +187,7 @@ describe('useBrowserTabTitle', () => {
executionStore.nodeProgressStates = {
'1': { state: 'running', value: 5, max: 10, node: '1', prompt_id: 'test' }
}
executionStore.activePrompt = {
executionStore.activeJob = {
workflow: {
changeTracker: {
activeState: {

View File

@@ -77,7 +77,7 @@ export const useBrowserTabTitle = () => {
const [nodeId, state] = runningNodes[0]
const progress = Math.round((state.value / state.max) * 100)
const nodeType =
executionStore.activePrompt?.workflow?.changeTracker?.activeState.nodes.find(
executionStore.activeJob?.workflow?.changeTracker?.activeState.nodes.find(
(n) => String(n.id) === nodeId
)?.type || 'Node'

View File

@@ -312,7 +312,7 @@ export function useCoreCommands(): ComfyCommand[] {
label: 'Interrupt',
category: 'essentials' as const,
function: async () => {
await api.interrupt(executionStore.activePromptId)
await api.interrupt(executionStore.activeJobId)
toastStore.add({
severity: 'info',
summary: t('g.interrupted'),

View File

@@ -27,7 +27,7 @@ export function mapTaskOutputToAssetItem(
output: ResultItemImpl
): AssetItem {
const metadata: OutputAssetMetadata = {
promptId: taskItem.promptId,
jobId: taskItem.jobId,
nodeId: output.nodeId,
subfolder: output.subfolder,
executionTimeInSeconds: taskItem.executionTimeInSeconds,
@@ -36,7 +36,7 @@ export function mapTaskOutputToAssetItem(
}
return {
id: taskItem.promptId,
id: taskItem.jobId,
name: output.filename,
size: 0,
created_at: taskItem.executionStartTimestamp

View File

@@ -46,12 +46,12 @@ export function useMediaAssetActions() {
assetType: string
): Promise<void> => {
if (assetType === 'output') {
const promptId =
getOutputAssetMetadata(asset.user_metadata)?.promptId || asset.id
if (!promptId) {
throw new Error('Unable to extract prompt ID from asset')
const jobId =
getOutputAssetMetadata(asset.user_metadata)?.jobId || asset.id
if (!jobId) {
throw new Error('Unable to extract job ID from asset')
}
await api.deleteItem('history', promptId)
await api.deleteItem('history', jobId)
} else {
// Input assets can only be deleted in cloud environment
if (!isCloud) {
@@ -141,16 +141,16 @@ export function useMediaAssetActions() {
for (const asset of assets) {
if (getAssetType(asset) === 'output') {
const metadata = getOutputAssetMetadata(asset.user_metadata)
const promptId = metadata?.promptId || asset.id
if (!jobIds.includes(promptId)) {
jobIds.push(promptId)
const jobId = metadata?.jobId || asset.id
if (!jobIds.includes(jobId)) {
jobIds.push(jobId)
}
if (metadata?.promptId && asset.name) {
if (!jobAssetNameFilters[metadata.promptId]) {
jobAssetNameFilters[metadata.promptId] = []
if (metadata?.jobId && asset.name) {
if (!jobAssetNameFilters[metadata.jobId]) {
jobAssetNameFilters[metadata.jobId] = []
}
if (!jobAssetNameFilters[metadata.promptId].includes(asset.name)) {
jobAssetNameFilters[metadata.promptId].push(asset.name)
if (!jobAssetNameFilters[metadata.jobId].includes(asset.name)) {
jobAssetNameFilters[metadata.jobId].push(asset.name)
}
}
} else {
@@ -191,11 +191,11 @@ export function useMediaAssetActions() {
if (!targetAsset) return
const metadata = getOutputAssetMetadata(targetAsset.user_metadata)
const promptId =
metadata?.promptId ||
const jobId =
metadata?.jobId ||
(getAssetType(targetAsset) === 'output' ? targetAsset.id : undefined)
if (!promptId) {
if (!jobId) {
toast.add({
severity: 'warn',
summary: t('g.warning'),
@@ -205,7 +205,7 @@ export function useMediaAssetActions() {
return
}
await copyToClipboard(promptId)
await copyToClipboard(jobId)
}
/**

View File

@@ -40,7 +40,7 @@ function createAsset(overrides: Partial<AssetItem> = {}): AssetItem {
tags: [],
created_at: '2025-01-01T00:00:00.000Z',
user_metadata: {
promptId: 'prompt-1',
jobId: 'job-1',
nodeId: 'node-1',
subfolder: 'outputs'
},
@@ -74,7 +74,7 @@ describe('useOutputStacks', () => {
await toggleStack(parent)
expect(mocks.resolveOutputAssetItems).toHaveBeenCalledWith(
expect.objectContaining({ promptId: 'prompt-1' }),
expect.objectContaining({ jobId: 'job-1' }),
{
createdAt: parent.created_at,
excludeOutputKey: 'node-1-outputs-parent.png'

View File

@@ -19,25 +19,25 @@ type UseOutputStacksOptions = {
}
export function useOutputStacks({ assets }: UseOutputStacksOptions) {
const expandedStackPromptIds = ref<Set<string>>(new Set())
const stackChildrenByPromptId = ref<Record<string, AssetItem[]>>({})
const loadingStackPromptIds = ref<Set<string>>(new Set())
const expandedStackJobIds = ref<Set<string>>(new Set())
const stackChildrenByJobId = ref<Record<string, AssetItem[]>>({})
const loadingStackJobIds = ref<Set<string>>(new Set())
const assetItems = computed<OutputStackListItem[]>(() => {
const items: OutputStackListItem[] = []
for (const asset of assets.value) {
const promptId = getStackPromptId(asset)
const jobId = getStackJobId(asset)
items.push({
key: `asset-${asset.id}`,
asset
})
if (!promptId || !expandedStackPromptIds.value.has(promptId)) {
if (!jobId || !expandedStackJobIds.value.has(jobId)) {
continue
}
const children = stackChildrenByPromptId.value[promptId] ?? []
const children = stackChildrenByJobId.value[jobId] ?? []
for (const child of children) {
items.push({
key: `asset-${child.id}`,
@@ -54,55 +54,55 @@ export function useOutputStacks({ assets }: UseOutputStacksOptions) {
assetItems.value.map((item) => item.asset)
)
function getStackPromptId(asset: AssetItem): string | null {
function getStackJobId(asset: AssetItem): string | null {
const metadata = getOutputAssetMetadata(asset.user_metadata)
return metadata?.promptId ?? null
return metadata?.jobId ?? null
}
function isStackExpanded(asset: AssetItem): boolean {
const promptId = getStackPromptId(asset)
if (!promptId) return false
return expandedStackPromptIds.value.has(promptId)
const jobId = getStackJobId(asset)
if (!jobId) return false
return expandedStackJobIds.value.has(jobId)
}
async function toggleStack(asset: AssetItem) {
const promptId = getStackPromptId(asset)
if (!promptId) return
const jobId = getStackJobId(asset)
if (!jobId) return
if (expandedStackPromptIds.value.has(promptId)) {
const next = new Set(expandedStackPromptIds.value)
next.delete(promptId)
expandedStackPromptIds.value = next
if (expandedStackJobIds.value.has(jobId)) {
const next = new Set(expandedStackJobIds.value)
next.delete(jobId)
expandedStackJobIds.value = next
return
}
if (!stackChildrenByPromptId.value[promptId]?.length) {
if (loadingStackPromptIds.value.has(promptId)) {
if (!stackChildrenByJobId.value[jobId]?.length) {
if (loadingStackJobIds.value.has(jobId)) {
return
}
const nextLoading = new Set(loadingStackPromptIds.value)
nextLoading.add(promptId)
loadingStackPromptIds.value = nextLoading
const nextLoading = new Set(loadingStackJobIds.value)
nextLoading.add(jobId)
loadingStackJobIds.value = nextLoading
const children = await resolveStackChildren(asset)
const afterLoading = new Set(loadingStackPromptIds.value)
afterLoading.delete(promptId)
loadingStackPromptIds.value = afterLoading
const afterLoading = new Set(loadingStackJobIds.value)
afterLoading.delete(jobId)
loadingStackJobIds.value = afterLoading
if (!children.length) {
return
}
stackChildrenByPromptId.value = {
...stackChildrenByPromptId.value,
[promptId]: children
stackChildrenByJobId.value = {
...stackChildrenByJobId.value,
[jobId]: children
}
}
const nextExpanded = new Set(expandedStackPromptIds.value)
nextExpanded.add(promptId)
expandedStackPromptIds.value = nextExpanded
const nextExpanded = new Set(expandedStackJobIds.value)
nextExpanded.add(jobId)
expandedStackJobIds.value = nextExpanded
}
async function resolveStackChildren(asset: AssetItem): Promise<AssetItem[]> {

View File

@@ -6,7 +6,7 @@ import type { ResultItemImpl } from '@/stores/queueStore'
* Extends Record<string, unknown> for compatibility with AssetItem schema
*/
export interface OutputAssetMetadata extends Record<string, unknown> {
promptId: string
jobId: string
nodeId: string | number
subfolder: string
executionTimeInSeconds?: number
@@ -24,7 +24,7 @@ function isOutputAssetMetadata(
): metadata is OutputAssetMetadata {
if (!metadata) return false
return (
typeof metadata.promptId === 'string' &&
typeof metadata.jobId === 'string' &&
(typeof metadata.nodeId === 'string' || typeof metadata.nodeId === 'number')
)
}

View File

@@ -49,7 +49,7 @@ describe('resolveOutputAssetItems', () => {
url: 'https://example.com/b.png'
})
const metadata: OutputAssetMetadata = {
promptId: 'prompt-1',
jobId: 'job-1',
nodeId: '1',
subfolder: 'sub',
executionTimeInSeconds: 12.5,
@@ -66,7 +66,7 @@ describe('resolveOutputAssetItems', () => {
expect(results).toHaveLength(1)
expect(results[0]).toEqual(
expect.objectContaining({
id: 'prompt-1-1-sub-a.png',
id: 'job-1-1-sub-a.png',
name: 'a.png',
created_at: '2025-01-01T00:00:00.000Z',
tags: ['output'],
@@ -75,7 +75,7 @@ describe('resolveOutputAssetItems', () => {
)
expect(results[0].user_metadata).toEqual(
expect.objectContaining({
promptId: 'prompt-1',
jobId: 'job-1',
nodeId: '1',
subfolder: 'sub',
executionTimeInSeconds: 12.5
@@ -95,7 +95,7 @@ describe('resolveOutputAssetItems', () => {
url: 'https://example.com/full.png'
})
const metadata: OutputAssetMetadata = {
promptId: 'prompt-2',
jobId: 'job-2',
nodeId: '1',
subfolder: 'sub',
outputCount: 3,
@@ -111,7 +111,7 @@ describe('resolveOutputAssetItems', () => {
const results = await resolveOutputAssetItems(metadata)
expect(mocks.getJobDetail).toHaveBeenCalledWith('prompt-2')
expect(mocks.getJobDetail).toHaveBeenCalledWith('job-2')
expect(mocks.getPreviewableOutputsFromJobDetail).toHaveBeenCalledWith(
jobDetail
)
@@ -129,7 +129,7 @@ describe('resolveOutputAssetItems', () => {
url: 'https://example.com/root.png'
})
const metadata: OutputAssetMetadata = {
promptId: 'prompt-root',
jobId: 'job-root',
nodeId: '1',
subfolder: '',
outputCount: 1,
@@ -144,7 +144,7 @@ describe('resolveOutputAssetItems', () => {
if (!asset) {
throw new Error('Expected a root output asset')
}
expect(asset.id).toBe('prompt-root-1--root.png')
expect(asset.id).toBe('job-root-1--root.png')
if (!asset.user_metadata) {
throw new Error('Expected output metadata')
}

View File

@@ -8,7 +8,7 @@ import {
import type { ResultItemImpl } from '@/stores/queueStore'
type OutputAssetMapOptions = {
promptId: string
jobId: string
outputs: readonly ResultItemImpl[]
createdAt?: string
executionTimeInSeconds?: number
@@ -51,7 +51,7 @@ export function getOutputKey({
}
function mapOutputsToAssetItems({
promptId,
jobId,
outputs,
createdAt,
executionTimeInSeconds,
@@ -67,14 +67,14 @@ function mapOutputsToAssetItems({
}
items.push({
id: `${promptId}-${outputKey}`,
id: `${jobId}-${outputKey}`,
name: output.filename,
size: 0,
created_at: createdAtValue,
tags: ['output'],
preview_url: output.url,
user_metadata: {
promptId,
jobId,
nodeId: output.nodeId,
subfolder: output.subfolder,
executionTimeInSeconds,
@@ -92,7 +92,7 @@ export async function resolveOutputAssetItems(
): Promise<AssetItem[]> {
let outputsToDisplay = metadata.allOutputs ?? []
if (shouldLoadFullOutputs(metadata.outputCount, outputsToDisplay.length)) {
const jobDetail = await getJobDetail(metadata.promptId)
const jobDetail = await getJobDetail(metadata.jobId)
const previewableOutputs = getPreviewableOutputsFromJobDetail(jobDetail)
if (previewableOutputs.length) {
outputsToDisplay = previewableOutputs
@@ -100,7 +100,7 @@ export async function resolveOutputAssetItems(
}
return mapOutputsToAssetItems({
promptId: metadata.promptId,
jobId: metadata.jobId,
outputs: outputsToDisplay,
createdAt,
executionTimeInSeconds: metadata.executionTimeInSeconds,

View File

@@ -8,7 +8,7 @@
import type { ComfyWorkflowJSON } from '@/platform/workflow/validation/schemas/workflowSchema'
import { validateComfyWorkflow } from '@/platform/workflow/validation/schemas/workflowSchema'
import type { PromptId } from '@/schemas/apiSchema'
import type { JobId } from '@/schemas/apiSchema'
import type {
JobDetail,
@@ -119,19 +119,19 @@ export async function fetchQueue(
*/
export async function fetchJobDetail(
fetchApi: (url: string) => Promise<Response>,
promptId: PromptId
jobId: JobId
): Promise<JobDetail | undefined> {
try {
const res = await fetchApi(`/jobs/${encodeURIComponent(promptId)}`)
const res = await fetchApi(`/jobs/${encodeURIComponent(jobId)}`)
if (!res.ok) {
console.warn(`Job not found for prompt ${promptId}`)
console.warn(`Job not found for job ${jobId}`)
return undefined
}
return zJobDetail.parse(await res.json())
} catch (error) {
console.error(`Failed to fetch job detail for prompt ${promptId}:`, error)
console.error(`Failed to fetch job detail for job ${jobId}:`, error)
return undefined
}
}

View File

@@ -21,7 +21,7 @@ const mockWorkflow: ComfyWorkflowJSON = {
// Jobs API detail response structure (matches actual /jobs/{id} response)
// workflow is nested at: workflow.extra_data.extra_pnginfo.workflow
const mockJobDetailResponse: JobDetail = {
id: 'test-prompt-id',
id: 'test-job-id',
status: 'completed',
create_time: 1234567890,
update_time: 1234567900,
@@ -43,15 +43,15 @@ const mockJobDetailResponse: JobDetail = {
}
describe('fetchJobDetail', () => {
it('should fetch job detail from /jobs/{prompt_id} endpoint', async () => {
it('should fetch job detail from /jobs/{job_id} endpoint', async () => {
const mockFetchApi = vi.fn().mockResolvedValue({
ok: true,
json: async () => mockJobDetailResponse
})
await fetchJobDetail(mockFetchApi, 'test-prompt-id')
await fetchJobDetail(mockFetchApi, 'test-job-id')
expect(mockFetchApi).toHaveBeenCalledWith('/jobs/test-prompt-id')
expect(mockFetchApi).toHaveBeenCalledWith('/jobs/test-job-id')
})
it('should return job detail with workflow and outputs', async () => {
@@ -60,10 +60,10 @@ describe('fetchJobDetail', () => {
json: async () => mockJobDetailResponse
})
const result = await fetchJobDetail(mockFetchApi, 'test-prompt-id')
const result = await fetchJobDetail(mockFetchApi, 'test-job-id')
expect(result).toBeDefined()
expect(result?.id).toBe('test-prompt-id')
expect(result?.id).toBe('test-job-id')
expect(result?.outputs).toEqual(mockJobDetailResponse.outputs)
expect(result?.workflow).toBeDefined()
})
@@ -82,7 +82,7 @@ describe('fetchJobDetail', () => {
it('should handle fetch errors gracefully', async () => {
const mockFetchApi = vi.fn().mockRejectedValue(new Error('Network error'))
const result = await fetchJobDetail(mockFetchApi, 'test-prompt-id')
const result = await fetchJobDetail(mockFetchApi, 'test-job-id')
expect(result).toBeUndefined()
})
@@ -95,7 +95,7 @@ describe('fetchJobDetail', () => {
}
})
const result = await fetchJobDetail(mockFetchApi, 'test-prompt-id')
const result = await fetchJobDetail(mockFetchApi, 'test-job-id')
expect(result).toBeUndefined()
})

View File

@@ -19,7 +19,7 @@ import { getJobWorkflow } from '@/services/jobOutputCache'
* @returns WorkflowSource with workflow and generated filename
*
* @example
* const asset = { name: 'output.png', user_metadata: { promptId: '123' } }
* const asset = { name: 'output.png', user_metadata: { jobId: '123' } }
* const { workflow, filename } = await extractWorkflowFromAsset(asset)
*/
export async function extractWorkflowFromAsset(asset: AssetItem): Promise<{
@@ -30,8 +30,8 @@ export async function extractWorkflowFromAsset(asset: AssetItem): Promise<{
// For output assets: use jobs API (with caching and validation)
const metadata = getOutputAssetMetadata(asset.user_metadata)
if (metadata?.promptId) {
const workflow = await getJobWorkflow(metadata.promptId)
if (metadata?.jobId) {
const workflow = await getJobWorkflow(metadata.jobId)
return { workflow: workflow ?? null, filename: baseFilename }
}

View File

@@ -135,7 +135,7 @@ function allOutputs(item?: AssetItem): MaybeRef<ResultItemImpl[]> {
return user_metadata.allOutputs
const outputRef = useAsyncState(
getJobDetail(user_metadata.promptId).then((jobDetail) => {
getJobDetail(user_metadata.jobId).then((jobDetail) => {
if (!jobDetail?.outputs) return []
return Object.entries(jobDetail.outputs).flatMap(flattenNodeOutput)
}),

View File

@@ -8,8 +8,8 @@ import { NodeBadgeMode } from '@/types/nodeSource'
import { LinkReleaseTriggerAction } from '@/types/searchBoxTypes'
const zNodeType = z.string()
const zPromptId = z.string()
export type PromptId = z.infer<typeof zPromptId>
const zJobId = z.string()
export type JobId = z.infer<typeof zJobId>
export const resultItemType = z.enum(['input', 'output', 'temp'])
export type ResultItemType = z.infer<typeof resultItemType>
@@ -52,7 +52,7 @@ const zStatusWsMessage = z.object({
const zProgressWsMessage = z.object({
value: z.number().int(),
max: z.number().int(),
prompt_id: zPromptId,
prompt_id: zJobId,
node: zNodeId
})
@@ -61,21 +61,21 @@ const zNodeProgressState = z.object({
max: z.number(),
state: z.enum(['pending', 'running', 'finished', 'error']),
node_id: zNodeId,
prompt_id: zPromptId,
prompt_id: zJobId,
display_node_id: zNodeId.optional(),
parent_node_id: zNodeId.optional(),
real_node_id: zNodeId.optional()
})
const zProgressStateWsMessage = z.object({
prompt_id: zPromptId,
prompt_id: zJobId,
nodes: z.record(zNodeId, zNodeProgressState)
})
const zExecutingWsMessage = z.object({
node: zNodeId,
display_node: zNodeId,
prompt_id: zPromptId
prompt_id: zJobId
})
const zExecutedWsMessage = zExecutingWsMessage.extend({
@@ -84,7 +84,7 @@ const zExecutedWsMessage = zExecutingWsMessage.extend({
})
const zExecutionWsMessageBase = z.object({
prompt_id: zPromptId,
prompt_id: zJobId,
timestamp: z.number().int()
})

View File

@@ -157,14 +157,14 @@ interface BackendApiCalls {
logs: LogsWsMessage
/** Binary preview/progress data */
b_preview: Blob
/** Binary preview with metadata (node_id, prompt_id) */
/** Binary preview with metadata (node_id, job_id) */
b_preview_with_metadata: {
blob: Blob
nodeId: string
parentNodeId: string
displayNodeId: string
realNodeId: string
promptId: string
jobId: string
}
progress_text: ProgressTextWsMessage
progress_state: ProgressStateWsMessage
@@ -646,7 +646,7 @@ export class ComfyApi extends EventTarget {
displayNodeId: metadata.display_node_id,
parentNodeId: metadata.parent_node_id,
realNodeId: metadata.real_node_id,
promptId: metadata.prompt_id
jobId: metadata.prompt_id
})
// Also dispatch legacy b_preview for backward compatibility
@@ -943,7 +943,7 @@ export class ComfyApi extends EventTarget {
/**
* Gets detailed job info including outputs and workflow
* @param jobId The job/prompt ID
* @param jobId The job ID
* @returns Full job details or undefined if not found
*/
async getJobDetail(jobId: string): Promise<JobDetail | undefined> {
@@ -996,14 +996,14 @@ export class ComfyApi extends EventTarget {
}
/**
* Interrupts the execution of the running prompt. If runningPromptId is provided,
* Interrupts the execution of the running job. If runningJobId is provided,
* it is included in the payload as a helpful hint to the backend.
* @param {string | null} [runningPromptId] Optional Running Prompt ID to interrupt
* @param {string | null} [runningJobId] Optional Running Job ID to interrupt
*/
async interrupt(runningPromptId: string | null) {
async interrupt(runningJobId: string | null) {
await this._postItem(
'interrupt',
runningPromptId ? { prompt_id: runningPromptId } : undefined
runningJobId ? { prompt_id: runningJobId } : undefined
)
}

View File

@@ -725,11 +725,11 @@ export class ComfyApp {
api.addEventListener('b_preview_with_metadata', ({ detail }) => {
// Enhanced preview with explicit node context
const { blob, displayNodeId, promptId } = detail
const { blob, displayNodeId, jobId } = detail
const { setNodePreviewsByExecutionId, revokePreviewsByExecutionId } =
useNodeOutputStore()
const blobUrl = createSharedObjectUrl(blob)
useJobPreviewStore().setPreviewUrl(promptId, blobUrl)
useJobPreviewStore().setPreviewUrl(jobId, blobUrl)
// Ensure clean up if `executing` event is missed.
revokePreviewsByExecutionId(displayNodeId)
// Preview cleanup is handled in progress_state event to support multiple concurrent previews
@@ -1446,7 +1446,7 @@ export class ComfyApp {
} else {
try {
if (res.prompt_id) {
executionStore.storePrompt({
executionStore.storeJob({
id: res.prompt_id,
nodes: Object.keys(p.output),
workflow: useWorkspaceStore().workflow

View File

@@ -339,7 +339,7 @@ export class ChangeTracker {
api.addEventListener('executed', (e: CustomEvent<ExecutedWsMessage>) => {
const detail = e.detail
const workflow =
useExecutionStore().queuedPrompts[detail.prompt_id]?.workflow
useExecutionStore().queuedJobs[detail.prompt_id]?.workflow
const changeTracker = workflow?.changeTracker
if (!changeTracker) return
changeTracker.nodeOutputs ??= {}

View File

@@ -46,7 +46,7 @@ export function findActiveIndex(
export async function getOutputsForTask(
task: TaskItemImpl
): Promise<ResultItemImpl[] | null> {
const requestId = String(task.promptId)
const requestId = String(task.jobId)
latestTaskRequestId = requestId
const outputsCount = task.outputsCount ?? 0

View File

@@ -90,10 +90,10 @@ vi.mock('@/stores/queueStore', () => ({
url: string
}
| undefined
public promptId: string
public jobId: string
constructor(public job: JobListItem) {
this.promptId = job.id
this.jobId = job.id
this.flatOutputs = [
{
supportsPreview: true,
@@ -123,9 +123,9 @@ vi.mock('@/platform/assets/composables/media/assetMappers', () => ({
preview_url: `http://test.com/${name}`
})),
mapTaskOutputToAssetItem: vi.fn((task, output) => {
const index = parseInt(task.promptId.split('_')[1]) || 0
const index = parseInt(task.jobId.split('_')[1]) || 0
return {
id: task.promptId,
id: task.jobId,
name: output.filename,
size: 0,
created_at: new Date(Date.now() - index * 1000).toISOString(),

View File

@@ -132,7 +132,7 @@ describe('useExecutionStore - NodeLocatorId conversions', () => {
})
})
describe('useExecutionStore - reconcileInitializingPrompts', () => {
describe('useExecutionStore - reconcileInitializingJobs', () => {
let store: ReturnType<typeof useExecutionStore>
beforeEach(() => {
@@ -141,36 +141,36 @@ describe('useExecutionStore - reconcileInitializingPrompts', () => {
store = useExecutionStore()
})
it('should remove prompt IDs not present in active jobs', () => {
store.initializingPromptIds = new Set(['job-1', 'job-2', 'job-3'])
it('should remove job IDs not present in active jobs', () => {
store.initializingJobIds = new Set(['job-1', 'job-2', 'job-3'])
store.reconcileInitializingPrompts(new Set(['job-1']))
store.reconcileInitializingJobs(new Set(['job-1']))
expect(store.initializingPromptIds).toEqual(new Set(['job-1']))
expect(store.initializingJobIds).toEqual(new Set(['job-1']))
})
it('should be a no-op when all initializing IDs are active', () => {
store.initializingPromptIds = new Set(['job-1', 'job-2'])
store.initializingJobIds = new Set(['job-1', 'job-2'])
store.reconcileInitializingPrompts(new Set(['job-1', 'job-2', 'job-3']))
store.reconcileInitializingJobs(new Set(['job-1', 'job-2', 'job-3']))
expect(store.initializingPromptIds).toEqual(new Set(['job-1', 'job-2']))
expect(store.initializingJobIds).toEqual(new Set(['job-1', 'job-2']))
})
it('should be a no-op when there are no initializing prompts', () => {
store.initializingPromptIds = new Set()
it('should be a no-op when there are no initializing jobs', () => {
store.initializingJobIds = new Set()
store.reconcileInitializingPrompts(new Set(['job-1']))
store.reconcileInitializingJobs(new Set(['job-1']))
expect(store.initializingPromptIds).toEqual(new Set())
expect(store.initializingJobIds).toEqual(new Set())
})
it('should clear all initializing IDs when no active jobs exist', () => {
store.initializingPromptIds = new Set(['job-1', 'job-2'])
store.initializingJobIds = new Set(['job-1', 'job-2'])
store.reconcileInitializingPrompts(new Set())
store.reconcileInitializingJobs(new Set())
expect(store.initializingPromptIds).toEqual(new Set())
expect(store.initializingJobIds).toEqual(new Set())
})
})

View File

@@ -37,7 +37,7 @@ import type { NodeLocatorId } from '@/types/nodeIdentification'
import { createNodeLocatorId } from '@/types/nodeIdentification'
import { forEachNode, getNodeByExecutionId } from '@/utils/graphTraversalUtil'
interface QueuedPrompt {
interface QueuedJob {
/**
* The nodes that are queued to be executed. The key is the node id and the
* value is a boolean indicating if the node has been executed.
@@ -111,23 +111,23 @@ export const useExecutionStore = defineStore('execution', () => {
const canvasStore = useCanvasStore()
const clientId = ref<string | null>(null)
const activePromptId = ref<string | null>(null)
const queuedPrompts = ref<Record<NodeId, QueuedPrompt>>({})
const activeJobId = ref<string | null>(null)
const queuedJobs = ref<Record<NodeId, QueuedJob>>({})
const lastNodeErrors = ref<Record<NodeId, NodeError> | null>(null)
const lastExecutionError = ref<ExecutionErrorWsMessage | null>(null)
const lastPromptError = ref<PromptError | null>(null)
// This is the progress of all nodes in the currently executing workflow
const nodeProgressStates = ref<Record<string, NodeProgressState>>({})
const nodeProgressStatesByPrompt = ref<
const nodeProgressStatesByJob = ref<
Record<string, Record<string, NodeProgressState>>
>({})
/**
* Map of prompt_id to workflow ID for quick lookup across the app.
* Map of job ID to workflow ID for quick lookup across the app.
*/
const promptIdToWorkflowId = ref<Map<string, string>>(new Map())
const jobIdToWorkflowId = ref<Map<string, string>>(new Map())
const initializingPromptIds = ref<Set<string>>(new Set())
const initializingJobIds = ref<Set<string>>(new Set())
const mergeExecutionProgressStates = (
currentState: NodeProgressState | undefined,
@@ -201,7 +201,7 @@ export const useExecutionStore = defineStore('execution', () => {
const executingNode = computed<ComfyNode | null>(() => {
if (!executingNodeId.value) return null
const workflow: ComfyWorkflow | undefined = activePrompt.value?.workflow
const workflow: ComfyWorkflow | undefined = activeJob.value?.workflow
if (!workflow) return null
const canvasState: ComfyWorkflowJSON | null =
@@ -222,24 +222,24 @@ export const useExecutionStore = defineStore('execution', () => {
: null
)
const activePrompt = computed<QueuedPrompt | undefined>(
() => queuedPrompts.value[activePromptId.value ?? '']
const activeJob = computed<QueuedJob | undefined>(
() => queuedJobs.value[activeJobId.value ?? '']
)
const totalNodesToExecute = computed<number>(() => {
if (!activePrompt.value) return 0
return Object.values(activePrompt.value.nodes).length
if (!activeJob.value) return 0
return Object.values(activeJob.value.nodes).length
})
const isIdle = computed<boolean>(() => !activePromptId.value)
const isIdle = computed<boolean>(() => !activeJobId.value)
const nodesExecuted = computed<number>(() => {
if (!activePrompt.value) return 0
return Object.values(activePrompt.value.nodes).filter(Boolean).length
if (!activeJob.value) return 0
return Object.values(activeJob.value.nodes).filter(Boolean).length
})
const executionProgress = computed<number>(() => {
if (!activePrompt.value) return 0
if (!activeJob.value) return 0
const total = totalNodesToExecute.value
const done = nodesExecuted.value
return total > 0 ? done / total : 0
@@ -291,65 +291,64 @@ export const useExecutionStore = defineStore('execution', () => {
function handleExecutionStart(e: CustomEvent<ExecutionStartWsMessage>) {
lastExecutionError.value = null
lastPromptError.value = null
activePromptId.value = e.detail.prompt_id
queuedPrompts.value[activePromptId.value] ??= { nodes: {} }
clearInitializationByPromptId(activePromptId.value)
activeJobId.value = e.detail.prompt_id
queuedJobs.value[activeJobId.value] ??= { nodes: {} }
clearInitializationByJobId(activeJobId.value)
}
function handleExecutionCached(e: CustomEvent<ExecutionCachedWsMessage>) {
if (!activePrompt.value) return
if (!activeJob.value) return
for (const n of e.detail.nodes) {
activePrompt.value.nodes[n] = true
activeJob.value.nodes[n] = true
}
}
function handleExecutionInterrupted(
e: CustomEvent<ExecutionInterruptedWsMessage>
) {
const pid = e.detail.prompt_id
if (activePromptId.value)
clearInitializationByPromptId(activePromptId.value)
resetExecutionState(pid)
const jobId = e.detail.prompt_id
if (activeJobId.value) clearInitializationByJobId(activeJobId.value)
resetExecutionState(jobId)
}
function handleExecuted(e: CustomEvent<ExecutedWsMessage>) {
if (!activePrompt.value) return
activePrompt.value.nodes[e.detail.node] = true
if (!activeJob.value) return
activeJob.value.nodes[e.detail.node] = true
}
function handleExecutionSuccess(e: CustomEvent<ExecutionSuccessWsMessage>) {
if (isCloud && activePromptId.value) {
if (isCloud && activeJobId.value) {
useTelemetry()?.trackExecutionSuccess({
jobId: activePromptId.value
jobId: activeJobId.value
})
}
const pid = e.detail.prompt_id
resetExecutionState(pid)
const jobId = e.detail.prompt_id
resetExecutionState(jobId)
}
function handleExecuting(e: CustomEvent<NodeId | null>): void {
// Clear the current node progress when a new node starts executing
_executingNodeProgress.value = null
if (!activePrompt.value) return
if (!activeJob.value) return
// Update the executing nodes list
if (typeof e.detail !== 'string') {
if (activePromptId.value) {
delete queuedPrompts.value[activePromptId.value]
if (activeJobId.value) {
delete queuedJobs.value[activeJobId.value]
}
activePromptId.value = null
activeJobId.value = null
}
}
function handleProgressState(e: CustomEvent<ProgressStateWsMessage>) {
const { nodes, prompt_id: pid } = e.detail
const { nodes, prompt_id: jobId } = e.detail
// Revoke previews for nodes that are starting to execute
const previousForPrompt = nodeProgressStatesByPrompt.value[pid] || {}
const previousForJob = nodeProgressStatesByJob.value[jobId] || {}
for (const nodeId in nodes) {
const nodeState = nodes[nodeId]
if (nodeState.state === 'running' && !previousForPrompt[nodeId]) {
if (nodeState.state === 'running' && !previousForJob[nodeId]) {
// This node just started executing, revoke its previews
// Note that we're doing the *actual* node id instead of the display node id
// here intentionally. That way, we don't clear the preview every time a new node
@@ -360,9 +359,9 @@ export const useExecutionStore = defineStore('execution', () => {
}
// Update the progress states for all nodes
nodeProgressStatesByPrompt.value = {
...nodeProgressStatesByPrompt.value,
[pid]: nodes
nodeProgressStatesByJob.value = {
...nodeProgressStatesByJob.value,
[jobId]: nodes
}
nodeProgressStates.value = nodes
@@ -401,13 +400,13 @@ export const useExecutionStore = defineStore('execution', () => {
error: e.detail.exception_message
})
}
clearInitializationByPromptId(e.detail.prompt_id)
clearInitializationByJobId(e.detail.prompt_id)
resetExecutionState(e.detail.prompt_id)
}
/**
* Notification handler used for frontend/cloud initialization tracking.
* Marks a prompt as initializing when cloud notifies it is waiting for a machine.
* Marks a job as initializing when cloud notifies it is waiting for a machine.
*/
function handleNotification(e: CustomEvent<NotificationWsMessage>) {
const payload = e.detail
@@ -416,62 +415,60 @@ export const useExecutionStore = defineStore('execution', () => {
if (!id) return
// Until cloud implements a proper message
if (text.includes('Waiting for a machine')) {
const next = new Set(initializingPromptIds.value)
const next = new Set(initializingJobIds.value)
next.add(id)
initializingPromptIds.value = next
initializingJobIds.value = next
}
}
function clearInitializationByPromptId(promptId: string | null) {
if (!promptId) return
if (!initializingPromptIds.value.has(promptId)) return
const next = new Set(initializingPromptIds.value)
next.delete(promptId)
initializingPromptIds.value = next
function clearInitializationByJobId(jobId: string | null) {
if (!jobId) return
if (!initializingJobIds.value.has(jobId)) return
const next = new Set(initializingJobIds.value)
next.delete(jobId)
initializingJobIds.value = next
}
function clearInitializationByPromptIds(promptIds: string[]) {
if (!promptIds.length) return
const current = initializingPromptIds.value
const toRemove = promptIds.filter((id) => current.has(id))
function clearInitializationByJobIds(jobIds: string[]) {
if (!jobIds.length) return
const current = initializingJobIds.value
const toRemove = jobIds.filter((id) => current.has(id))
if (!toRemove.length) return
const next = new Set(current)
for (const id of toRemove) {
next.delete(id)
}
initializingPromptIds.value = next
initializingJobIds.value = next
}
function reconcileInitializingPrompts(activeJobIds: Set<string>) {
const orphaned = [...initializingPromptIds.value].filter(
function reconcileInitializingJobs(activeJobIds: Set<string>) {
const orphaned = [...initializingJobIds.value].filter(
(id) => !activeJobIds.has(id)
)
clearInitializationByPromptIds(orphaned)
clearInitializationByJobIds(orphaned)
}
function isPromptInitializing(
promptId: string | number | undefined
): boolean {
if (!promptId) return false
return initializingPromptIds.value.has(String(promptId))
function isJobInitializing(jobId: string | number | undefined): boolean {
if (!jobId) return false
return initializingJobIds.value.has(String(jobId))
}
/**
* Reset execution-related state after a run completes or is stopped.
*/
function resetExecutionState(pid?: string | null) {
function resetExecutionState(jobIdParam?: string | null) {
nodeProgressStates.value = {}
const promptId = pid ?? activePromptId.value ?? null
if (promptId) {
const map = { ...nodeProgressStatesByPrompt.value }
delete map[promptId]
nodeProgressStatesByPrompt.value = map
useJobPreviewStore().clearPreview(promptId)
const jobId = jobIdParam ?? activeJobId.value ?? null
if (jobId) {
const map = { ...nodeProgressStatesByJob.value }
delete map[jobId]
nodeProgressStatesByJob.value = map
useJobPreviewStore().clearPreview(jobId)
}
if (activePromptId.value) {
delete queuedPrompts.value[activePromptId.value]
if (activeJobId.value) {
delete queuedJobs.value[activeJobId.value]
}
activePromptId.value = null
activeJobId.value = null
_executingNodeProgress.value = null
lastPromptError.value = null
}
@@ -495,7 +492,7 @@ export const useExecutionStore = defineStore('execution', () => {
useNodeProgressText().showTextPreview(node, text)
}
function storePrompt({
function storeJob({
nodes,
id,
workflow
@@ -504,31 +501,28 @@ export const useExecutionStore = defineStore('execution', () => {
id: string
workflow: ComfyWorkflow
}) {
queuedPrompts.value[id] ??= { nodes: {} }
const queuedPrompt = queuedPrompts.value[id]
queuedPrompt.nodes = {
queuedJobs.value[id] ??= { nodes: {} }
const queuedJob = queuedJobs.value[id]
queuedJob.nodes = {
...nodes.reduce((p: Record<string, boolean>, n) => {
p[n] = false
return p
}, {}),
...queuedPrompt.nodes
...queuedJob.nodes
}
queuedPrompt.workflow = workflow
queuedJob.workflow = workflow
const wid = workflow?.activeState?.id ?? workflow?.initialState?.id
if (wid) {
promptIdToWorkflowId.value.set(String(id), String(wid))
jobIdToWorkflowId.value.set(String(id), String(wid))
}
}
/**
* Register or update a mapping from prompt_id to workflow ID.
* Register or update a mapping from job ID to workflow ID.
*/
function registerPromptWorkflowIdMapping(
promptId: string,
workflowId: string
) {
if (!promptId || !workflowId) return
promptIdToWorkflowId.value.set(String(promptId), String(workflowId))
function registerJobWorkflowIdMapping(jobId: string, workflowId: string) {
if (!jobId || !workflowId) return
jobIdToWorkflowId.value.set(String(jobId), String(workflowId))
}
/**
@@ -543,11 +537,9 @@ export const useExecutionStore = defineStore('execution', () => {
return executionId
}
const runningPromptIds = computed<string[]>(() => {
const runningJobIds = computed<string[]>(() => {
const result: string[] = []
for (const [pid, nodes] of Object.entries(
nodeProgressStatesByPrompt.value
)) {
for (const [pid, nodes] of Object.entries(nodeProgressStatesByJob.value)) {
if (Object.values(nodes).some((n) => n.state === 'running')) {
result.push(pid)
}
@@ -556,7 +548,7 @@ export const useExecutionStore = defineStore('execution', () => {
})
const runningWorkflowCount = computed<number>(
() => runningPromptIds.value.length
() => runningJobIds.value.length
)
/** Map of node errors indexed by locator ID. */
@@ -699,8 +691,8 @@ export const useExecutionStore = defineStore('execution', () => {
return {
isIdle,
clientId,
activePromptId,
queuedPrompts,
activeJobId,
queuedJobs,
lastNodeErrors,
lastExecutionError,
lastPromptError,
@@ -708,7 +700,7 @@ export const useExecutionStore = defineStore('execution', () => {
lastExecutionErrorNodeId,
executingNodeId,
executingNodeIds,
activePrompt,
activeJob,
totalNodesToExecute,
nodesExecuted,
executionProgress,
@@ -716,25 +708,25 @@ export const useExecutionStore = defineStore('execution', () => {
executingNodeProgress,
nodeProgressStates,
nodeLocationProgressStates,
nodeProgressStatesByPrompt,
runningPromptIds,
nodeProgressStatesByJob,
runningJobIds,
runningWorkflowCount,
initializingPromptIds,
isPromptInitializing,
clearInitializationByPromptId,
clearInitializationByPromptIds,
reconcileInitializingPrompts,
initializingJobIds,
isJobInitializing,
clearInitializationByJobId,
clearInitializationByJobIds,
reconcileInitializingJobs,
bindExecutionEvents,
unbindExecutionEvents,
storePrompt,
registerPromptWorkflowIdMapping,
storeJob,
registerJobWorkflowIdMapping,
uniqueExecutingNodeIdStrings,
// Raw executing progress data for backward compatibility in ComfyApp.
_executingNodeProgress,
// NodeLocatorId conversion helpers
executionIdToNodeLocatorId,
nodeLocatorIdToExecutionId,
promptIdToWorkflowId,
jobIdToWorkflowId,
// Node error lookup helpers
getNodeErrors,
slotHasError,

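Reviewer note: for anyone skimming the renamed initialization-tracking helpers, here is a minimal standalone sketch of the pattern they follow: tracked job IDs live in a `Set` that is replaced wholesale on every change, and reconciliation drops any ID the queue no longer reports as active. The `createInitTracker` name and shape are illustrative only, not store API.

```ts
// Illustrative sketch, not the store implementation.
function createInitTracker() {
  // Replaced wholesale on every mutation, mirroring the store's reactive Set updates.
  let ids = new Set<string>()

  const clearByJobIds = (jobIds: string[]): void => {
    const toRemove = jobIds.filter((id) => ids.has(id))
    if (!toRemove.length) return
    const next = new Set(ids)
    for (const id of toRemove) next.delete(id)
    ids = next
  }

  return {
    initializingJobIds: (): ReadonlySet<string> => ids,
    markInitializing: (jobId: string): void => {
      const next = new Set(ids)
      next.add(jobId)
      ids = next
    },
    isJobInitializing: (jobId: string | number | undefined): boolean =>
      jobId ? ids.has(String(jobId)) : false,
    clearByJobIds,
    // Drop any tracked job the queue no longer reports as running or pending.
    reconcile: (activeJobIds: Set<string>): void =>
      clearByJobIds([...ids].filter((id) => !activeJobIds.has(id)))
  }
}
```
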
View File

@@ -31,7 +31,7 @@ const mockWorkflow: ComfyWorkflowJSON = {
// Mock job detail response (matches actual /jobs/{id} API response structure)
// workflow is nested at: workflow.extra_data.extra_pnginfo.workflow
const mockJobDetail = {
id: 'test-prompt-id',
id: 'test-job-id',
status: 'completed' as const,
create_time: Date.now(),
update_time: Date.now(),
@@ -86,7 +86,7 @@ describe('TaskItemImpl.loadWorkflow - workflow fetching', () => {
})
it('should fetch workflow from API for history tasks', async () => {
const job = createHistoryJob('test-prompt-id')
const job = createHistoryJob('test-job-id')
const task = new TaskItemImpl(job)
vi.spyOn(jobOutputCache, 'getJobDetail').mockResolvedValue(
@@ -95,12 +95,12 @@ describe('TaskItemImpl.loadWorkflow - workflow fetching', () => {
await task.loadWorkflow(mockApp)
expect(jobOutputCache.getJobDetail).toHaveBeenCalledWith('test-prompt-id')
expect(jobOutputCache.getJobDetail).toHaveBeenCalledWith('test-job-id')
expect(mockApp.loadGraphData).toHaveBeenCalledWith(mockWorkflow)
})
it('should not load workflow when fetch returns undefined', async () => {
const job = createHistoryJob('test-prompt-id')
const job = createHistoryJob('test-job-id')
const task = new TaskItemImpl(job)
vi.spyOn(jobOutputCache, 'getJobDetail').mockResolvedValue(undefined)
@@ -112,7 +112,7 @@ describe('TaskItemImpl.loadWorkflow - workflow fetching', () => {
})
it('should only fetch for history tasks, not running tasks', async () => {
const job = createRunningJob('test-prompt-id')
const job = createRunningJob('test-job-id')
const runningTask = new TaskItemImpl(job)
vi.spyOn(jobOutputCache, 'getJobDetail').mockResolvedValue(
@@ -126,7 +126,7 @@ describe('TaskItemImpl.loadWorkflow - workflow fetching', () => {
})
it('should handle fetch errors gracefully by returning undefined', async () => {
const job = createHistoryJob('test-prompt-id')
const job = createHistoryJob('test-job-id')
const task = new TaskItemImpl(job)
vi.spyOn(jobOutputCache, 'getJobDetail').mockResolvedValue(undefined)

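Reviewer note: the fixture above mirrors the nesting noted in its comment, i.e. in a `/jobs/{id}` response the workflow JSON sits at `workflow.extra_data.extra_pnginfo.workflow`. A hedged sketch of pulling it out with optional chaining follows; `JobDetailLike` is an illustrative shape, not the app's real schema.

```ts
// Illustrative shape only; the app validates the real response with its own schemas.
interface JobDetailLike {
  id: string
  workflow?: {
    extra_data?: {
      extra_pnginfo?: { workflow?: unknown }
    }
  }
}

// Returns the embedded workflow if present, otherwise undefined,
// which is the case the "fetch returns undefined" tests above exercise.
function extractEmbeddedWorkflow(detail: JobDetailLike | undefined): unknown {
  return detail?.workflow?.extra_data?.extra_pnginfo?.workflow
}
```
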
View File

@@ -67,7 +67,7 @@ vi.mock('@/scripts/api', () => ({
describe('TaskItemImpl', () => {
it('should remove animated property from outputs during construction', () => {
const job = createHistoryJob(0, 'prompt-id')
const job = createHistoryJob(0, 'job-id')
const taskItem = new TaskItemImpl(job, {
'node-1': {
images: [{ filename: 'test.png', type: 'output', subfolder: '' }],
@@ -83,7 +83,7 @@ describe('TaskItemImpl', () => {
})
it('should handle outputs without animated property', () => {
const job = createHistoryJob(0, 'prompt-id')
const job = createHistoryJob(0, 'job-id')
const taskItem = new TaskItemImpl(job, {
'node-1': {
images: [{ filename: 'test.png', type: 'output', subfolder: '' }]
@@ -95,7 +95,7 @@ describe('TaskItemImpl', () => {
})
it('should recognize webm video from core', () => {
const job = createHistoryJob(0, 'prompt-id')
const job = createHistoryJob(0, 'job-id')
const taskItem = new TaskItemImpl(job, {
'node-1': {
video: [{ filename: 'test.webm', type: 'output', subfolder: '' }]
@@ -112,7 +112,7 @@ describe('TaskItemImpl', () => {
// https://github.com/Kosinkadink/ComfyUI-VideoHelperSuite/blob/0a75c7958fe320efcb052f1d9f8451fd20c730a8/videohelpersuite/nodes.py#L578-L590
it('should recognize webm video from VHS', () => {
const job = createHistoryJob(0, 'prompt-id')
const job = createHistoryJob(0, 'job-id')
const taskItem = new TaskItemImpl(job, {
'node-1': {
gifs: [
@@ -136,7 +136,7 @@ describe('TaskItemImpl', () => {
})
it('should recognize mp4 video from core', () => {
const job = createHistoryJob(0, 'prompt-id')
const job = createHistoryJob(0, 'job-id')
const taskItem = new TaskItemImpl(job, {
'node-1': {
images: [
@@ -167,7 +167,7 @@ describe('TaskItemImpl', () => {
audioFormats.forEach(({ extension, mimeType }) => {
it(`should recognize ${extension} audio`, () => {
const job = createHistoryJob(0, 'prompt-id')
const job = createHistoryJob(0, 'job-id')
const taskItem = new TaskItemImpl(job, {
'node-1': {
audio: [
@@ -193,14 +193,14 @@ describe('TaskItemImpl', () => {
describe('error extraction getters', () => {
it('errorMessage returns undefined when no execution_error', () => {
const job = createHistoryJob(0, 'prompt-id')
const job = createHistoryJob(0, 'job-id')
const taskItem = new TaskItemImpl(job)
expect(taskItem.errorMessage).toBeUndefined()
})
it('errorMessage returns the exception_message from execution_error', () => {
const job: JobListItem = {
...createHistoryJob(0, 'prompt-id'),
...createHistoryJob(0, 'job-id'),
status: 'failed',
execution_error: {
node_id: 'node-1',
@@ -217,7 +217,7 @@ describe('TaskItemImpl', () => {
})
it('executionError returns undefined when no execution_error', () => {
const job = createHistoryJob(0, 'prompt-id')
const job = createHistoryJob(0, 'job-id')
const taskItem = new TaskItemImpl(job)
expect(taskItem.executionError).toBeUndefined()
})
@@ -234,7 +234,7 @@ describe('TaskItemImpl', () => {
current_outputs: {}
}
const job: JobListItem = {
...createHistoryJob(0, 'prompt-id'),
...createHistoryJob(0, 'job-id'),
status: 'failed',
execution_error: errorDetail
}
@@ -292,9 +292,9 @@ describe('useQueueStore', () => {
expect(store.runningTasks).toHaveLength(1)
expect(store.pendingTasks).toHaveLength(2)
expect(store.runningTasks[0].promptId).toBe('run-1')
expect(store.pendingTasks[0].promptId).toBe('pend-2')
expect(store.pendingTasks[1].promptId).toBe('pend-1')
expect(store.runningTasks[0].jobId).toBe('run-1')
expect(store.pendingTasks[0].jobId).toBe('pend-2')
expect(store.pendingTasks[1].jobId).toBe('pend-1')
})
it('should load history tasks from API', async () => {
@@ -307,8 +307,8 @@ describe('useQueueStore', () => {
await store.update()
expect(store.historyTasks).toHaveLength(2)
expect(store.historyTasks[0].promptId).toBe('hist-1')
expect(store.historyTasks[1].promptId).toBe('hist-2')
expect(store.historyTasks[0].jobId).toBe('hist-1')
expect(store.historyTasks[1].jobId).toBe('hist-2')
})
it('should set loading state correctly', async () => {
@@ -378,7 +378,7 @@ describe('useQueueStore', () => {
await store.update()
expect(store.historyTasks).toHaveLength(1)
expect(store.historyTasks[0].promptId).toBe('prompt-uuid-aaa')
expect(store.historyTasks[0].jobId).toBe('prompt-uuid-aaa')
const hist2 = createHistoryJob(51, 'prompt-uuid-bbb')
mockGetHistory.mockResolvedValue([hist2])
@@ -386,7 +386,7 @@ describe('useQueueStore', () => {
await store.update()
expect(store.historyTasks).toHaveLength(1)
expect(store.historyTasks[0].promptId).toBe('prompt-uuid-bbb')
expect(store.historyTasks[0].jobId).toBe('prompt-uuid-bbb')
expect(store.historyTasks[0].queueIndex).toBe(51)
})
@@ -406,10 +406,10 @@ describe('useQueueStore', () => {
await store.update()
expect(store.historyTasks).toHaveLength(2)
const promptIds = store.historyTasks.map((t) => t.promptId)
expect(promptIds).toContain('second-prompt-at-101')
expect(promptIds).toContain('prompt-at-99')
expect(promptIds).not.toContain('first-prompt-at-100')
const jobIds = store.historyTasks.map((t) => t.jobId)
expect(jobIds).toContain('second-prompt-at-101')
expect(jobIds).toContain('prompt-at-99')
expect(jobIds).not.toContain('first-prompt-at-100')
})
it('should handle multiple queueIndex collisions simultaneously', async () => {
@@ -430,13 +430,13 @@ describe('useQueueStore', () => {
await store.update()
expect(store.historyTasks).toHaveLength(3)
const promptIds = store.historyTasks.map((t) => t.promptId)
expect(promptIds).toEqual(['new-at-32', 'new-at-31', 'keep-at-30'])
const jobIds = store.historyTasks.map((t) => t.jobId)
expect(jobIds).toEqual(['new-at-32', 'new-at-31', 'keep-at-30'])
})
})
describe('update() - history reconciliation', () => {
it('should keep existing items still on server (by promptId)', async () => {
it('should keep existing items still on server (by jobId)', async () => {
const hist1 = createHistoryJob(10, 'existing-1')
const hist2 = createHistoryJob(9, 'existing-2')
@@ -452,9 +452,9 @@ describe('useQueueStore', () => {
await store.update()
expect(store.historyTasks).toHaveLength(3)
expect(store.historyTasks.map((t) => t.promptId)).toContain('existing-1')
expect(store.historyTasks.map((t) => t.promptId)).toContain('existing-2')
expect(store.historyTasks.map((t) => t.promptId)).toContain('new-1')
expect(store.historyTasks.map((t) => t.jobId)).toContain('existing-1')
expect(store.historyTasks.map((t) => t.jobId)).toContain('existing-2')
expect(store.historyTasks.map((t) => t.jobId)).toContain('new-1')
})
it('should remove items no longer on server', async () => {
@@ -472,7 +472,7 @@ describe('useQueueStore', () => {
await store.update()
expect(store.historyTasks).toHaveLength(1)
expect(store.historyTasks[0].promptId).toBe('keep-me')
expect(store.historyTasks[0].jobId).toBe('keep-me')
})
it('should add new items from server', async () => {
@@ -490,8 +490,8 @@ describe('useQueueStore', () => {
await store.update()
expect(store.historyTasks).toHaveLength(3)
expect(store.historyTasks.map((t) => t.promptId)).toContain('new-1')
expect(store.historyTasks.map((t) => t.promptId)).toContain('new-2')
expect(store.historyTasks.map((t) => t.jobId)).toContain('new-1')
expect(store.historyTasks.map((t) => t.jobId)).toContain('new-2')
})
it('should recreate TaskItemImpl when outputs_count changes', async () => {
@@ -831,7 +831,7 @@ describe('useQueueStore', () => {
await secondUpdate
expect(store.pendingTasks).toHaveLength(1)
expect(store.pendingTasks[0].promptId).toBe('new-job')
expect(store.pendingTasks[0].jobId).toBe('new-job')
resolveFirst!({
Running: [],
@@ -840,7 +840,7 @@ describe('useQueueStore', () => {
await firstUpdate
expect(store.pendingTasks).toHaveLength(1)
expect(store.pendingTasks[0].promptId).toBe('new-job')
expect(store.pendingTasks[0].jobId).toBe('new-job')
})
it('should set isLoading to false only for the latest request', async () => {
@@ -897,13 +897,13 @@ describe('useQueueStore', () => {
await secondUpdate
expect(store.pendingTasks).toHaveLength(1)
expect(store.pendingTasks[0].promptId).toBe('new-job')
expect(store.pendingTasks[0].jobId).toBe('new-job')
expect(store.isLoading).toBe(false)
await expect(firstUpdate).rejects.toThrow('stale network error')
expect(store.pendingTasks).toHaveLength(1)
expect(store.pendingTasks[0].promptId).toBe('new-job')
expect(store.pendingTasks[0].jobId).toBe('new-job')
expect(store.isLoading).toBe(false)
})
})

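Reviewer note: the reconciliation behaviour these tests pin down (keep entries the server still reports, drop the rest, add new ones, all keyed by `jobId`) reduces to a map merge. A simplified standalone sketch under those assumptions, not the store's actual code:

```ts
// Minimal model of a history entry; the real store holds TaskItemImpl instances.
interface HistoryEntry {
  jobId: string
}

function reconcileHistory<T extends HistoryEntry>(current: T[], fromServer: T[]): T[] {
  // Index what we already have so existing instances can be reused.
  const existingByJobId = new Map<string, T>()
  for (const entry of current) existingByJobId.set(entry.jobId, entry)
  // The server response is authoritative: entries it omits are dropped,
  // new entries are added, and matches keep their existing instance.
  return fromServer.map((entry) => existingByJobId.get(entry.jobId) ?? entry)
}
```
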
View File

@@ -309,14 +309,14 @@ export class TaskItemImpl {
}
get key() {
return this.promptId + this.displayStatus
return this.jobId + this.displayStatus
}
get queueIndex() {
return this.job.priority
}
get promptId() {
get jobId() {
return this.job.id
}
@@ -405,7 +405,7 @@ export class TaskItemImpl {
if (!this.isHistory) {
return this
}
const jobDetail = await getJobDetail(this.promptId)
const jobDetail = await getJobDetail(this.jobId)
if (!jobDetail?.outputs) {
return this
@@ -421,7 +421,7 @@ export class TaskItemImpl {
}
// Single fetch for both workflow and outputs (with caching)
const jobDetail = await getJobDetail(this.promptId)
const jobDetail = await getJobDetail(this.jobId)
const workflowData = await extractWorkflow(jobDetail)
if (!workflowData) {
@@ -460,7 +460,7 @@ export class TaskItemImpl {
new TaskItemImpl(
{
...this.job,
id: `${this.promptId}-${i}`
id: `${this.jobId}-${i}`
},
{
[output.nodeId]: {
@@ -527,13 +527,10 @@ export const useQueueStore = defineStore('queue', () => {
const appearedTasks = [...pendingTasks.value, ...runningTasks.value]
const executionStore = useExecutionStore()
appearedTasks.forEach((task) => {
const promptIdString = String(task.promptId)
const jobIdString = String(task.jobId)
const workflowId = task.workflowId
if (workflowId && promptIdString) {
executionStore.registerPromptWorkflowIdMapping(
promptIdString,
workflowId
)
if (workflowId && jobIdString) {
executionStore.registerJobWorkflowIdMapping(jobIdString, workflowId)
}
})
@@ -546,7 +543,7 @@ export const useQueueStore = defineStore('queue', () => {
...queue.Running.map((j) => j.id),
...queue.Pending.map((j) => j.id)
])
executionStore.reconcileInitializingPrompts(activeJobIds)
executionStore.reconcileInitializingJobs(activeJobIds)
}
// Sort by create_time descending and limit to maxItems
@@ -556,12 +553,12 @@ export const useQueueStore = defineStore('queue', () => {
// Reuse existing TaskItemImpl instances or create new ones
// Must recreate if outputs_count changed (e.g., API started returning it)
const existingByPromptId = new Map(
currentHistory.map((impl) => [impl.promptId, impl])
const existingByJobId = new Map(
currentHistory.map((impl) => [impl.jobId, impl])
)
historyTasks.value = sortedHistory.map((job) => {
const existing = existingByPromptId.get(job.id)
const existing = existingByJobId.get(job.id)
if (!existing) return new TaskItemImpl(job)
// Recreate if outputs_count changed to ensure lazy loading works
if (existing.outputsCount !== (job.outputs_count ?? undefined)) {
@@ -590,7 +587,7 @@ export const useQueueStore = defineStore('queue', () => {
}
const deleteTask = async (task: TaskItemImpl) => {
await api.deleteItem(task.apiTaskType, task.promptId)
await api.deleteItem(task.apiTaskType, task.jobId)
await update()
}

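Reviewer note: the registration above only fills the `jobId` to workflow-ID map; its consumers are outside this diff. As a hedged illustration of why the mapping exists, a job-scoped event can be routed back to its workflow with a plain lookup (`workflowIdForJob` is a hypothetical helper, not store API):

```ts
// Hypothetical consumer of the jobId -> workflowId mapping registered above.
function workflowIdForJob(
  jobIdToWorkflowId: ReadonlyMap<string, string>,
  jobId: string | number
): string | undefined {
  return jobIdToWorkflowId.get(String(jobId))
}

// e.g. when a job-scoped event arrives, resolve the workflow it belongs to
// (prompt_id stays on the wire, per the PR description):
// const workflowId = workflowIdForJob(executionStore.jobIdToWorkflowId, e.detail.prompt_id)
```
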
View File

@@ -43,7 +43,7 @@ export const iconForJobState = (state: JobState): string => {
const buildTitle = (task: TaskItemImpl, t: (k: string) => string): string => {
const prefix = t('g.job')
const shortId = String(task.promptId ?? '').split('-')[0]
const shortId = String(task.jobId ?? '').split('-')[0]
const idx = task.queueIndex
if (typeof idx === 'number') return `${prefix} #${idx}`
if (shortId) return `${prefix} ${shortId}`

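Reviewer note: a quick illustration of what the two branches above produce, assuming `t` maps `'g.job'` to `'Job'`. The stand-ins below are hypothetical, and the real helper's final fallback sits outside this hunk:

```ts
// Hypothetical stand-ins for illustration; the real helper takes a TaskItemImpl and the i18n t().
const t = (key: string): string => (key === 'g.job' ? 'Job' : key)

const titleFor = (jobId: string | undefined, queueIndex?: number): string => {
  const prefix = t('g.job')
  const shortId = String(jobId ?? '').split('-')[0]
  if (typeof queueIndex === 'number') return `${prefix} #${queueIndex}`
  if (shortId) return `${prefix} ${shortId}`
  return prefix // illustrative fallback; the original's final branch is not shown in this hunk
}

titleFor('1a2b3c4d-ffff-4242-aaaa-000000000000', 3) // 'Job #3'
titleFor('1a2b3c4d-ffff-4242-aaaa-000000000000')    // 'Job 1a2b3c4d'
```
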
View File

@@ -132,12 +132,10 @@ if (isDesktop) {
(newTasks, oldTasks) => {
// Report tasks that were previously running but are now completed (i.e. in history)
const oldRunningTaskIds = new Set(
oldTasks.filter((task) => task.isRunning).map((task) => task.promptId)
oldTasks.filter((task) => task.isRunning).map((task) => task.jobId)
)
newTasks
.filter(
(task) => oldRunningTaskIds.has(task.promptId) && task.isHistory
)
.filter((task) => oldRunningTaskIds.has(task.jobId) && task.isHistory)
.forEach((task) => {
electronAPI().Events.incrementUserProperty(
`execution:${task.displayStatus.toLowerCase()}`,