mirror of
https://github.com/Comfy-Org/ComfyUI_frontend.git
synced 2026-04-16 20:51:04 +00:00
Compare commits
10 Commits
test/layou
...
prompt-tab
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8a5a6d0f11 | ||
|
|
f500e0dde7 | ||
|
|
8a26ac632d | ||
|
|
324ef4fab1 | ||
|
|
de6eb5a7e7 | ||
|
|
fb96e64d82 | ||
|
|
4730a53b3d | ||
|
|
ef6030da0f | ||
|
|
6b11e5aea6 | ||
|
|
4f505dc80b |
@@ -732,6 +732,8 @@ export class ComfyApp {
|
||||
})
|
||||
|
||||
api.addEventListener('executed', ({ detail }) => {
|
||||
if (!useExecutionStore().isJobForActiveWorkflow(detail.prompt_id)) return
|
||||
|
||||
const nodeOutputStore = useNodeOutputStore()
|
||||
const executionId = String(detail.display_node || detail.node)
|
||||
|
||||
@@ -774,6 +776,8 @@ export class ComfyApp {
|
||||
})
|
||||
|
||||
api.addEventListener('b_preview_with_metadata', ({ detail }) => {
|
||||
if (!useExecutionStore().isJobForActiveWorkflow(detail.jobId)) return
|
||||
|
||||
// Enhanced preview with explicit node context
|
||||
const { blob, displayNodeId, jobId } = detail
|
||||
const { setNodePreviewsByExecutionId, revokePreviewsByExecutionId } =
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { ref } from 'vue'
|
||||
import { app } from '@/scripts/app'
|
||||
import { MAX_PROGRESS_JOBS, useExecutionStore } from '@/stores/executionStore'
|
||||
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
|
||||
@@ -25,6 +26,9 @@ import type { LGraphCanvas } from '@/lib/litegraph/src/LGraphCanvas'
|
||||
import { createMockLGraphNode } from '@/utils/__tests__/litegraphTestUtils'
|
||||
import { createTestingPinia } from '@pinia/testing'
|
||||
|
||||
// Reactive ref so the watcher on activeWorkflow?.path fires in tests
|
||||
const mockActiveWorkflow = ref<{ path: string } | null>(null)
|
||||
|
||||
// Mock the workflowStore
|
||||
vi.mock('@/platform/workflow/management/stores/workflowStore', async () => {
|
||||
const { ComfyWorkflow } = await vi.importActual<typeof WorkflowStoreModule>(
|
||||
@@ -35,7 +39,10 @@ vi.mock('@/platform/workflow/management/stores/workflowStore', async () => {
|
||||
useWorkflowStore: vi.fn(() => ({
|
||||
nodeExecutionIdToNodeLocatorId: mockNodeExecutionIdToNodeLocatorId,
|
||||
nodeIdToNodeLocatorId: mockNodeIdToNodeLocatorId,
|
||||
nodeLocatorIdToNodeExecutionId: mockNodeLocatorIdToNodeExecutionId
|
||||
nodeLocatorIdToNodeExecutionId: mockNodeLocatorIdToNodeExecutionId,
|
||||
get activeWorkflow() {
|
||||
return mockActiveWorkflow.value
|
||||
}
|
||||
}))
|
||||
}
|
||||
})
|
||||
@@ -754,3 +761,391 @@ describe('useMissingNodesErrorStore - setMissingNodeTypes', () => {
|
||||
expect(store.missingNodesError?.nodeTypes).toEqual(input)
|
||||
})
|
||||
})
|
||||
|
||||
describe('useExecutionStore - isJobForActiveWorkflow', () => {
|
||||
let store: ReturnType<typeof useExecutionStore>
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockActiveWorkflow.value = null
|
||||
apiEventHandlers.clear()
|
||||
setActivePinia(createTestingPinia({ stubActions: false }))
|
||||
store = useExecutionStore()
|
||||
store.bindExecutionEvents()
|
||||
})
|
||||
|
||||
it('should return true when promptId is null (legacy message)', () => {
|
||||
expect(store.isJobForActiveWorkflow(null)).toBe(true)
|
||||
})
|
||||
|
||||
it('should return true when promptId is undefined', () => {
|
||||
expect(store.isJobForActiveWorkflow(undefined)).toBe(true)
|
||||
})
|
||||
|
||||
it('should return true when job is not in the session map (unknown job)', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-a' }
|
||||
expect(store.isJobForActiveWorkflow('unknown-job')).toBe(true)
|
||||
})
|
||||
|
||||
it('should return true when no active workflow is open', () => {
|
||||
mockActiveWorkflow.value = null
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
expect(store.isJobForActiveWorkflow('job-1')).toBe(true)
|
||||
})
|
||||
|
||||
it('should return true when job path matches active workflow', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-a' }
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
expect(store.isJobForActiveWorkflow('job-1')).toBe(true)
|
||||
})
|
||||
|
||||
it('should return false when job path differs from active workflow', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-b' }
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
expect(store.isJobForActiveWorkflow('job-1')).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('useExecutionStore - WS message filtering by workflow tab', () => {
|
||||
let store: ReturnType<typeof useExecutionStore>
|
||||
|
||||
function fireEvent<T>(name: string, detail: T) {
|
||||
const handler = apiEventHandlers.get(name)
|
||||
if (!handler) throw new Error(`${name} handler not bound`)
|
||||
handler(new CustomEvent(name, { detail }))
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockActiveWorkflow.value = null
|
||||
apiEventHandlers.clear()
|
||||
setActivePinia(createTestingPinia({ stubActions: false }))
|
||||
store = useExecutionStore()
|
||||
store.bindExecutionEvents()
|
||||
})
|
||||
|
||||
describe('handleExecuted filtering', () => {
|
||||
it('should update nodes when job matches active workflow', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-a' }
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
|
||||
// Start execution to set activeJobId
|
||||
fireEvent('execution_start', {
|
||||
prompt_id: 'job-1',
|
||||
timestamp: Date.now()
|
||||
})
|
||||
expect(store.activeJobId).toBe('job-1')
|
||||
|
||||
// Fire executed for a node
|
||||
fireEvent('executed', {
|
||||
node: 'node-1',
|
||||
display_node: 'node-1',
|
||||
prompt_id: 'job-1',
|
||||
output: { images: [] }
|
||||
})
|
||||
|
||||
expect(store.activeJob?.nodes['node-1']).toBe(true)
|
||||
})
|
||||
|
||||
it('should ignore executed events from a different workflow', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-b' }
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
|
||||
fireEvent('execution_start', {
|
||||
prompt_id: 'job-1',
|
||||
timestamp: Date.now()
|
||||
})
|
||||
|
||||
fireEvent('executed', {
|
||||
node: 'node-1',
|
||||
display_node: 'node-1',
|
||||
prompt_id: 'job-1',
|
||||
output: { images: [] }
|
||||
})
|
||||
|
||||
// Node should not be marked as executed since we're on workflow-b
|
||||
expect(store.activeJob?.nodes['node-1']).not.toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleExecutionCached filtering', () => {
|
||||
it('should ignore cached events from a different workflow', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-b' }
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
|
||||
fireEvent('execution_start', {
|
||||
prompt_id: 'job-1',
|
||||
timestamp: Date.now()
|
||||
})
|
||||
|
||||
fireEvent('execution_cached', {
|
||||
prompt_id: 'job-1',
|
||||
timestamp: Date.now(),
|
||||
nodes: ['node-1', 'node-2']
|
||||
})
|
||||
|
||||
expect(store.activeJob?.nodes['node-1']).not.toBe(true)
|
||||
expect(store.activeJob?.nodes['node-2']).not.toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleProgress filtering', () => {
|
||||
it('should ignore progress from a different workflow', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-b' }
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
|
||||
fireEvent('execution_start', {
|
||||
prompt_id: 'job-1',
|
||||
timestamp: Date.now()
|
||||
})
|
||||
|
||||
fireEvent('progress', {
|
||||
value: 5,
|
||||
max: 10,
|
||||
prompt_id: 'job-1',
|
||||
node: 'node-1'
|
||||
})
|
||||
|
||||
expect(store._executingNodeProgress).toBeNull()
|
||||
})
|
||||
|
||||
it('should update progress when job matches active workflow', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-a' }
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
|
||||
fireEvent('execution_start', {
|
||||
prompt_id: 'job-1',
|
||||
timestamp: Date.now()
|
||||
})
|
||||
|
||||
fireEvent('progress', {
|
||||
value: 5,
|
||||
max: 10,
|
||||
prompt_id: 'job-1',
|
||||
node: 'node-1'
|
||||
})
|
||||
|
||||
expect(store._executingNodeProgress).toEqual({
|
||||
value: 5,
|
||||
max: 10,
|
||||
prompt_id: 'job-1',
|
||||
node: 'node-1'
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleProgressState filtering', () => {
|
||||
it('should always update nodeProgressStatesByJob regardless of active workflow', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-b' }
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
|
||||
const nodes = {
|
||||
'node-1': {
|
||||
value: 5,
|
||||
max: 10,
|
||||
state: 'running' as const,
|
||||
node_id: 'node-1',
|
||||
prompt_id: 'job-1',
|
||||
display_node_id: 'node-1'
|
||||
}
|
||||
}
|
||||
|
||||
fireEvent('progress_state', { prompt_id: 'job-1', nodes })
|
||||
|
||||
// Per-job map should always be updated
|
||||
expect(store.nodeProgressStatesByJob['job-1']).toBeDefined()
|
||||
})
|
||||
|
||||
it('should NOT update nodeProgressStates when job is for a different workflow', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-b' }
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
|
||||
const nodes = {
|
||||
'node-1': {
|
||||
value: 5,
|
||||
max: 10,
|
||||
state: 'running' as const,
|
||||
node_id: 'node-1',
|
||||
prompt_id: 'job-1',
|
||||
display_node_id: 'node-1'
|
||||
}
|
||||
}
|
||||
|
||||
fireEvent('progress_state', { prompt_id: 'job-1', nodes })
|
||||
|
||||
// nodeProgressStates (the "current view") should NOT be updated
|
||||
expect(Object.keys(store.nodeProgressStates)).toHaveLength(0)
|
||||
})
|
||||
|
||||
it('should update nodeProgressStates when job matches active workflow', () => {
|
||||
mockActiveWorkflow.value = { path: '/workflow-a' }
|
||||
store.ensureSessionWorkflowPath('job-1', '/workflow-a')
|
||||
|
||||
const nodes = {
|
||||
'node-1': {
|
||||
value: 5,
|
||||
max: 10,
|
||||
state: 'running' as const,
|
||||
node_id: 'node-1',
|
||||
prompt_id: 'job-1',
|
||||
display_node_id: 'node-1'
|
||||
}
|
||||
}
|
||||
|
||||
fireEvent('progress_state', { prompt_id: 'job-1', nodes })
|
||||
|
||||
expect(store.nodeProgressStates['node-1']).toBeDefined()
|
||||
expect(store.nodeProgressStates['node-1'].state).toBe('running')
|
||||
})
|
||||
})
|
||||
|
||||
describe('multi-tab scenario', () => {
|
||||
it('should isolate progress between two workflows', () => {
|
||||
// Queue jobs from two different workflow tabs
|
||||
store.ensureSessionWorkflowPath('job-a', '/workflow-a')
|
||||
store.ensureSessionWorkflowPath('job-b', '/workflow-b')
|
||||
|
||||
// User is viewing workflow A
|
||||
mockActiveWorkflow.value = { path: '/workflow-a' }
|
||||
|
||||
// Start job-a
|
||||
fireEvent('execution_start', {
|
||||
prompt_id: 'job-a',
|
||||
timestamp: Date.now()
|
||||
})
|
||||
|
||||
// Progress from job-a should show
|
||||
fireEvent('progress', {
|
||||
value: 3,
|
||||
max: 10,
|
||||
prompt_id: 'job-a',
|
||||
node: 'node-1'
|
||||
})
|
||||
expect(store._executingNodeProgress?.value).toBe(3)
|
||||
|
||||
// Progress from job-b should NOT show (different workflow)
|
||||
fireEvent('progress', {
|
||||
value: 7,
|
||||
max: 10,
|
||||
prompt_id: 'job-b',
|
||||
node: 'node-1'
|
||||
})
|
||||
// Should still be 3 from job-a
|
||||
expect(store._executingNodeProgress?.value).toBe(3)
|
||||
})
|
||||
|
||||
it('should show correct progress after switching tabs', () => {
|
||||
store.ensureSessionWorkflowPath('job-a', '/workflow-a')
|
||||
store.ensureSessionWorkflowPath('job-b', '/workflow-b')
|
||||
|
||||
// Start job-a
|
||||
fireEvent('execution_start', {
|
||||
prompt_id: 'job-a',
|
||||
timestamp: Date.now()
|
||||
})
|
||||
|
||||
// User is on workflow A — progress from job-a appears
|
||||
mockActiveWorkflow.value = { path: '/workflow-a' }
|
||||
const nodesA = {
|
||||
'node-1': {
|
||||
value: 5,
|
||||
max: 10,
|
||||
state: 'running' as const,
|
||||
node_id: 'node-1',
|
||||
prompt_id: 'job-a',
|
||||
display_node_id: 'node-1'
|
||||
}
|
||||
}
|
||||
fireEvent('progress_state', { prompt_id: 'job-a', nodes: nodesA })
|
||||
expect(store.nodeProgressStates['node-1']?.value).toBe(5)
|
||||
|
||||
// Switch to workflow B — progress from job-a should no longer update nodeProgressStates
|
||||
mockActiveWorkflow.value = { path: '/workflow-b' }
|
||||
const nodesA2 = {
|
||||
'node-1': {
|
||||
value: 8,
|
||||
max: 10,
|
||||
state: 'running' as const,
|
||||
node_id: 'node-1',
|
||||
prompt_id: 'job-a',
|
||||
display_node_id: 'node-1'
|
||||
}
|
||||
}
|
||||
fireEvent('progress_state', { prompt_id: 'job-a', nodes: nodesA2 })
|
||||
// nodeProgressStates should NOT be updated (still old value from last render)
|
||||
expect(store.nodeProgressStates['node-1']?.value).toBe(5)
|
||||
|
||||
// But nodeProgressStatesByJob should be updated
|
||||
expect(store.nodeProgressStatesByJob['job-a']['node-1'].value).toBe(8)
|
||||
})
|
||||
})
|
||||
|
||||
describe('tab switch rehydration', () => {
|
||||
it('should rehydrate nodeProgressStates from the new workflow on tab switch', async () => {
|
||||
store.ensureSessionWorkflowPath('job-a', '/workflow-a')
|
||||
store.ensureSessionWorkflowPath('job-b', '/workflow-b')
|
||||
|
||||
// Populate per-job maps with progress data
|
||||
mockActiveWorkflow.value = { path: '/workflow-a' }
|
||||
const nodesA = {
|
||||
'node-1': {
|
||||
value: 3,
|
||||
max: 10,
|
||||
state: 'running' as const,
|
||||
node_id: 'node-1',
|
||||
prompt_id: 'job-a',
|
||||
display_node_id: 'node-1'
|
||||
}
|
||||
}
|
||||
fireEvent('progress_state', { prompt_id: 'job-a', nodes: nodesA })
|
||||
expect(store.nodeProgressStates['node-1']?.value).toBe(3)
|
||||
|
||||
mockActiveWorkflow.value = { path: '/workflow-b' }
|
||||
const nodesB = {
|
||||
'node-2': {
|
||||
value: 7,
|
||||
max: 10,
|
||||
state: 'running' as const,
|
||||
node_id: 'node-2',
|
||||
prompt_id: 'job-b',
|
||||
display_node_id: 'node-2'
|
||||
}
|
||||
}
|
||||
fireEvent('progress_state', { prompt_id: 'job-b', nodes: nodesB })
|
||||
expect(store.nodeProgressStates['node-2']?.value).toBe(7)
|
||||
|
||||
// Switch back to workflow A — watcher should rehydrate from job-a
|
||||
mockActiveWorkflow.value = { path: '/workflow-a' }
|
||||
await vi.dynamicImportSettled()
|
||||
// Wait for watcher to fire
|
||||
await new Promise((r) => setTimeout(r, 0))
|
||||
|
||||
expect(store.nodeProgressStates['node-1']?.value).toBe(3)
|
||||
expect(store.nodeProgressStates['node-2']).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should clear nodeProgressStates when switching to a workflow with no jobs', async () => {
|
||||
store.ensureSessionWorkflowPath('job-a', '/workflow-a')
|
||||
|
||||
mockActiveWorkflow.value = { path: '/workflow-a' }
|
||||
const nodesA = {
|
||||
'node-1': {
|
||||
value: 5,
|
||||
max: 10,
|
||||
state: 'running' as const,
|
||||
node_id: 'node-1',
|
||||
prompt_id: 'job-a',
|
||||
display_node_id: 'node-1'
|
||||
}
|
||||
}
|
||||
fireEvent('progress_state', { prompt_id: 'job-a', nodes: nodesA })
|
||||
expect(store.nodeProgressStates['node-1']?.value).toBe(5)
|
||||
|
||||
// Switch to a workflow with no queued jobs
|
||||
mockActiveWorkflow.value = { path: '/workflow-c' }
|
||||
await new Promise((r) => setTimeout(r, 0))
|
||||
|
||||
expect(Object.keys(store.nodeProgressStates)).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { defineStore } from 'pinia'
|
||||
import { computed, ref, shallowRef } from 'vue'
|
||||
import { computed, ref, watch } from 'vue'
|
||||
|
||||
import { useNodeProgressText } from '@/composables/node/useNodeProgressText'
|
||||
import { isCloud } from '@/platform/distribution/types'
|
||||
@@ -33,6 +33,7 @@ import { useExecutionErrorStore } from '@/stores/executionErrorStore'
|
||||
import type { NodeLocatorId } from '@/types/nodeIdentification'
|
||||
import { classifyCloudValidationError } from '@/utils/executionErrorUtil'
|
||||
import { executionIdToNodeLocatorId } from '@/utils/graphTraversalUtil'
|
||||
import { createSessionTabMap } from '@/utils/sessionTabMap'
|
||||
|
||||
interface QueuedJob {
|
||||
/**
|
||||
@@ -72,11 +73,8 @@ export const useExecutionStore = defineStore('execution', () => {
|
||||
*/
|
||||
const jobIdToWorkflowId = ref<Map<string, string>>(new Map())
|
||||
|
||||
/**
|
||||
* Map of job ID to workflow file path in the current session.
|
||||
* Only populated for jobs that are queued in this browser tab.
|
||||
*/
|
||||
const jobIdToSessionWorkflowPath = shallowRef<Map<string, string>>(new Map())
|
||||
const sessionJobPaths = createSessionTabMap('Comfy.Execution.JobPaths')
|
||||
const jobIdToSessionWorkflowPath = sessionJobPaths.map
|
||||
|
||||
const initializingJobIds = ref<Set<string>>(new Set())
|
||||
|
||||
@@ -255,11 +253,12 @@ export const useExecutionStore = defineStore('execution', () => {
|
||||
// before the HTTP response from queuePrompt triggers storeJob.
|
||||
if (!jobIdToSessionWorkflowPath.value.has(activeJobId.value)) {
|
||||
const path = queuedJobs.value[activeJobId.value]?.workflow?.path
|
||||
if (path) ensureSessionWorkflowPath(activeJobId.value, path)
|
||||
if (path) sessionJobPaths.set(activeJobId.value, path)
|
||||
}
|
||||
}
|
||||
|
||||
function handleExecutionCached(e: CustomEvent<ExecutionCachedWsMessage>) {
|
||||
if (!isJobForActiveWorkflow(e.detail.prompt_id)) return
|
||||
if (!activeJob.value) return
|
||||
for (const n of e.detail.nodes) {
|
||||
activeJob.value.nodes[n] = true
|
||||
@@ -275,6 +274,7 @@ export const useExecutionStore = defineStore('execution', () => {
|
||||
}
|
||||
|
||||
// Marks a node as finished when its 'executed' WS message arrives.
function handleExecuted(e: CustomEvent<ExecutedWsMessage>) {
  // Drop messages for jobs queued from a different workflow tab.
  if (!isJobForActiveWorkflow(e.detail.prompt_id)) return
  // No active job to attribute the result to (e.g. message raced job setup).
  if (!activeJob.value) return
  // Record completion keyed by the executed node's id.
  activeJob.value.nodes[e.detail.node] = true
}
|
||||
@@ -335,26 +335,28 @@ export const useExecutionStore = defineStore('execution', () => {
|
||||
function handleProgressState(e: CustomEvent<ProgressStateWsMessage>) {
|
||||
const { nodes, prompt_id: jobId } = e.detail
|
||||
|
||||
// Revoke previews for nodes that are starting to execute
|
||||
// Update the per-job progress map (always, regardless of active tab)
|
||||
const previousForJob = nodeProgressStatesByJob.value[jobId] || {}
|
||||
for (const nodeId in nodes) {
|
||||
const nodeState = nodes[nodeId]
|
||||
if (nodeState.state === 'running' && !previousForJob[nodeId]) {
|
||||
// This node just started executing, revoke its previews
|
||||
// Note that we're doing the *actual* node id instead of the display node id
|
||||
// here intentionally. That way, we don't clear the preview every time a new node
|
||||
// within an expanded graph starts executing.
|
||||
const { revokePreviewsByExecutionId } = useNodeOutputStore()
|
||||
revokePreviewsByExecutionId(nodeId)
|
||||
}
|
||||
}
|
||||
|
||||
// Update the progress states for all nodes
|
||||
nodeProgressStatesByJob.value = {
|
||||
...nodeProgressStatesByJob.value,
|
||||
[jobId]: nodes
|
||||
}
|
||||
evictOldProgressJobs()
|
||||
|
||||
// Only update the "current view" progress if this job belongs to the active workflow tab
|
||||
if (!isJobForActiveWorkflow(jobId)) return
|
||||
|
||||
// Revoke previews for nodes that are starting to execute.
|
||||
// Gated behind isJobForActiveWorkflow so background jobs with overlapping
|
||||
// node IDs don't clear previews in the currently viewed workflow.
|
||||
for (const nodeId in nodes) {
|
||||
const nodeState = nodes[nodeId]
|
||||
if (nodeState.state === 'running' && !previousForJob[nodeId]) {
|
||||
const { revokePreviewsByExecutionId } = useNodeOutputStore()
|
||||
revokePreviewsByExecutionId(nodeId)
|
||||
}
|
||||
}
|
||||
|
||||
nodeProgressStates.value = nodes
|
||||
|
||||
// If we have progress for the currently executing node, update it for backwards compatibility
|
||||
@@ -370,6 +372,7 @@ export const useExecutionStore = defineStore('execution', () => {
|
||||
}
|
||||
|
||||
// Updates the currently-executing-node progress from a 'progress' WS message.
function handleProgress(e: CustomEvent<ProgressWsMessage>) {
  // Only reflect progress for jobs that belong to the active workflow tab.
  if (!isJobForActiveWorkflow(e.detail.prompt_id)) return
  _executingNodeProgress.value = e.detail
}
|
||||
|
||||
@@ -557,25 +560,10 @@ export const useExecutionStore = defineStore('execution', () => {
|
||||
jobIdToWorkflowId.value.set(String(id), String(wid))
|
||||
}
|
||||
if (workflow?.path) {
|
||||
ensureSessionWorkflowPath(String(id), workflow.path)
|
||||
sessionJobPaths.set(String(id), workflow.path)
|
||||
}
|
||||
}
|
||||
|
||||
// ~0.65 MB at capacity (32 char GUID key + 50 char path value)
|
||||
const MAX_SESSION_PATH_ENTRIES = 4000
|
||||
|
||||
function ensureSessionWorkflowPath(jobId: string, path: string) {
|
||||
if (jobIdToSessionWorkflowPath.value.get(jobId) === path) return
|
||||
const next = new Map(jobIdToSessionWorkflowPath.value)
|
||||
next.set(jobId, path)
|
||||
while (next.size > MAX_SESSION_PATH_ENTRIES) {
|
||||
const oldest = next.keys().next().value
|
||||
if (oldest !== undefined) next.delete(oldest)
|
||||
else break
|
||||
}
|
||||
jobIdToSessionWorkflowPath.value = next
|
||||
}
|
||||
|
||||
/**
|
||||
* Register or update a mapping from job ID to workflow ID.
|
||||
*/
|
||||
@@ -617,6 +605,63 @@ export const useExecutionStore = defineStore('execution', () => {
|
||||
return jobIdToSessionWorkflowPath.value.get(activeJobId.value) === path
|
||||
})
|
||||
|
||||
/**
|
||||
* Check whether a job (by prompt_id) was initiated from the currently
|
||||
* active workflow tab. Used to filter incoming WS messages so that
|
||||
* visual state (node outputs, previews, progress indicators) only
|
||||
* applies to the workflow the user is looking at.
|
||||
*
|
||||
* Returns `true` (permissive) when:
|
||||
* - promptId is null/undefined (legacy message without prompt_id)
|
||||
* - promptId is not in the session map (job from before this session
|
||||
* or from another browser tab — graceful degradation)
|
||||
* - No active workflow is open
|
||||
*/
|
||||
function isJobForActiveWorkflow(
|
||||
promptId: string | null | undefined
|
||||
): boolean {
|
||||
if (!promptId) return true
|
||||
const jobPath = jobIdToSessionWorkflowPath.value.get(promptId)
|
||||
if (!jobPath) return true
|
||||
const activePath = workflowStore.activeWorkflow?.path
|
||||
if (!activePath) return true
|
||||
return jobPath === activePath
|
||||
}
|
||||
|
||||
// Rehydrate the "current view" progress when the user switches workflow tabs
|
||||
// so stale progress from the previous tab is not displayed.
|
||||
watch(
|
||||
() => workflowStore.activeWorkflow?.path,
|
||||
(newPath) => {
|
||||
_executingNodeProgress.value = null
|
||||
if (!newPath) {
|
||||
nodeProgressStates.value = {}
|
||||
return
|
||||
}
|
||||
// Find the most recent job that belongs to the new active workflow
|
||||
const jobEntries = Object.entries(nodeProgressStatesByJob.value)
|
||||
for (let i = jobEntries.length - 1; i >= 0; i--) {
|
||||
const [jobId, states] = jobEntries[i]
|
||||
if (jobIdToSessionWorkflowPath.value.get(jobId) === newPath) {
|
||||
nodeProgressStates.value = states
|
||||
const firstRunning = Object.values(states).find(
|
||||
(state) => state.state === 'running'
|
||||
)
|
||||
if (firstRunning) {
|
||||
_executingNodeProgress.value = {
|
||||
value: firstRunning.value,
|
||||
max: firstRunning.max,
|
||||
prompt_id: firstRunning.prompt_id,
|
||||
node: firstRunning.display_node_id || firstRunning.node_id
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
}
|
||||
nodeProgressStates.value = {}
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
isIdle,
|
||||
clientId,
|
||||
@@ -637,6 +682,7 @@ export const useExecutionStore = defineStore('execution', () => {
|
||||
runningWorkflowCount,
|
||||
initializingJobIds,
|
||||
isActiveWorkflowRunning,
|
||||
isJobForActiveWorkflow,
|
||||
isJobInitializing,
|
||||
clearInitializationByJobId,
|
||||
clearInitializationByJobIds,
|
||||
@@ -652,6 +698,6 @@ export const useExecutionStore = defineStore('execution', () => {
|
||||
nodeLocatorIdToExecutionId,
|
||||
jobIdToWorkflowId,
|
||||
jobIdToSessionWorkflowPath,
|
||||
ensureSessionWorkflowPath
|
||||
ensureSessionWorkflowPath: sessionJobPaths.set
|
||||
}
|
||||
})
|
||||
|
||||
166
src/utils/sessionTabMap.test.ts
Normal file
166
src/utils/sessionTabMap.test.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
import { beforeEach, describe, expect, it } from 'vitest'
|
||||
|
||||
import { createSessionTabMap } from '@/utils/sessionTabMap'
|
||||
|
||||
const PREFIX = 'test-prefix'
|
||||
|
||||
beforeEach(() => {
|
||||
sessionStorage.clear()
|
||||
;(window as { name: string }).name = 'test-client'
|
||||
})
|
||||
|
||||
describe('createSessionTabMap', () => {
|
||||
describe('basic operations', () => {
|
||||
it('stores a value readable via map.value.get', () => {
|
||||
const { map, set } = createSessionTabMap(PREFIX)
|
||||
set('node-1', 'tab-a')
|
||||
expect(map.value.get('node-1')).toBe('tab-a')
|
||||
})
|
||||
|
||||
it('overwrites an existing key with a new value', () => {
|
||||
const { map, set } = createSessionTabMap(PREFIX)
|
||||
set('node-1', 'tab-a')
|
||||
set('node-1', 'tab-b')
|
||||
expect(map.value.get('node-1')).toBe('tab-b')
|
||||
expect(map.value.size).toBe(1)
|
||||
})
|
||||
|
||||
it('is a no-op when setting the same key/value pair', () => {
|
||||
const { map, set } = createSessionTabMap(PREFIX)
|
||||
set('node-1', 'tab-a')
|
||||
const refAfterFirst = map.value
|
||||
|
||||
set('node-1', 'tab-a')
|
||||
expect(map.value).toBe(refAfterFirst)
|
||||
})
|
||||
})
|
||||
|
||||
describe('LRU eviction', () => {
|
||||
it('evicts oldest entries when exceeding maxEntries', () => {
|
||||
const { map, set } = createSessionTabMap(PREFIX, 3)
|
||||
set('a', '1')
|
||||
set('b', '2')
|
||||
set('c', '3')
|
||||
set('d', '4')
|
||||
|
||||
expect(map.value.size).toBe(3)
|
||||
expect(map.value.has('a')).toBe(false)
|
||||
expect(map.value.get('b')).toBe('2')
|
||||
expect(map.value.get('c')).toBe('3')
|
||||
expect(map.value.get('d')).toBe('4')
|
||||
})
|
||||
|
||||
it('refreshes key position on update, evicting the actual oldest', () => {
|
||||
const { map, set } = createSessionTabMap(PREFIX, 3)
|
||||
set('a', '1')
|
||||
set('b', '2')
|
||||
set('c', '3')
|
||||
|
||||
// Update 'a' with a new value makes it newest; 'b' is now oldest
|
||||
set('a', 'updated')
|
||||
set('d', '4')
|
||||
|
||||
expect(map.value.size).toBe(3)
|
||||
expect(map.value.has('b')).toBe(false)
|
||||
expect(map.value.get('a')).toBe('updated')
|
||||
expect(map.value.get('c')).toBe('3')
|
||||
expect(map.value.get('d')).toBe('4')
|
||||
})
|
||||
})
|
||||
|
||||
describe('sessionStorage persistence', () => {
|
||||
it('persists data to sessionStorage under the correct key', () => {
|
||||
const { set } = createSessionTabMap(PREFIX)
|
||||
set('node-1', 'tab-a')
|
||||
|
||||
const raw = sessionStorage.getItem(`${PREFIX}:test-client`)
|
||||
expect(raw).not.toBeNull()
|
||||
|
||||
const entries: [string, string][] = JSON.parse(raw!)
|
||||
expect(entries).toEqual([['node-1', 'tab-a']])
|
||||
})
|
||||
|
||||
it('persists multiple entries in insertion order', () => {
|
||||
const { set } = createSessionTabMap(PREFIX)
|
||||
set('x', '1')
|
||||
set('y', '2')
|
||||
|
||||
const entries: [string, string][] = JSON.parse(
|
||||
sessionStorage.getItem(`${PREFIX}:test-client`)!
|
||||
)
|
||||
expect(entries).toEqual([
|
||||
['x', '1'],
|
||||
['y', '2']
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe('restore on creation', () => {
|
||||
it('restores previously persisted data into the new map', () => {
|
||||
const entries: [string, string][] = [
|
||||
['node-1', 'tab-a'],
|
||||
['node-2', 'tab-b']
|
||||
]
|
||||
sessionStorage.setItem(`${PREFIX}:test-client`, JSON.stringify(entries))
|
||||
|
||||
const { map } = createSessionTabMap(PREFIX)
|
||||
expect(map.value.get('node-1')).toBe('tab-a')
|
||||
expect(map.value.get('node-2')).toBe('tab-b')
|
||||
expect(map.value.size).toBe(2)
|
||||
})
|
||||
})
|
||||
|
||||
describe('migration', () => {
|
||||
it('migrates data from a different client key with the same prefix', () => {
|
||||
const entries: [string, string][] = [['node-1', 'tab-a']]
|
||||
sessionStorage.setItem(`${PREFIX}:client-1`, JSON.stringify(entries))
|
||||
;(window as { name: string }).name = 'client-2'
|
||||
|
||||
const { map } = createSessionTabMap(PREFIX)
|
||||
|
||||
expect(map.value.get('node-1')).toBe('tab-a')
|
||||
// Old key is removed
|
||||
expect(sessionStorage.getItem(`${PREFIX}:client-1`)).toBeNull()
|
||||
// Data is persisted under the new key
|
||||
expect(sessionStorage.getItem(`${PREFIX}:client-2`)).not.toBeNull()
|
||||
})
|
||||
|
||||
it('does not migrate data from a different prefix', () => {
|
||||
sessionStorage.setItem(
|
||||
'other-prefix:client-1',
|
||||
JSON.stringify([['x', '1']])
|
||||
)
|
||||
;(window as { name: string }).name = 'client-2'
|
||||
|
||||
const { map } = createSessionTabMap(PREFIX)
|
||||
expect(map.value.size).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('graceful degradation', () => {
|
||||
it('works in-memory when window.name is empty', () => {
|
||||
;(window as { name: string }).name = ''
|
||||
|
||||
const { map, set } = createSessionTabMap(PREFIX)
|
||||
set('node-1', 'tab-a')
|
||||
|
||||
expect(map.value.get('node-1')).toBe('tab-a')
|
||||
})
|
||||
})
|
||||
|
||||
describe('reactivity', () => {
|
||||
it('produces a new Map reference on each set call', () => {
|
||||
const { map, set } = createSessionTabMap(PREFIX)
|
||||
const ref1 = map.value
|
||||
|
||||
set('a', '1')
|
||||
const ref2 = map.value
|
||||
|
||||
set('b', '2')
|
||||
const ref3 = map.value
|
||||
|
||||
expect(ref1).not.toBe(ref2)
|
||||
expect(ref2).not.toBe(ref3)
|
||||
})
|
||||
})
|
||||
})
|
||||
77
src/utils/sessionTabMap.ts
Normal file
77
src/utils/sessionTabMap.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import type { ShallowRef } from 'vue'
|
||||
|
||||
import { shallowRef } from 'vue'
|
||||
|
||||
/**
 * A reactive string→string map scoped to the current browser tab and
 * persisted to sessionStorage (keyed by `prefix:window.name`).
 */
export interface SessionTabMap {
  /**
   * Reactive view of the entries. The Map reference is replaced wholesale
   * on every mutation so shallow watchers fire.
   */
  readonly map: ShallowRef<Map<string, string>>
  /** Inserts or updates a key, evicting oldest entries past capacity, and persists. */
  set(key: string, value: string): void
}
|
||||
|
||||
export function createSessionTabMap(
|
||||
prefix: string,
|
||||
maxEntries: number = 200
|
||||
): SessionTabMap {
|
||||
const capacity = Math.max(0, Math.floor(maxEntries))
|
||||
const map = shallowRef<Map<string, string>>(restore(prefix))
|
||||
|
||||
function set(key: string, value: string): void {
|
||||
if (map.value.get(key) === value) return
|
||||
const next = new Map(map.value)
|
||||
next.delete(key)
|
||||
next.set(key, value)
|
||||
|
||||
while (next.size > capacity) {
|
||||
const oldest = next.keys().next().value
|
||||
if (oldest === undefined) break
|
||||
next.delete(oldest)
|
||||
}
|
||||
|
||||
map.value = next
|
||||
persist(prefix, next)
|
||||
}
|
||||
|
||||
return { map, set }
|
||||
}
|
||||
|
||||
function storageKey(prefix: string): string | null {
|
||||
const clientId = window.name
|
||||
return clientId ? `${prefix}:${clientId}` : null
|
||||
}
|
||||
|
||||
function persist(prefix: string, data: Map<string, string>): void {
|
||||
const key = storageKey(prefix)
|
||||
if (!key) return
|
||||
try {
|
||||
sessionStorage.setItem(key, JSON.stringify(Array.from(data.entries())))
|
||||
} catch {
|
||||
// Graceful degradation
|
||||
}
|
||||
}
|
||||
|
||||
function restore(prefix: string): Map<string, string> {
|
||||
const key = storageKey(prefix)
|
||||
if (!key) return new Map()
|
||||
try {
|
||||
const raw = sessionStorage.getItem(key)
|
||||
if (raw) return new Map(JSON.parse(raw) as [string, string][])
|
||||
return migrate(prefix, key)
|
||||
} catch {
|
||||
return new Map()
|
||||
}
|
||||
}
|
||||
|
||||
function migrate(prefix: string, newKey: string): Map<string, string> {
|
||||
const searchPrefix = `${prefix}:`
|
||||
for (let i = 0; i < sessionStorage.length; i++) {
|
||||
const existingKey = sessionStorage.key(i)
|
||||
if (!existingKey?.startsWith(searchPrefix) || existingKey === newKey)
|
||||
continue
|
||||
const raw = sessionStorage.getItem(existingKey)
|
||||
if (!raw) continue
|
||||
const migrated = new Map(JSON.parse(raw) as [string, string][])
|
||||
persist(prefix, migrated)
|
||||
sessionStorage.removeItem(existingKey)
|
||||
return migrated
|
||||
}
|
||||
return new Map()
|
||||
}
|
||||
Reference in New Issue
Block a user