Compare commits

...

25 Commits

Author SHA1 Message Date
Richard Yu
f3e5b281dd wip2 2025-09-02 21:12:16 -07:00
Richard Yu
d83af149b0 wip 2025-09-02 16:56:40 -07:00
Jennifer Weber
4899c9d25b translations for human friendly auth errors 2025-08-29 21:53:23 -07:00
Jennifer Weber
0bd3c1271d Small fixes after rebase 2025-08-29 11:10:21 -07:00
Jennifer Weber
6eb91e4aed Show signin and signup errors on form 2025-08-29 02:32:06 -07:00
Jennifer Weber
3b3071c975 Fix for maintaining the new item optimization in queue store 2025-08-29 02:32:06 -07:00
Jennifer Weber
68f0275a83 Fix for history items sometimes not appearing again
New items from the history endpoint were being ignored due to the sorting based on priority, and left out of the merge
Fixed by removing that optimization so they all go through merge.
2025-08-29 02:32:06 -07:00
Jennifer Weber
a0d66bb0d7 Fix for deduplicating tasks in queueStore by promptId to take into account sorting differences 2025-08-29 02:32:06 -07:00
Jennifer Weber
1292ae0f14 Add error log when templates are not found 2025-08-29 02:32:03 -07:00
Christian Byrne
8da2b304ef allow updating outputs on custom nodes and inside subgraphs (#4963) 2025-08-29 02:30:34 -07:00
Jennifer Weber
0950da0b43 Update logic for dev server url after cloud https changes
default to staging http for now
env var can be overridden for local in the .env file
2025-08-29 02:30:34 -07:00
Deep Roy
86e2b1fc61 Add analytics for workflow loading (#4966)
Needs to land after https://github.com/Comfy-Org/cloud/pull/398

## Description

- Adds a postCloudAnalytics method in `api.ts`
- Adds a workflow_loaded event
- The event contains 
  - the source (not file type, more like workflow format) one of: 
    - apiJson (I think this is the "prompt" format?)
    - graph (the richest type)
    - template: don't fully understand this but it works
- The actual data for the workflow, depends on the source type
- If available, missingModels and missingNodeTypes, so we can easily
query those

This talks to a new endpoint on the ingest server that is being added.  

## Tests
Tested manually with:
- loading an image from civitAI with missing models
- loading an image from comfy examples with no missing models
- opening a json file in the prompt format (I asked claude to generate
one - this is the format handled by the loadApiJson function)
- opening a template file (claude generated one - this is the format
handled by loadTemplateJson function)
- Testing these for both dragAndDrop and (menu --> open --> open
workflow)

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-4966-Add-analytics-for-workflow-loading-24e6d73d36508170acacefb3125b7017)
by [Unito](https://www.unito.io)
2025-08-29 02:30:34 -07:00
bymyself
4a612b09ed feat: Configure vite dev server for staging cloud testing
- Hardcode DEV_SERVER_COMFYUI_URL to staging cloud URL
- Enable Vue DevTools by default for better DX
- Add SSL certificate handling for all proxy endpoints
- Add optional API key support via STAGING_API_KEY env var
- Bypass multi-user auth to simulate single-user mode
- Add comments explaining the staging setup

This allows developers to test frontend changes against the staging
cloud backend by simply running npm run dev without any env configuration.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-08-29 02:30:34 -07:00
Robin Huang
4a3c3d9c97 Use hostname to determine environment. 2025-08-29 02:30:34 -07:00
Richard Yu
c3c59988f4 sort history by exec start time rather than priority 2025-08-29 02:30:34 -07:00
Richard Yu
e6d3e94a34 Add "as TaskPrompt" 2025-08-29 02:30:34 -07:00
Richard Yu
1c0c501105 update api.ts to handle prompt formats 2025-08-29 02:30:34 -07:00
Richard Yu
980b727ff8 [fix] handle cancelling pending jobs 2025-08-29 02:30:34 -07:00
Robin Huang
40c47a8e67 Fix type error. 2025-08-29 02:30:34 -07:00
Robin Huang
f0f4313afa Add 2025-08-29 02:30:34 -07:00
Robin Huang
cb5894a100 Enable sentry integrations. 2025-08-29 02:30:34 -07:00
Richard Yu
7649feb47f [feat] Update history API to v2 array format and add comprehensive tests
- Migrate from object-based to array-based history response format
- Update /history endpoint to /history_v2 with max_items parameter
- Add lazy loading of workflows via /history_v2/:prompt_id endpoint
- Implement comprehensive browser tests for history API functionality
- Add unit tests for API methods and queue store
- Update TaskItemImpl to support history workflow loading
- Add proper error handling and edge case coverage
- Follow established test patterns for better maintainability

This change improves performance by reducing initial payload size
and enables on-demand workflow loading for history items.
2025-08-29 02:30:31 -07:00
Robin Huang
c27edb7e94 Add notifications via websocket. 2025-08-29 02:25:37 -07:00
Robin Huang
23e881e220 Prevent access without login. 2025-08-29 02:25:37 -07:00
Robin Huang
c5c06b6ba8 Add client_id to query param. 2025-08-29 02:25:37 -07:00
36 changed files with 3413 additions and 125 deletions

View File

@@ -34,17 +34,23 @@ const getContentType = (filename: string, fileType: OutputFileType) => {
}
const setQueueIndex = (task: TaskItem) => {
task.prompt[0] = TaskHistory.queueIndex++
task.prompt.priority = TaskHistory.queueIndex++
}
const setPromptId = (task: TaskItem) => {
task.prompt[1] = uuidv4()
if (!task.prompt.prompt_id || task.prompt.prompt_id === 'prompt-id') {
task.prompt.prompt_id = uuidv4()
}
}
export default class TaskHistory {
static queueIndex = 0
static readonly defaultTask: Readonly<HistoryTaskItem> = {
prompt: [0, 'prompt-id', {}, { client_id: uuidv4() }, []],
prompt: {
priority: 0,
prompt_id: 'prompt-id',
extra_data: { client_id: uuidv4() }
},
outputs: {},
status: {
status_str: 'success',
@@ -66,10 +72,37 @@ export default class TaskHistory {
)
private async handleGetHistory(route: Route) {
const url = route.request().url()
// Handle history_v2/:prompt_id endpoint
const promptIdMatch = url.match(/history_v2\/([^?]+)/)
if (promptIdMatch) {
const promptId = promptIdMatch[1]
const task = this.tasks.find((t) => t.prompt.prompt_id === promptId)
const response: Record<string, any> = {}
if (task) {
response[promptId] = task
}
return route.fulfill({
status: 200,
contentType: 'application/json',
body: JSON.stringify(response)
})
}
// Handle history_v2 list endpoint
// Convert HistoryTaskItem to RawHistoryItem format expected by API
const rawHistoryItems = this.tasks.map((task) => ({
prompt_id: task.prompt.prompt_id,
prompt: task.prompt,
status: task.status,
outputs: task.outputs,
...(task.meta && { meta: task.meta })
}))
return route.fulfill({
status: 200,
contentType: 'application/json',
body: JSON.stringify(this.tasks)
body: JSON.stringify({ history: rawHistoryItems })
})
}
@@ -93,7 +126,7 @@ export default class TaskHistory {
async setupRoutes() {
return this.comfyPage.page.route(
/.*\/api\/(view|history)(\?.*)?$/,
/.*\/api\/(view|history_v2)(\/[^?]*)?(\?.*)?$/,
async (route) => {
const request = route.request()
const method = request.method()

View File

@@ -0,0 +1,131 @@
import {
comfyExpect as expect,
comfyPageFixture as test
} from '../fixtures/ComfyPage'
test.describe('History API v2', () => {
const TEST_PROMPT_ID = 'test-prompt-id'
const TEST_CLIENT_ID = 'test-client'
test('Can fetch history with new v2 format', async ({ comfyPage }) => {
// Set up mocked history with tasks
await comfyPage.setupHistory().withTask(['example.webp']).setupRoutes()
// Verify history_v2 API response format
const result = await comfyPage.page.evaluate(async () => {
try {
const response = await window['app'].api.getHistory()
return { success: true, data: response }
} catch (error) {
console.error('Failed to fetch history:', error)
return { success: false, error: error.message }
}
})
expect(result.success).toBe(true)
expect(result.data).toHaveProperty('History')
expect(Array.isArray(result.data.History)).toBe(true)
expect(result.data.History.length).toBeGreaterThan(0)
const historyItem = result.data.History[0]
// Verify the new prompt structure (object instead of array)
expect(historyItem.prompt).toHaveProperty('priority')
expect(historyItem.prompt).toHaveProperty('prompt_id')
expect(historyItem.prompt).toHaveProperty('extra_data')
expect(typeof historyItem.prompt.priority).toBe('number')
expect(typeof historyItem.prompt.prompt_id).toBe('string')
expect(historyItem.prompt.extra_data).toHaveProperty('client_id')
})
test('Can load workflow from history using history_v2 endpoint', async ({
comfyPage
}) => {
// Simple mock workflow for testing
const mockWorkflow = {
version: 0.4,
nodes: [{ id: 1, type: 'TestNode', pos: [100, 100], size: [200, 100] }],
links: [],
groups: [],
config: {},
extra: {}
}
// Set up history with workflow data
await comfyPage
.setupHistory()
.withTask(['example.webp'], 'images', {
prompt: {
priority: 0,
prompt_id: TEST_PROMPT_ID,
extra_data: {
client_id: TEST_CLIENT_ID,
extra_pnginfo: { workflow: mockWorkflow }
}
}
})
.setupRoutes()
// Load initial workflow to clear canvas
await comfyPage.loadWorkflow('simple_slider')
await comfyPage.nextFrame()
// Load workflow from history
const loadResult = await comfyPage.page.evaluate(async (promptId) => {
try {
const workflow =
await window['app'].api.getWorkflowFromHistory(promptId)
if (workflow) {
await window['app'].loadGraphData(workflow)
return { success: true }
}
return { success: false, error: 'No workflow found' }
} catch (error) {
console.error('Failed to load workflow from history:', error)
return { success: false, error: error.message }
}
}, TEST_PROMPT_ID)
expect(loadResult.success).toBe(true)
// Verify workflow loaded correctly
await comfyPage.nextFrame()
const nodeInfo = await comfyPage.page.evaluate(() => {
try {
const graph = window['app'].graph
return {
success: true,
nodeCount: graph.nodes?.length || 0,
firstNodeType: graph.nodes?.[0]?.type || null
}
} catch (error) {
return { success: false, error: error.message }
}
})
expect(nodeInfo.success).toBe(true)
expect(nodeInfo.nodeCount).toBe(1)
expect(nodeInfo.firstNodeType).toBe('TestNode')
})
test('Handles missing workflow data gracefully', async ({ comfyPage }) => {
// Set up empty history routes
await comfyPage.setupHistory().setupRoutes()
// Test loading from history with invalid prompt_id
const result = await comfyPage.page.evaluate(async () => {
try {
const workflow =
await window['app'].api.getWorkflowFromHistory('invalid-id')
return { success: true, workflow }
} catch (error) {
console.error('Expected error for invalid prompt_id:', error)
return { success: false, error: error.message }
}
})
// Should handle gracefully without throwing
expect(result.success).toBe(true)
expect(result.workflow).toBeNull()
})
})

View File

@@ -187,6 +187,7 @@ test.describe('Workflows sidebar', () => {
test('Can save workflow as with same name', async ({ comfyPage }) => {
await comfyPage.menu.topbar.saveWorkflow('workflow5.json')
await comfyPage.nextFrame()
expect(await comfyPage.menu.workflowsTab.getOpenedWorkflowNames()).toEqual([
'workflow5.json'
])

View File

@@ -32,12 +32,16 @@
</Message>
<!-- Form -->
<SignInForm v-if="isSignIn" @submit="signInWithEmail" />
<SignInForm
v-if="isSignIn"
:auth-error="authError"
@submit="signInWithEmail"
/>
<template v-else>
<Message v-if="userIsInChina" severity="warn" class="mb-4">
{{ t('auth.signup.regionRestrictionChina') }}
</Message>
<SignUpForm v-else @submit="signUpWithEmail" />
<SignUpForm v-else :auth-error="authError" @submit="signUpWithEmail" />
</template>
<!-- Divider -->
@@ -149,6 +153,7 @@ import { useI18n } from 'vue-i18n'
import { useFirebaseAuthActions } from '@/composables/auth/useFirebaseAuthActions'
import { COMFY_PLATFORM_BASE_URL } from '@/config/comfyApi'
import { SignInData, SignUpData } from '@/schemas/signInSchema'
import { translateAuthError } from '@/utils/authErrorTranslation'
import { isInChina } from '@/utils/networkUtil'
import ApiKeyForm from './signin/ApiKeyForm.vue'
@@ -164,32 +169,58 @@ const authActions = useFirebaseAuthActions()
const isSecureContext = window.isSecureContext
const isSignIn = ref(true)
const showApiKeyForm = ref(false)
const authError = ref('')
const toggleState = () => {
isSignIn.value = !isSignIn.value
showApiKeyForm.value = false
authError.value = ''
}
// Custom error handler for inline display
const inlineErrorHandler = (error: unknown) => {
// Set inline error with auth error translation
authError.value = translateAuthError(error)
// Also show toast (original behavior)
authActions.reportError(error)
}
const signInWithGoogle = async () => {
if (await authActions.signInWithGoogle()) {
authError.value = ''
if (await authActions.signInWithGoogle(inlineErrorHandler)()) {
onSuccess()
}
}
const signInWithGithub = async () => {
if (await authActions.signInWithGithub()) {
authError.value = ''
if (await authActions.signInWithGithub(inlineErrorHandler)()) {
onSuccess()
}
}
const signInWithEmail = async (values: SignInData) => {
if (await authActions.signInWithEmail(values.email, values.password)) {
authError.value = ''
if (
await authActions.signInWithEmail(
values.email,
values.password,
inlineErrorHandler
)()
) {
onSuccess()
}
}
const signUpWithEmail = async (values: SignUpData) => {
if (await authActions.signUpWithEmail(values.email, values.password)) {
authError.value = ''
if (
await authActions.signUpWithEmail(
values.email,
values.password,
inlineErrorHandler
)()
) {
onSuccess()
}
}

View File

@@ -59,6 +59,11 @@
}}</small>
</div>
<!-- Auth Error Message -->
<Message v-if="authError" severity="error">
{{ authError }}
</Message>
<!-- Submit Button -->
<ProgressSpinner v-if="loading" class="w-8 h-8" />
<Button
@@ -75,6 +80,7 @@ import { Form, FormSubmitEvent } from '@primevue/forms'
import { zodResolver } from '@primevue/forms/resolvers/zod'
import Button from 'primevue/button'
import InputText from 'primevue/inputtext'
import Message from 'primevue/message'
import Password from 'primevue/password'
import ProgressSpinner from 'primevue/progressspinner'
import { useToast } from 'primevue/usetoast'
@@ -92,6 +98,10 @@ const toast = useToast()
const { t } = useI18n()
defineProps<{
authError?: string
}>()
const emit = defineEmits<{
submit: [values: SignInData]
}>()

View File

@@ -49,6 +49,11 @@
}}</small>
</FormField>
<!-- Auth Error Message -->
<Message v-if="authError" severity="error">
{{ authError }}
</Message>
<!-- Submit Button -->
<Button
type="submit"
@@ -64,6 +69,7 @@ import { zodResolver } from '@primevue/forms/resolvers/zod'
import Button from 'primevue/button'
import Checkbox from 'primevue/checkbox'
import InputText from 'primevue/inputtext'
import Message from 'primevue/message'
import { useI18n } from 'vue-i18n'
import { type SignUpData, signUpSchema } from '@/schemas/signInSchema'
@@ -72,6 +78,10 @@ import PasswordFields from './PasswordFields.vue'
const { t } = useI18n()
defineProps<{
authError?: string
}>()
const emit = defineEmits<{
submit: [values: SignUpData]
}>()

View File

@@ -15,9 +15,9 @@
<script setup lang="ts">
import Tag from 'primevue/tag'
// Global variable from vite build defined in global.d.ts
// eslint-disable-next-line no-undef
const isStaging = !__USE_PROD_CONFIG__
import { isProductionEnvironment } from '@/config/environment'
const isStaging = !isProductionEnvironment()
</script>
<style scoped>

View File

@@ -106,8 +106,8 @@ import NoResultsPlaceholder from '@/components/common/NoResultsPlaceholder.vue'
import VirtualGrid from '@/components/common/VirtualGrid.vue'
import { ComfyNode } from '@/schemas/comfyWorkflowSchema'
import { api } from '@/scripts/api'
import { app } from '@/scripts/app'
import { useLitegraphService } from '@/services/litegraphService'
import { useWorkflowService } from '@/services/workflowService'
import { useCommandStore } from '@/stores/commandStore'
import {
ResultItemImpl,
@@ -126,6 +126,7 @@ const toast = useToast()
const queueStore = useQueueStore()
const settingStore = useSettingStore()
const commandStore = useCommandStore()
const workflowService = useWorkflowService()
const { t } = useI18n()
// Expanded view: show all outputs in a flat list.
@@ -208,8 +209,16 @@ const menuItems = computed<MenuItem[]>(() => {
{
label: t('g.loadWorkflow'),
icon: 'pi pi-file-export',
command: () => menuTargetTask.value?.loadWorkflow(app),
disabled: !menuTargetTask.value?.workflow
command: () => {
if (menuTargetTask.value) {
void workflowService.loadTaskWorkflow(menuTargetTask.value)
}
},
disabled: !(
menuTargetTask.value?.workflow ||
(menuTargetTask.value?.isHistory &&
menuTargetTask.value?.prompt.prompt_id)
)
},
{
label: t('g.goToNode'),

View File

@@ -100,27 +100,33 @@ export const useFirebaseAuthActions = () => {
return await authStore.fetchBalance()
}, reportError)
const signInWithGoogle = wrapWithErrorHandlingAsync(async () => {
return await authStore.loginWithGoogle()
}, reportError)
const signInWithGoogle = (errorHandler = reportError) =>
wrapWithErrorHandlingAsync(async () => {
return await authStore.loginWithGoogle()
}, errorHandler)
const signInWithGithub = wrapWithErrorHandlingAsync(async () => {
return await authStore.loginWithGithub()
}, reportError)
const signInWithGithub = (errorHandler = reportError) =>
wrapWithErrorHandlingAsync(async () => {
return await authStore.loginWithGithub()
}, errorHandler)
const signInWithEmail = wrapWithErrorHandlingAsync(
async (email: string, password: string) => {
const signInWithEmail = (
email: string,
password: string,
errorHandler = reportError
) =>
wrapWithErrorHandlingAsync(async () => {
return await authStore.login(email, password)
},
reportError
)
}, errorHandler)
const signUpWithEmail = wrapWithErrorHandlingAsync(
async (email: string, password: string) => {
const signUpWithEmail = (
email: string,
password: string,
errorHandler = reportError
) =>
wrapWithErrorHandlingAsync(async () => {
return await authStore.register(email, password)
},
reportError
)
}, errorHandler)
const updatePassword = wrapWithErrorHandlingAsync(
async (newPassword: string) => {
@@ -146,6 +152,7 @@ export const useFirebaseAuthActions = () => {
signInWithEmail,
signUpWithEmail,
updatePassword,
accessError
accessError,
reportError
}
}

View File

@@ -18,9 +18,268 @@ import {
type ComfyWidgetConstructorV2,
addValueControlWidgets
} from '@/scripts/widgets'
import { fileNameMappingService } from '@/services/fileNameMappingService'
import { useRemoteWidget } from './useRemoteWidget'
// Common file extensions that indicate file inputs
const FILE_EXTENSIONS = [
'.jpg',
'.jpeg',
'.png',
'.gif',
'.webp',
'.bmp',
'.tiff',
'.svg',
'.safetensors',
'.ckpt',
'.pt',
'.pth',
'.bin'
]
/**
* Check if options contain filename-like values
*/
function hasFilenameOptions(options: any[]): boolean {
return options.some((opt: any) => {
if (typeof opt !== 'string') return false
// Check for common file extensions
const hasExtension = FILE_EXTENSIONS.some((ext) =>
opt.toLowerCase().endsWith(ext)
)
// Check for hash-like filenames (ComfyUI hashed files)
const isHashLike = /^[a-f0-9]{8,}\./i.test(opt)
return hasExtension || isHashLike
})
}
/**
* Apply filename mapping to a widget using a simplified approach
*/
function applyFilenameMappingToWidget(
widget: IComboWidget,
node: LGraphNode,
inputSpec: ComboInputSpec
) {
// Simple approach: just override _displayValue for text display
// Leave all widget functionality intact
console.debug(
`[FilenameMapping] STARTING applyFilenameMappingToWidget for:`,
{
inputName: inputSpec.name,
widgetName: widget.name,
currentOptions: widget.options,
currentValues: Array.isArray(widget.options?.values)
? widget.options.values.slice(0, 3)
: widget.options?.values || 'none'
}
)
// Override serializeValue to ensure hash is used for API
;(widget as any).serializeValue = function () {
// Always return the actual widget value (hash) for serialization
return widget.value
}
// Override _displayValue to show human-readable names
Object.defineProperty(widget, '_displayValue', {
get() {
if ((widget as any).computedDisabled) return ''
// Get current hash value
const hashValue = widget.value
if (typeof hashValue !== 'string') return String(hashValue)
// Try to get human-readable name from cache (deduplicated for display)
const mapping = fileNameMappingService.getCachedMapping('input', true)
const humanName = mapping[hashValue]
// Return human name for display, fallback to hash
return humanName || hashValue
},
configurable: true
})
// Also override the options.values to show human names in dropdown
const originalOptions = widget.options as any
// Store original values array - maintain the same array reference
const rawValues = Array.isArray(originalOptions.values)
? originalOptions.values
: []
console.debug('[FilenameMapping] Initial raw values:', rawValues)
// Create a computed property that returns mapped values
Object.defineProperty(widget.options, 'values', {
get() {
if (!Array.isArray(rawValues)) return rawValues
// Map values to human-readable names (deduplicated for dropdown display)
const mapping = fileNameMappingService.getCachedMapping('input', true)
const mapped = rawValues.map((value: any) => {
if (typeof value === 'string') {
const humanName = mapping[value]
if (humanName) {
console.debug(`[FilenameMapping] Mapped ${value} -> ${humanName}`)
return humanName
}
}
return value
})
console.debug('[FilenameMapping] Returning mapped values:', mapped)
return mapped
},
set(newValues) {
// Update raw values array in place to maintain reference
rawValues.length = 0
if (Array.isArray(newValues)) {
rawValues.push(...newValues)
}
console.debug('[FilenameMapping] Values set to:', rawValues)
// Trigger UI update
node.setDirtyCanvas?.(true, true)
node.graph?.setDirtyCanvas?.(true, true)
},
configurable: true,
enumerable: true
})
// Add helper methods for managing the raw values
;(widget as any).getRawValues = function () {
return rawValues
}
// Add a method to force refresh the dropdown
;(widget as any).refreshMappings = function () {
console.debug('[FilenameMapping] Force refreshing dropdown')
// Force litegraph to re-read the values
const currentValues = widget.options.values
console.debug('[FilenameMapping] Current mapped values:', currentValues)
// Trigger UI update
node.setDirtyCanvas?.(true, true)
node.graph?.setDirtyCanvas?.(true, true)
}
// Override incrementValue and decrementValue for arrow key navigation
;(widget as any).incrementValue = function (options: any) {
// Get the current human-readable value (deduplicated)
const mapping = fileNameMappingService.getCachedMapping('input', true)
const currentHumanName = mapping[widget.value] || widget.value
// Get the values array (which contains human names through our proxy)
const rawValues = widget.options?.values
if (!rawValues || typeof rawValues === 'function') return
const values = Array.isArray(rawValues)
? rawValues
: Object.values(rawValues)
const currentIndex = values.indexOf(currentHumanName as any)
if (currentIndex >= 0 && currentIndex < values.length - 1) {
// Get next value and set it (setValue will handle conversion)
const nextValue = values[currentIndex + 1]
;(widget as any).setValue(nextValue, options)
}
}
;(widget as any).decrementValue = function (options: any) {
// Get the current human-readable value (deduplicated)
const mapping = fileNameMappingService.getCachedMapping('input', true)
const currentHumanName = mapping[widget.value] || widget.value
// Get the values array (which contains human names through our proxy)
const rawValues = widget.options?.values
if (!rawValues || typeof rawValues === 'function') return
const values = Array.isArray(rawValues)
? rawValues
: Object.values(rawValues)
const currentIndex = values.indexOf(currentHumanName as any)
if (currentIndex > 0) {
// Get previous value and set it (setValue will handle conversion)
const prevValue = values[currentIndex - 1]
;(widget as any).setValue(prevValue, options)
}
}
// Override setValue to handle human name selection from dropdown
const originalSetValue = (widget as any).setValue
;(widget as any).setValue = function (selectedValue: any, options?: any) {
if (typeof selectedValue === 'string') {
// Check if this is a human-readable name that needs reverse mapping
// Use deduplicated reverse mapping to handle suffixed names
const reverseMapping = fileNameMappingService.getCachedReverseMapping(
'input',
true
)
const hashValue = reverseMapping[selectedValue] || selectedValue
// Set the hash value
widget.value = hashValue
// Call original setValue with hash value if it exists
if (originalSetValue) {
originalSetValue.call(widget, hashValue, options)
}
// Trigger callback with hash value
if (widget.callback) {
widget.callback.call(widget, hashValue)
}
} else {
widget.value = selectedValue
if (originalSetValue) {
originalSetValue.call(widget, selectedValue, options)
}
if (widget.callback) {
widget.callback.call(widget, selectedValue)
}
}
}
// Override callback to handle human name selection
const originalCallback = widget.callback
widget.callback = function (selectedValue: any) {
if (typeof selectedValue === 'string') {
// Check if this is a human-readable name that needs reverse mapping
// Use deduplicated reverse mapping to handle suffixed names
const reverseMapping = fileNameMappingService.getCachedReverseMapping(
'input',
true
)
const hashValue = reverseMapping[selectedValue] || selectedValue
// Set the hash value
widget.value = hashValue
// Call original callback with hash value
if (originalCallback) {
originalCallback.call(widget, hashValue)
}
} else {
widget.value = selectedValue
if (originalCallback) {
originalCallback.call(widget, selectedValue)
}
}
}
// Trigger async load of mappings and update display when ready
fileNameMappingService
.getMapping('input')
.then(() => {
// Mappings loaded, trigger redraw to update display
node.setDirtyCanvas?.(true, true)
node.graph?.setDirtyCanvas?.(true, true)
})
.catch(() => {
// Silently fail - will show hash values as fallback
})
}
const getDefaultValue = (inputSpec: ComboInputSpec) => {
if (inputSpec.default) return inputSpec.default
if (inputSpec.options?.length) return inputSpec.options[0]
@@ -91,6 +350,31 @@ const addComboWidget = (node: LGraphNode, inputSpec: ComboInputSpec) => {
)
}
// For non-remote combo widgets, check if they contain filenames and apply mapping
if (!inputSpec.remote && inputSpec.options) {
// Check if options contain filename-like values
const hasFilenames = hasFilenameOptions(inputSpec.options)
console.debug(
'[FilenameMapping] Checking combo widget for filename mapping:',
{
inputName: inputSpec.name,
hasFilenames,
optionsCount: inputSpec.options.length,
sampleOptions: inputSpec.options.slice(0, 3)
}
)
if (hasFilenames) {
// Apply filename mapping for display
console.debug(
'[FilenameMapping] Applying filename mapping to widget:',
inputSpec.name
)
applyFilenameMappingToWidget(widget, node, inputSpec)
}
}
return widget
}

View File

@@ -7,6 +7,7 @@ import { IComboWidget } from '@/lib/litegraph/src/types/widgets'
import type { ResultItem, ResultItemType } from '@/schemas/apiSchema'
import type { InputSpec } from '@/schemas/nodeDefSchema'
import type { ComfyWidgetConstructor } from '@/scripts/widgets'
import { fileNameMappingService } from '@/services/fileNameMappingService'
import { useNodeOutputStore } from '@/stores/imagePreviewStore'
import { isImageUploadInput } from '@/types/nodeDefAugmentation'
import { createAnnotatedPath } from '@/utils/formatUtil'
@@ -76,11 +77,49 @@ export const useImageUploadWidget = () => {
fileFilter,
accept,
folder,
onUploadComplete: (output) => {
output.forEach((path) => addToComboValues(fileComboWidget, path))
onUploadComplete: async (output) => {
console.debug('[ImageUpload] Upload complete, output:', output)
// CRITICAL: Refresh mappings FIRST before updating dropdown
// This ensures new hash→human mappings are available when dropdown renders
try {
await fileNameMappingService.refreshMapping('input')
console.debug(
'[ImageUpload] Filename mappings refreshed, updating dropdown'
)
} catch (error) {
console.debug(
'[ImageUpload] Failed to refresh filename mappings:',
error
)
// Continue anyway - will show hash values as fallback
}
// Now add the files to dropdown - addToComboValues will trigger refreshMappings
output.forEach((path) => {
console.debug('[ImageUpload] Adding to combo values:', path)
addToComboValues(fileComboWidget, path)
})
// Set the widget value to the newly uploaded files
// Use the last uploaded file for single selection widgets
const selectedValue = allow_batch ? output : output[output.length - 1]
// @ts-expect-error litegraph combo value type does not support arrays yet
fileComboWidget.value = output
fileComboWidget.callback?.(output)
fileComboWidget.value = selectedValue
fileComboWidget.callback?.(selectedValue)
// Force one more refresh to ensure UI is in sync
if (typeof (fileComboWidget as any).refreshMappings === 'function') {
console.debug('[ImageUpload] Final refreshMappings call for UI sync')
;(fileComboWidget as any).refreshMappings()
}
// Trigger UI update to show human-readable names
node.setDirtyCanvas?.(true, true)
node.graph?.setDirtyCanvas?.(true, true)
console.debug('[ImageUpload] Upload handling complete')
}
})

View File

@@ -5,6 +5,7 @@ import { LGraphNode } from '@/lib/litegraph/src/litegraph'
import { IWidget } from '@/lib/litegraph/src/litegraph'
import type { RemoteWidgetConfig } from '@/schemas/nodeDefSchema'
import { api } from '@/scripts/api'
import { useFirebaseAuthStore } from '@/stores/firebaseAuthStore'
const MAX_RETRIES = 5
const TIMEOUT = 4096
@@ -58,10 +59,21 @@ const fetchData = async (
controller: AbortController
) => {
const { route, response_key, query_params, timeout = TIMEOUT } = config
// Get auth header from Firebase
const authStore = useFirebaseAuthStore()
const authHeader = await authStore.getAuthHeader()
const headers: Record<string, string> = {}
if (authHeader) {
Object.assign(headers, authHeader)
}
const res = await axios.get(route, {
params: query_params,
signal: controller.signal,
timeout
timeout,
headers
})
return response_key ? res.data[response_key] : res.data
}

View File

@@ -1,7 +1,9 @@
export const COMFY_API_BASE_URL = __USE_PROD_CONFIG__
import { isProductionEnvironment } from './environment'
export const COMFY_API_BASE_URL = isProductionEnvironment()
? 'https://api.comfy.org'
: 'https://stagingapi.comfy.org'
export const COMFY_PLATFORM_BASE_URL = __USE_PROD_CONFIG__
export const COMFY_PLATFORM_BASE_URL = isProductionEnvironment()
? 'https://platform.comfy.org'
: 'https://stagingplatform.comfy.org'

18
src/config/environment.ts Normal file
View File

@@ -0,0 +1,18 @@
/**
* Runtime environment configuration that determines if we're in production or staging
* based on the hostname. Replaces the build-time __USE_PROD_CONFIG__ constant.
*/
/**
* Checks if the application is running in production environment
* @returns true if hostname is cloud.comfy.org (production), false otherwise (staging)
*/
export function isProductionEnvironment(): boolean {
// In SSR/Node.js environments or during build, use the environment variable
if (typeof window === 'undefined') {
return process.env.USE_PROD_CONFIG === 'true'
}
// In browser, check the hostname
return window.location.hostname === 'cloud.comfy.org'
}

View File

@@ -1,5 +1,7 @@
import { FirebaseOptions } from 'firebase/app'
import { isProductionEnvironment } from './environment'
const DEV_CONFIG: FirebaseOptions = {
apiKey: 'AIzaSyDa_YMeyzV0SkVe92vBZ1tVikWBmOU5KVE',
authDomain: 'dreamboothy-dev.firebaseapp.com',
@@ -23,6 +25,6 @@ const PROD_CONFIG: FirebaseOptions = {
}
// To test with prod config while using dev server, set USE_PROD_CONFIG=true in .env
export const FIREBASE_CONFIG: FirebaseOptions = __USE_PROD_CONFIG__
export const FIREBASE_CONFIG: FirebaseOptions = isProductionEnvironment()
? PROD_CONFIG
: DEV_CONFIG

View File

@@ -14,6 +14,7 @@ import type { ResultItemType } from '@/schemas/apiSchema'
import type { ComfyNodeDef } from '@/schemas/nodeDefSchema'
import type { DOMWidget } from '@/scripts/domWidget'
import { useAudioService } from '@/services/audioService'
import { fileNameMappingService } from '@/services/fileNameMappingService'
import { useToastStore } from '@/stores/toastStore'
import { NodeLocatorId } from '@/types'
import { getNodeByLocatorId } from '@/utils/graphTraversalUtil'
@@ -66,10 +67,26 @@ async function uploadFile(
if (resp.status === 200) {
const data = await resp.json()
// Add the file to the dropdown list and update the widget value
// Build the file path
let path = data.name
if (data.subfolder) path = data.subfolder + '/' + path
// CRITICAL: Refresh mappings FIRST before updating dropdown
// This ensures new hash→human mappings are available when dropdown renders
try {
await fileNameMappingService.refreshMapping('input')
console.debug(
'[AudioUpload] Filename mappings refreshed, updating dropdown'
)
} catch (error) {
console.debug(
'[AudioUpload] Failed to refresh filename mappings:',
error
)
// Continue anyway - will show hash values as fallback
}
// Now add the file to the dropdown list - any filename proxy will use fresh mappings
// @ts-expect-error fixme ts strict error
if (!audioWidget.options.values.includes(path)) {
// @ts-expect-error fixme ts strict error

View File

@@ -1601,6 +1601,20 @@
"passwordUpdate": {
"success": "Password Updated",
"successDetail": "Your password has been updated successfully"
},
"errors": {
"auth/invalid-email": "Please enter a valid email address.",
"auth/user-disabled": "This account has been disabled. Please contact support.",
"auth/user-not-found": "No account found with this email. Would you like to create a new account?",
"auth/wrong-password": "The password you entered is incorrect. Please try again.",
"auth/email-already-in-use": "An account with this email already exists. Try signing in instead.",
"auth/weak-password": "Password is too weak. Please use a stronger password with at least 6 characters.",
"auth/too-many-requests": "Too many login attempts. Please wait a moment and try again.",
"auth/operation-not-allowed": "This sign-in method is not currently supported.",
"auth/invalid-credential": "Invalid login credentials. Please check your email and password.",
"auth/network-request-failed": "Network error. Please check your connection and try again.",
"auth/popup-closed-by-user": "Sign-in was cancelled. Please try again.",
"auth/cancelled-popup-request": "Sign-in was cancelled. Please try again."
}
},
"validation": {

View File

@@ -36,11 +36,8 @@ Sentry.init({
dsn: __SENTRY_DSN__,
enabled: __SENTRY_ENABLED__,
release: __COMFYUI_FRONTEND_VERSION__,
integrations: [],
autoSessionTracking: false,
defaultIntegrations: false,
normalizeDepth: 8,
tracesSampleRate: 0
tracesSampleRate: 1.0
})
app.directive('tooltip', Tooltip)
app

View File

@@ -6,11 +6,12 @@ import {
createWebHistory
} from 'vue-router'
import { useDialogService } from '@/services/dialogService'
import { useFirebaseAuthStore } from '@/stores/firebaseAuthStore'
import { useUserStore } from '@/stores/userStore'
import { isElectron } from '@/utils/envUtil'
import LayoutDefault from '@/views/layouts/LayoutDefault.vue'
import { useUserStore } from './stores/userStore'
import { isElectron } from './utils/envUtil'
const isFileProtocol = window.location.protocol === 'file:'
const basePath = isElectron() ? '/' : window.location.pathname
@@ -130,4 +131,41 @@ const router = createRouter({
}
})
// Global authentication guard: every navigation requires either a Firebase
// session or an API key; unauthenticated users are shown the sign-in dialog.
router.beforeEach(async (_to, _from, next) => {
  const authStore = useFirebaseAuthStore()
  // Wait for Firebase auth to initialize before deciding anything —
  // getAuthHeader() would otherwise report "no user" during startup.
  if (!authStore.isInitialized) {
    await new Promise<void>((resolve) => {
      // $subscribe fires on every store mutation; resolve once the
      // initialization flag flips, then detach the subscription.
      const unwatch = authStore.$subscribe((_, state) => {
        if (state.isInitialized) {
          unwatch()
          resolve()
        }
      })
    })
  }
  // Check if user is authenticated (Firebase or API key)
  const authHeader = await authStore.getAuthHeader()
  if (!authHeader) {
    // User is not authenticated, show sign-in dialog
    const dialogService = useDialogService()
    const loginSuccess = await dialogService.showSignInDialog()
    if (loginSuccess) {
      // After successful login, proceed to the intended route
      next()
    } else {
      // User cancelled login; next(false) aborts the navigation so the
      // app stays on the current page
      next(false)
    }
  } else {
    // User is authenticated, proceed
    next()
  }
})
export default router

View File

@@ -112,6 +112,11 @@ const zDisplayComponentWsMessage = z.object({
props: z.record(z.string(), z.any()).optional()
})
const zNotificationWsMessage = z.object({
value: z.string(),
id: z.string().optional()
})
const zTerminalSize = z.object({
cols: z.number(),
row: z.number()
@@ -153,15 +158,9 @@ export type DisplayComponentWsMessage = z.infer<
export type NodeProgressState = z.infer<typeof zNodeProgressState>
export type ProgressStateWsMessage = z.infer<typeof zProgressStateWsMessage>
export type FeatureFlagsWsMessage = z.infer<typeof zFeatureFlagsWsMessage>
export type NotificationWsMessage = z.infer<typeof zNotificationWsMessage>
// End of ws messages
const zPromptInputItem = z.object({
inputs: z.record(z.string(), z.any()),
class_type: zNodeType
})
const zPromptInputs = z.record(zPromptInputItem)
const zExtraPngInfo = z
.object({
workflow: zComfyWorkflow
@@ -173,7 +172,6 @@ const zExtraData = z.object({
extra_pnginfo: zExtraPngInfo.optional(),
client_id: z.string()
})
const zOutputsToExecute = z.array(zNodeId)
const zExecutionStartMessage = z.tuple([
z.literal('execution_start'),
@@ -214,13 +212,11 @@ const zStatus = z.object({
messages: z.array(zStatusMessage)
})
const zTaskPrompt = z.tuple([
zQueueIndex,
zPromptId,
zPromptInputs,
zExtraData,
zOutputsToExecute
])
const zTaskPrompt = z.object({
priority: zQueueIndex,
prompt_id: zPromptId,
extra_data: zExtraData
})
const zRunningTaskItem = z.object({
taskType: z.literal('Running'),
@@ -256,6 +252,20 @@ const zHistoryTaskItem = z.object({
meta: zTaskMeta.optional()
})
// Raw history item from backend (without taskType)
const zRawHistoryItem = z.object({
prompt_id: zPromptId,
prompt: zTaskPrompt,
status: zStatus.optional(),
outputs: zTaskOutput,
meta: zTaskMeta.optional()
})
// New API response format: { history: [{prompt_id: "...", ...}, ...] }
const zHistoryResponse = z.object({
history: z.array(zRawHistoryItem)
})
const zTaskItem = z.union([
zRunningTaskItem,
zPendingTaskItem,
@@ -278,6 +288,8 @@ export type RunningTaskItem = z.infer<typeof zRunningTaskItem>
export type PendingTaskItem = z.infer<typeof zPendingTaskItem>
// `/history`
export type HistoryTaskItem = z.infer<typeof zHistoryTaskItem>
export type RawHistoryItem = z.infer<typeof zRawHistoryItem>
export type HistoryResponse = z.infer<typeof zHistoryResponse>
export type TaskItem = z.infer<typeof zTaskItem>
export function validateTaskItem(taskItem: unknown) {

View File

@@ -1,4 +1,5 @@
import axios from 'axios'
import { debounce } from 'es-toolkit/compat'
import defaultClientFeatureFlags from '@/config/clientFeatureFlags.json'
import type {
@@ -13,9 +14,11 @@ import type {
ExecutionSuccessWsMessage,
ExtensionsResponse,
FeatureFlagsWsMessage,
HistoryResponse,
HistoryTaskItem,
LogsRawResponse,
LogsWsMessage,
NotificationWsMessage,
PendingTaskItem,
ProgressStateWsMessage,
ProgressTextWsMessage,
@@ -26,6 +29,7 @@ import type {
StatusWsMessage,
StatusWsMessageStatus,
SystemStats,
TaskPrompt,
User,
UserDataFullInfo
} from '@/schemas/apiSchema'
@@ -35,6 +39,8 @@ import type {
NodeId
} from '@/schemas/comfyWorkflowSchema'
import type { ComfyNodeDef } from '@/schemas/nodeDefSchema'
import { useFirebaseAuthStore } from '@/stores/firebaseAuthStore'
import { useToastStore } from '@/stores/toastStore'
import type { NodeExecutionId } from '@/types/nodeIdentification'
import { WorkflowTemplates } from '@/types/workflowTemplateTypes'
@@ -130,6 +136,7 @@ interface BackendApiCalls {
progress_state: ProgressStateWsMessage
display_component: DisplayComponentWsMessage
feature_flags: FeatureFlagsWsMessage
notification: NotificationWsMessage
}
/** Dictionary of all api calls */
@@ -272,6 +279,81 @@ export class ComfyApi extends EventTarget {
*/
serverFeatureFlags: Record<string, unknown> = {}
/**
* Map of notification toasts by ID
*/
#notificationToasts = new Map<string, any>()
/**
* Map of timers for auto-hiding notifications by ID
*/
#notificationTimers = new Map<string, number>()
/**
 * Handle notification messages (with optional ID for multiple parallel notifications).
 *
 * Each distinct id owns one toast: repeated messages with the same id update
 * the existing toast in place (remove + re-add) instead of stacking new ones.
 * A per-id timer auto-hides the toast 3s after its most recent update.
 */
#handleNotification(value: string, id?: string) {
  try {
    const toastStore = useToastStore()
    // Messages without an explicit id all share a single 'default' slot
    const notificationId = id || 'default'
    console.log(`Updating notification (${notificationId}):`, value)
    // Get existing toast for this ID
    const existingToast = this.#notificationToasts.get(notificationId)
    if (existingToast) {
      // Update existing toast by removing and re-adding with new content
      console.log(`Updating existing notification toast: ${notificationId}`)
      toastStore.remove(existingToast)
      // Update the detail text
      existingToast.detail = value
      toastStore.add(existingToast)
    } else {
      // Create new persistent notification toast
      console.log(`Creating new notification toast: ${notificationId}`)
      const newToast = {
        severity: 'info' as const,
        summary: 'Notification',
        detail: value,
        closable: true
        // No 'life' property so the toast store won't auto-hide it on its
        // own; dismissal is driven by the manual timer set below instead
      }
      this.#notificationToasts.set(notificationId, newToast)
      toastStore.add(newToast)
    }
    // Clear existing timer for this ID and set new one, so the auto-hide
    // countdown restarts on every update for this id
    const existingTimer = this.#notificationTimers.get(notificationId)
    if (existingTimer) {
      clearTimeout(existingTimer)
    }
    const timer = window.setTimeout(() => {
      const toast = this.#notificationToasts.get(notificationId)
      if (toast) {
        console.log(`Auto-hiding notification toast: ${notificationId}`)
        toastStore.remove(toast)
        this.#notificationToasts.delete(notificationId)
        this.#notificationTimers.delete(notificationId)
      }
    }, 3000)
    this.#notificationTimers.set(notificationId, timer)
    console.log('Toast updated successfully')
  } catch (error) {
    // Notification display is best-effort; never let a toast failure
    // propagate into the websocket message handler
    console.error('Error handling notification:', error)
  }
}
/**
* Debounced notification handler to avoid rapid toast updates
*/
#debouncedNotificationHandler = debounce((value: string, id?: string) => {
this.#handleNotification(value, id)
}, 300) // 300ms debounce delay
/**
* The auth token for the comfy org account if the user is logged in.
* This is only used for {@link queuePrompt} now. It is not directly
@@ -311,7 +393,27 @@ export class ComfyApi extends EventTarget {
return this.api_base + route
}
fetchApi(route: string, options?: RequestInit) {
/**
 * Resolves once the Firebase auth store has finished initializing.
 * Returns immediately when initialization has already completed; otherwise
 * subscribes to the store and waits for the initialization flag to flip.
 */
async #waitForAuthInitialization(): Promise<void> {
  const auth = useFirebaseAuthStore()
  if (auth.isInitialized) return

  await new Promise<void>((done) => {
    // Detach the subscription as soon as the flag flips, then resolve.
    const stop = auth.$subscribe((_mutation, state) => {
      if (!state.isInitialized) return
      stop()
      done()
    })
  })
}
async fetchApi(route: string, options?: RequestInit) {
if (!options) {
options = {}
}
@@ -322,6 +424,30 @@ export class ComfyApi extends EventTarget {
options.cache = 'no-cache'
}
// Wait for Firebase auth to be initialized before making any API request
await this.#waitForAuthInitialization()
// Add Firebase JWT token if user is logged in
try {
const authHeader = await useFirebaseAuthStore().getAuthHeader()
if (authHeader) {
if (Array.isArray(options.headers)) {
for (const [key, value] of Object.entries(authHeader)) {
options.headers.push([key, value])
}
} else if (options.headers instanceof Headers) {
for (const [key, value] of Object.entries(authHeader)) {
options.headers.set(key, value)
}
} else {
Object.assign(options.headers, authHeader)
}
}
} catch (error) {
// Silently ignore auth errors to avoid breaking API calls
console.warn('Failed to get auth header:', error)
}
if (Array.isArray(options.headers)) {
options.headers.push(['Comfy-User', this.user])
} else if (options.headers instanceof Headers) {
@@ -551,6 +677,16 @@ export class ComfyApi extends EventTarget {
this.serverFeatureFlags
)
break
case 'notification':
// Display notification in toast with debouncing
console.log(
'Received notification message:',
msg.data.value,
msg.data.id ? `(ID: ${msg.data.id})` : ''
)
this.#debouncedNotificationHandler(msg.data.value, msg.data.id)
this.dispatchCustomEvent(msg.type, msg.data)
break
default:
if (this.#registered.has(msg.type)) {
// Fallback for custom types - calls super direct.
@@ -576,6 +712,35 @@ export class ComfyApi extends EventTarget {
this.#createSocket()
}
/**
 * Test method to simulate a notification message (for development/testing).
 * Round-trips the payload through JSON exactly like a real websocket frame
 * and feeds it through the same debounced handler + custom event dispatch.
 */
testNotification(message: string = 'Test notification message', id?: string) {
  console.log(
    'Testing notification with message:',
    message,
    id ? `(ID: ${id})` : ''
  )

  // Serialize and re-parse to mimic a websocket frame's JSON payload
  const payload = JSON.stringify({
    type: 'notification',
    data: { value: message, id }
  })
  const msg = JSON.parse(payload)

  // Mirror the 'notification' branch of the websocket message handler
  if (msg.type === 'notification') {
    console.log(
      'Received notification message:',
      msg.data.value,
      msg.data.id ? `(ID: ${msg.data.id})` : ''
    )
    this.#debouncedNotificationHandler(msg.data.value, msg.data.id)
    this.dispatchCustomEvent(msg.type, msg.data)
  }
}
/**
* Gets a list of extension urls
*/
@@ -739,6 +904,28 @@ export class ComfyApi extends EventTarget {
return this.getHistory()
}
/**
 * Parses queue prompt data from array or object format.
 *
 * Legacy queue entries arrive as a tuple:
 *   [priority, prompt_id, prompt_inputs, extra_data, outputs_to_execute]
 * extra_data lives at index 3 (the legacy UI reads prompt[3].extra_pnginfo),
 * NOT as a property of the prompt inputs at index 2. Newer endpoints already
 * return the normalized object form and are passed through unchanged.
 *
 * @param rawPrompt The raw prompt data from the API
 * @returns Normalized TaskPrompt object
 */
private parseQueuePrompt(rawPrompt: any): TaskPrompt {
  if (Array.isArray(rawPrompt)) {
    // Tuple format: skip index 2 (prompt inputs) and take extra_data from
    // index 3, where the backend actually places it.
    const [priority, prompt_id, , extra_data] = rawPrompt
    return {
      priority,
      prompt_id,
      // Fall back to a minimal extra_data so downstream property access
      // (e.g. extra_data.extra_pnginfo) stays safe on malformed entries
      extra_data: extra_data ?? { client_id: '' }
    }
  }
  return rawPrompt as TaskPrompt
}
/**
* Gets the current state of the queue
* @returns The currently running and queued items
@@ -752,15 +939,17 @@ export class ComfyApi extends EventTarget {
const data = await res.json()
return {
// Running action uses a different endpoint for cancelling
Running: data.queue_running.map((prompt: Record<number, any>) => ({
Running: data.queue_running.map((prompt: any) => ({
taskType: 'Running',
prompt,
// prompt[1] is the prompt id
remove: { name: 'Cancel', cb: () => api.interrupt(prompt[1]) }
prompt: this.parseQueuePrompt(prompt),
remove: {
name: 'Cancel',
cb: () => api.interrupt(this.parseQueuePrompt(prompt).prompt_id)
}
})),
Pending: data.queue_pending.map((prompt: Record<number, any>) => ({
Pending: data.queue_pending.map((prompt: any) => ({
taskType: 'Pending',
prompt
prompt: this.parseQueuePrompt(prompt)
}))
}
} catch (error) {
@@ -777,13 +966,17 @@ export class ComfyApi extends EventTarget {
max_items: number = 200
): Promise<{ History: HistoryTaskItem[] }> {
try {
const res = await this.fetchApi(`/history?max_items=${max_items}`)
const json: Promise<HistoryTaskItem[]> = await res.json()
const res = await this.fetchApi(`/history_v2?max_items=${max_items}`)
const json: HistoryResponse = await res.json()
// Extract history data from new format: { history: [{prompt_id: "...", ...}, ...] }
return {
History: Object.values(json).map((item) => ({
...item,
taskType: 'History'
}))
History: json.history.map(
(item): HistoryTaskItem => ({
...item,
taskType: 'History'
})
)
}
} catch (error) {
console.error(error)
@@ -791,6 +984,33 @@ export class ComfyApi extends EventTarget {
}
}
/**
 * Gets workflow data for a specific prompt from history.
 * @param prompt_id The prompt ID to fetch workflow for
 * @returns The workflow stored with that prompt, or null when the prompt is
 *   unknown, carries no workflow, or the request fails
 */
async getWorkflowFromHistory(
  prompt_id: string
): Promise<ComfyWorkflowJSON | null> {
  try {
    // Response shape from /history_v2/{prompt_id}:
    // { [prompt_id]: { prompt: {priority, prompt_id, extra_data}, outputs, status } }
    const response = await this.fetchApi(`/history_v2/${prompt_id}`)
    const body = await response.json()
    const entry = body[prompt_id]
    // The workflow, when present, lives at prompt.extra_data.extra_pnginfo.workflow
    const workflow = entry?.prompt?.extra_data?.extra_pnginfo?.workflow
    return workflow || null
  } catch (error) {
    console.error(`Failed to fetch workflow for prompt ${prompt_id}:`, error)
    return null
  }
}
/**
* Gets system & device stats
* @returns System stats such as python version, OS, per device info
@@ -1055,6 +1275,28 @@ export class ComfyApi extends EventTarget {
getServerFeatures(): Record<string, unknown> {
return { ...this.serverFeatureFlags }
}
/**
 * Posts analytics event to cloud analytics service.
 * @param eventName The name of the analytics event
 * @param eventData The event data (any JSON-serializable object; typed as
 *   `unknown` rather than `any` so callers keep type checking — every
 *   existing call site remains valid since `unknown` accepts any argument)
 * @returns Promise resolving to the raw fetch Response (status not checked here)
 */
async postCloudAnalytics(
  eventName: string,
  eventData: unknown
): Promise<Response> {
  return this.fetchApi(this.internalURL('/cloud_analytics'), {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json'
    },
    body: JSON.stringify({
      event_name: eventName,
      event_data: eventData
    })
  })
}
}
export const api = new ComfyApi()

View File

@@ -324,6 +324,14 @@ export class ComfyApp {
return '&rand=' + Math.random()
}
// Returns a '&client_id=...' query-string fragment when this window has a
// client id stored in window.name, otherwise an empty string.
getClientIdParam() {
  const clientId = window.name
  return clientId ? `&client_id=${clientId}` : ''
}
static onClipspaceEditorSave() {
if (ComfyApp.clipspace_return_node) {
ComfyApp.pasteFromClipspace(ComfyApp.clipspace_return_node)
@@ -989,6 +997,10 @@ export class ComfyApp {
if (!templateData?.templates) {
return
}
api.postCloudAnalytics('load_workflow', {
source: 'template',
sourceData: { templateData }
})
const old = localStorage.getItem('litegrapheditor_clipboard')
@@ -1269,6 +1281,12 @@ export class ComfyApp {
const paths = await api.getFolderPaths()
this.#showMissingModelsError(missingModels, paths)
}
api.postCloudAnalytics('load_workflow', {
source: 'graph',
graph: this.graph.asSerialisable(),
missingNodeTypes,
missingModels
})
await useExtensionService().invokeExtensionsAsync(
'afterConfigureGraph',
missingNodeTypes
@@ -1577,6 +1595,11 @@ export class ComfyApp {
const missingNodeTypes = Object.values(apiData).filter(
(n) => !LiteGraph.registered_node_types[n.class_type]
)
api.postCloudAnalytics('load_workflow', {
source: 'api_json',
missingNodeTypes,
apiJson: apiData
})
if (missingNodeTypes.length) {
this.#showMissingNodesError(missingNodeTypes.map((t) => t.class_type))
return

View File

@@ -264,15 +264,21 @@ class ComfyList {
? item.remove
: {
name: 'Delete',
cb: () => api.deleteItem(this.#type, item.prompt[1])
cb: () =>
api.deleteItem(
this.#type,
Array.isArray(item.prompt)
? item.prompt[1]
: item.prompt.prompt_id
)
}
return $el('div', { textContent: item.prompt[0] + ': ' }, [
return $el('div', { textContent: item.prompt.priority + ': ' }, [
$el('button', {
textContent: 'Load',
onclick: async () => {
await app.loadGraphData(
// @ts-expect-error fixme ts strict error
item.prompt[3].extra_pnginfo.workflow,
item.prompt.extra_data.extra_pnginfo.workflow,
true,
false
)

View File

@@ -0,0 +1,401 @@
import { api } from '@/scripts/api'
export type FileType = 'input' | 'output' | 'temp'
export interface FileNameMapping {
[hashFilename: string]: string // hash -> human readable name
}
export interface CacheEntry {
data: FileNameMapping
dedupData?: FileNameMapping // Deduplicated mapping with unique display names
timestamp: number
error?: Error | null
fetchPromise?: Promise<FileNameMapping>
failed?: boolean
}
/**
 * Service for fetching and caching filename mappings from the backend.
 * Maps SHA256 hash filenames to their original human-readable names.
 *
 * Caching behavior: one entry per file type with a 5-minute TTL; concurrent
 * fetches for the same type are deduplicated, and failed fetches are retried
 * at most every 30 seconds. All fetch paths degrade to an empty mapping
 * rather than throwing, so callers can always fall back to hash names.
 */
export class FileNameMappingService {
  // One CacheEntry per file type; each entry carries its own fetch state
  private cache = new Map<FileType, CacheEntry>()
  // Cache lifetime before data is considered stale
  private readonly TTL = 5 * 60 * 1000 // 5 minutes

  /**
   * Get filename mapping for the specified file type.
   * Note: while a fetch is already in flight, or shortly after a failure,
   * this returns the last known (possibly empty) data instead of waiting.
   * @param fileType - The type of files to get mappings for
   * @returns Promise resolving to the filename mapping
   */
  async getMapping(fileType: FileType = 'input'): Promise<FileNameMapping> {
    const cached = this.cache.get(fileType)

    // Return cached data if valid and not expired
    if (cached && !this.isExpired(cached) && !cached.failed) {
      return cached.data
    }

    // Return cached data if we're already fetching or if previous fetch failed recently
    if (cached?.fetchPromise || (cached?.failed && !this.shouldRetry(cached))) {
      return cached?.data ?? {}
    }

    // Fetch new data
    return this.fetchMapping(fileType)
  }

  /**
   * Get human-readable filename from hash filename.
   * @param hashFilename - The SHA256 hash filename
   * @param fileType - The type of file
   * @returns Promise resolving to human-readable name or original if not found
   */
  async getHumanReadableName(
    hashFilename: string,
    fileType: FileType = 'input'
  ): Promise<string> {
    try {
      const mapping = await this.getMapping(fileType)
      // Unmapped hashes fall through to the hash itself
      return mapping[hashFilename] ?? hashFilename
    } catch (error) {
      console.warn(
        `Failed to get human readable name for ${hashFilename}:`,
        error
      )
      return hashFilename
    }
  }

  /**
   * Apply filename mapping to an array of hash filenames.
   * @param hashFilenames - Array of SHA256 hash filenames
   * @param fileType - The type of files
   * @returns Promise resolving to array of human-readable names
   */
  async applyMappingToArray(
    hashFilenames: string[],
    fileType: FileType = 'input'
  ): Promise<string[]> {
    try {
      const mapping = await this.getMapping(fileType)
      return hashFilenames.map((filename) => mapping[filename] ?? filename)
    } catch (error) {
      // Best-effort: on failure the original hash names are returned unchanged
      console.warn('Failed to apply filename mapping:', error)
      return hashFilenames
    }
  }

  /**
   * Get cached mapping synchronously (returns empty object if not cached).
   * @param fileType - The file type to get cached mapping for
   * @param deduplicated - Whether to return deduplicated names for display
   * @returns The cached mapping or empty object
   */
  getCachedMapping(
    fileType: FileType = 'input',
    deduplicated: boolean = false
  ): FileNameMapping {
    const cached = this.cache.get(fileType)
    if (cached && !this.isExpired(cached) && !cached.failed) {
      // Return deduplicated mapping if requested and available
      if (deduplicated && cached.dedupData) {
        return cached.dedupData
      }
      const result = cached.data
      console.debug(
        `[FileNameMapping] getCachedMapping returning cached data:`,
        {
          fileType,
          deduplicated,
          mappingCount: Object.keys(result).length,
          sampleMappings: Object.entries(result).slice(0, 3)
        }
      )
      return result
    }
    // Expired, failed, or never-fetched entries all read as empty
    console.debug(
      `[FileNameMapping] getCachedMapping returning empty object for ${fileType} (cache miss)`
    )
    return {}
  }

  /**
   * Get reverse mapping (human-readable name to hash) synchronously.
   * Note: when deduplicated is false and several hashes share one human
   * name, later entries overwrite earlier ones; pass deduplicated=true for
   * unique keys.
   * @param fileType - The file type to get reverse mapping for
   * @param deduplicated - Whether to use deduplicated names
   * @returns The reverse mapping object
   */
  getCachedReverseMapping(
    fileType: FileType = 'input',
    deduplicated: boolean = false
  ): Record<string, string> {
    const mapping = this.getCachedMapping(fileType, deduplicated)
    const reverseMapping: Record<string, string> = {}

    // Build reverse mapping: humanName -> hashName
    for (const [hash, humanName] of Object.entries(mapping)) {
      reverseMapping[humanName] = hash
    }

    return reverseMapping
  }

  /**
   * Convert a human-readable name back to its hash filename.
   * @param humanName - The human-readable filename
   * @param fileType - The file type
   * @returns The hash filename or the original if no mapping exists
   */
  getHashFromHumanName(
    humanName: string,
    fileType: FileType = 'input'
  ): string {
    const reverseMapping = this.getCachedReverseMapping(fileType)
    return reverseMapping[humanName] ?? humanName
  }

  /**
   * Invalidate cached mapping for a specific file type.
   * @param fileType - The file type to invalidate, or undefined to clear all
   */
  invalidateCache(fileType?: FileType): void {
    if (fileType) {
      this.cache.delete(fileType)
    } else {
      this.cache.clear()
    }
  }

  /**
   * Refresh the mapping for a specific file type by clearing cache and fetching new data.
   * @param fileType - The file type to refresh
   * @returns Promise resolving to the new mapping
   */
  async refreshMapping(fileType: FileType = 'input'): Promise<FileNameMapping> {
    console.debug(`[FileNameMapping] Refreshing mapping for ${fileType}`)
    this.invalidateCache(fileType)
    const freshMapping = await this.getMapping(fileType)
    console.debug(`[FileNameMapping] Fresh mapping fetched:`, {
      fileType,
      mappingCount: Object.keys(freshMapping).length,
      sampleMappings: Object.entries(freshMapping).slice(0, 3)
    })
    return freshMapping
  }

  /**
   * Ensures mappings are loaded and cached for immediate synchronous access.
   * Use this to preload mappings before widget creation.
   * @param fileType - The file type to preload
   * @returns Promise that resolves when mappings are loaded
   */
  async ensureMappingsLoaded(fileType: FileType = 'input'): Promise<void> {
    try {
      await this.getMapping(fileType)
    } catch (error) {
      // Errors are already handled in getMapping/performFetch
      // This ensures we don't break the app initialization
      console.debug(
        '[FileNameMappingService] Preload completed with fallback to empty mapping'
      )
    }
  }

  // Fetches and caches the mapping for one file type, deduplicating
  // concurrent requests via the entry's fetchPromise.
  private async fetchMapping(fileType: FileType): Promise<FileNameMapping> {
    const cacheKey = fileType
    let entry = this.cache.get(cacheKey)

    if (!entry) {
      entry = { data: {}, timestamp: 0 }
      this.cache.set(cacheKey, entry)
    }

    // Prevent concurrent requests for the same fileType
    if (entry.fetchPromise) {
      return entry.fetchPromise
    }

    // Set up fetch promise to prevent concurrent requests
    entry.fetchPromise = this.performFetch(fileType)

    try {
      const data = await entry.fetchPromise
      // Update cache with successful result
      entry.data = data
      entry.dedupData = this.deduplicateMapping(data)
      entry.timestamp = Date.now()
      entry.error = null
      entry.failed = false
      return data
    } catch (error) {
      // Should not happen as performFetch now returns empty mapping on error
      // But keep for safety
      entry.error = error instanceof Error ? error : new Error(String(error))
      entry.failed = true
      console.debug(`[FileNameMappingService] Using fallback for ${fileType}`)
      return entry.data // Return existing data or empty object
    } finally {
      // Clear the promise after completion
      entry.fetchPromise = undefined
    }
  }

  // Performs the actual HTTP fetch and validates the payload. Never throws:
  // every failure path degrades to an empty mapping.
  private async performFetch(fileType: FileType): Promise<FileNameMapping> {
    // Check if api is available
    if (!api || typeof api.fetchApi !== 'function') {
      console.warn(
        '[FileNameMappingService] API not available, returning empty mapping'
      )
      return {}
    }

    let response: Response
    try {
      // NOTE(review): endpoint is not parameterized by fileType — presumably
      // /files/mappings returns input mappings only; confirm against backend
      response = await api.fetchApi(`/files/mappings`)
    } catch (error) {
      console.warn(
        '[FileNameMappingService] Network error fetching mappings:',
        error
      )
      return {} // Return empty mapping instead of throwing
    }

    if (!response.ok) {
      console.warn(
        `[FileNameMappingService] Server returned ${response.status} ${response.statusText}, using empty mapping`
      )
      return {} // Graceful degradation
    }

    let data: any
    try {
      // Check if response has json method
      if (typeof response.json !== 'function') {
        console.warn('[FileNameMappingService] Response has no json() method')
        return {}
      }
      data = await response.json()
    } catch (jsonError) {
      console.warn(
        '[FileNameMappingService] Failed to parse JSON response:',
        jsonError
      )
      return {} // Return empty mapping on parse error
    }

    // Validate response structure
    if (typeof data !== 'object' || data === null || Array.isArray(data)) {
      console.warn(
        '[FileNameMappingService] Invalid response format, expected object'
      )
      return {}
    }

    // Validate and filter entries: keep only string -> string pairs
    const validEntries: FileNameMapping = {}
    let invalidEntryCount = 0

    for (const [key, value] of Object.entries(data)) {
      if (typeof key === 'string' && typeof value === 'string') {
        validEntries[key] = value
      } else {
        invalidEntryCount++
      }
    }

    if (invalidEntryCount > 0) {
      console.debug(
        `[FileNameMappingService] Filtered out ${invalidEntryCount} invalid entries`
      )
    }

    console.debug(
      `[FileNameMappingService] Loaded ${Object.keys(validEntries).length} mappings for '${fileType}'`
    )

    return validEntries
  }

  // True when the entry is older than TTL
  private isExpired(entry: CacheEntry): boolean {
    return Date.now() - entry.timestamp > this.TTL
  }

  private shouldRetry(entry: CacheEntry): boolean {
    // Allow retry after 30 seconds for failed requests
    return entry.timestamp > 0 && Date.now() - entry.timestamp > 30000
  }

  /**
   * Deduplicate human-readable names when multiple hashes map to the same name.
   * Adds a suffix to duplicate names to make them unique.
   * @param mapping - The original hash -> human name mapping
   * @returns A new mapping with deduplicated human names
   */
  private deduplicateMapping(mapping: FileNameMapping): FileNameMapping {
    const dedupMapping: FileNameMapping = {}
    const nameCount = new Map<string, number>()
    const nameToHashes = new Map<string, string[]>()

    // First pass: count occurrences of each human name
    for (const [hash, humanName] of Object.entries(mapping)) {
      const count = nameCount.get(humanName) || 0
      nameCount.set(humanName, count + 1)

      // Track which hashes map to this human name
      const hashes = nameToHashes.get(humanName) || []
      hashes.push(hash)
      nameToHashes.set(humanName, hashes)
    }

    // Second pass: create deduplicated names
    const nameIndex = new Map<string, number>()

    for (const [hash, humanName] of Object.entries(mapping)) {
      const count = nameCount.get(humanName) || 1

      if (count === 1) {
        // No duplicates, use original name
        dedupMapping[hash] = humanName
      } else {
        // Has duplicates, add suffix
        const currentIndex = (nameIndex.get(humanName) || 0) + 1
        nameIndex.set(humanName, currentIndex)

        // Extract file extension if present
        const lastDotIndex = humanName.lastIndexOf('.')
        let baseName = humanName
        let extension = ''

        if (lastDotIndex > 0 && lastDotIndex < humanName.length - 1) {
          baseName = humanName.substring(0, lastDotIndex)
          extension = humanName.substring(lastDotIndex)
        }

        // Add suffix: use first 8 chars of hash (without extension)
        // Remove extension from hash if present
        const hashWithoutExt = hash.includes('.')
          ? hash.substring(0, hash.lastIndexOf('.'))
          : hash
        const hashSuffix = hashWithoutExt.substring(0, 8)
        dedupMapping[hash] = `${baseName}_${hashSuffix}${extension}`
      }
    }

    console.debug('[FileNameMappingService] Deduplicated mapping:', {
      original: Object.keys(mapping).length,
      duplicates: Array.from(nameCount.entries()).filter(
        ([_, count]) => count > 1
      ),
      sample: Object.entries(dedupMapping).slice(0, 5)
    })

    return dedupMapping
  }
}

// Singleton instance
export const fileNameMappingService = new FileNameMappingService()

View File

@@ -5,10 +5,13 @@ import { LGraph, LGraphCanvas } from '@/lib/litegraph/src/litegraph'
import type { SerialisableGraph, Vector2 } from '@/lib/litegraph/src/litegraph'
import { useWorkflowThumbnail } from '@/renderer/thumbnail/composables/useWorkflowThumbnail'
import { ComfyWorkflowJSON } from '@/schemas/comfyWorkflowSchema'
import { api } from '@/scripts/api'
import { app } from '@/scripts/app'
import { blankGraph, defaultGraph } from '@/scripts/defaultGraph'
import { downloadBlob } from '@/scripts/utils'
import { useDomWidgetStore } from '@/stores/domWidgetStore'
import { useNodeOutputStore } from '@/stores/imagePreviewStore'
import { TaskItemImpl } from '@/stores/queueStore'
import { useSettingStore } from '@/stores/settingStore'
import { useToastStore } from '@/stores/toastStore'
import { ComfyWorkflow, useWorkflowStore } from '@/stores/workflowStore'
@@ -16,6 +19,7 @@ import { useWorkspaceStore } from '@/stores/workspaceStore'
import { appendJsonExt, generateUUID } from '@/utils/formatUtil'
import { useDialogService } from './dialogService'
import { useExtensionService } from './extensionService'
export const useWorkflowService = () => {
const settingStore = useSettingStore()
@@ -154,6 +158,47 @@ export const useWorkflowService = () => {
await app.loadGraphData(blankGraph)
}
/**
 * Load a workflow from a task item (queue/history).
 *
 * Queue items carry their workflow inline; history items do not, so for
 * those the workflow is fetched from `/history_v2/{prompt_id}` via
 * `api.getWorkflowFromHistory`. If no workflow data can be resolved,
 * the function returns silently without changing the current graph.
 * After loading the graph, any task outputs are pushed into the node
 * output store (keyed by execution id so subgraph outputs resolve) and
 * extensions are notified.
 * @param task The task item to load the workflow from
 */
const loadTaskWorkflow = async (task: TaskItemImpl) => {
let workflowData = task.workflow
// History items don't include workflow data - fetch from API
if (task.isHistory) {
const promptId = task.prompt.prompt_id
if (promptId) {
// getWorkflowFromHistory resolves to null when the prompt has no
// embedded workflow; normalize that to undefined for the guard below.
workflowData = (await api.getWorkflowFromHistory(promptId)) || undefined
}
}
if (!workflowData) {
return
}
// toRaw strips the Vue reactivity proxy before handing data to the app
await app.loadGraphData(toRaw(workflowData))
if (task.outputs) {
const nodeOutputsStore = useNodeOutputStore()
const rawOutputs = toRaw(task.outputs)
// Set outputs by execution ID to account for outputs inside of subgraphs
for (const nodeExecutionId in rawOutputs) {
nodeOutputsStore.setNodeOutputsByExecutionId(
nodeExecutionId,
rawOutputs[nodeExecutionId]
)
}
// Invoke extension (e.g., 3D nodes) hooks to allow them to update
useExtensionService().invokeExtensions(
'onNodeOutputsUpdated',
app.nodeOutputs
)
}
}
/**
* Reload the current workflow
* This is used to refresh the node definitions update, e.g. when the locale changes.
@@ -402,6 +447,7 @@ export const useWorkflowService = () => {
saveWorkflow,
loadDefaultWorkflow,
loadBlankWorkflow,
loadTaskWorkflow,
reloadCurrentWorkflow,
openWorkflow,
closeWorkflow,

View File

@@ -86,6 +86,16 @@ export const useFirebaseAuthStore = defineStore('firebaseAuth', () => {
currentUser.value = user
isInitialized.value = true
// Identify the signed-in user in Mixpanel (when the SDK is loaded on
// window) and attach their profile properties.
// NOTE: these must be two separate statements. The previous chained form
// `.identify(user.uid)(window as any).mixpanel.people.set(...)` invoked
// the return value of identify() as a function, throwing at runtime.
if (user && (window as any).mixpanel) {
  ;(window as any).mixpanel.identify(user.uid)
  ;(window as any).mixpanel.people.set({
    $email: user.email,
    $name: user.displayName,
    $created: user.metadata.creationTime
  })
}
// Reset balance when auth state changes
balance.value = null
lastBalanceUpdateTime.value = null

View File

@@ -90,10 +90,13 @@ export const useNodeOutputStore = defineStore('nodeOutput', () => {
const rand = app.getRandParam()
const previewParam = getPreviewParam(node, outputs)
const clientIdParam = app.getClientIdParam()
return outputs.images.map((image) => {
const imgUrlPart = new URLSearchParams(image)
return api.apiURL(`/view?${imgUrlPart}${previewParam}${rand}`)
return api.apiURL(
`/view?${imgUrlPart}${previewParam}${rand}${clientIdParam}`
)
})
}

View File

@@ -59,6 +59,11 @@ export class ResultItemImpl {
params.set('type', this.type)
params.set('subfolder', this.subfolder)
const clientId = window.name
if (clientId) {
params.set('client_id', clientId)
}
if (this.format) {
params.set('format', this.format)
}
@@ -271,23 +276,15 @@ export class TaskItemImpl {
}
get queueIndex() {
return this.prompt[0]
return this.prompt.priority
}
get promptId() {
return this.prompt[1]
}
get promptInputs() {
return this.prompt[2]
return this.prompt.prompt_id
}
get extraData() {
return this.prompt[3]
}
get outputsToExecute() {
return this.prompt[4]
return this.prompt.extra_data
}
get extraPngInfo() {
@@ -403,13 +400,11 @@ export class TaskItemImpl {
(output: ResultItemImpl, i: number) =>
new TaskItemImpl(
this.taskType,
[
this.queueIndex,
`${this.promptId}-${i}`,
this.promptInputs,
this.extraData,
this.outputsToExecute
],
{
priority: this.queueIndex,
prompt_id: `${this.promptId}-${i}`,
extra_data: this.extraData
},
this.status,
{
[output.nodeId]: {
@@ -422,6 +417,13 @@ export class TaskItemImpl {
}
}
/**
 * Extracts the `execution_start` timestamp from a task item's status
 * messages, if present.
 * @param taskItem Queue/history task item; may or may not carry a `status`.
 * @returns Timestamp of the first `execution_start` status message, or
 *   `undefined` when the item has no status or no such message.
 */
function executionStartTimestamp(taskItem: TaskItem) {
  if (!('status' in taskItem)) return undefined
  const entries = taskItem.status?.messages ?? []
  for (const entry of entries) {
    if (entry[0] === 'execution_start') return entry[1].timestamp
  }
  return undefined
}
export const useQueueStore = defineStore('queue', () => {
const runningTasks = ref<TaskItemImpl[]>([])
const pendingTasks = ref<TaskItemImpl[]>([])
@@ -442,9 +444,12 @@ export const useQueueStore = defineStore('queue', () => {
tasks.value.flatMap((task: TaskItemImpl) => task.flatten())
)
const lastHistoryQueueIndex = computed<number>(() =>
historyTasks.value.length ? historyTasks.value[0].queueIndex : -1
)
const lastExecutionStartTimestamp = computed<number>(() => {
const latestItemWithTimestamp = historyTasks.value.length
? historyTasks.value.find((item) => item.executionStartTimestamp != null)
: undefined
return latestItemWithTimestamp?.executionStartTimestamp ?? -1
})
const hasPendingTasks = computed<boolean>(() => pendingTasks.value.length > 0)
@@ -474,19 +479,34 @@ export const useQueueStore = defineStore('queue', () => {
pendingTasks.value = toClassAll(queue.Pending)
const allIndex = new Set<number>(
history.History.map((item: TaskItem) => item.prompt[0])
history.History.map((item: TaskItem) => item.prompt.priority)
)
const newHistoryItems = toClassAll(
history.History.filter(
(item) => item.prompt[0] > lastHistoryQueueIndex.value
(item) =>
(executionStartTimestamp(item) ?? Number.MAX_SAFE_INTEGER) >
lastExecutionStartTimestamp.value
)
)
const existingHistoryItems = historyTasks.value.filter((item) =>
allIndex.has(item.queueIndex)
)
historyTasks.value = [...newHistoryItems, ...existingHistoryItems]
const sortedTasks = [...newHistoryItems, ...existingHistoryItems]
.slice(0, maxHistoryItems.value)
.sort((a, b) => b.queueIndex - a.queueIndex)
.sort((a, b) => {
const aTime = a.executionStartTimestamp ?? 0
const bTime = b.executionStartTimestamp ?? 0
return bTime - aTime
})
const foundPromptIds = new Set()
const deduplicatedTasks = sortedTasks.filter((item) => {
if (!foundPromptIds.has(item.promptId)) {
foundPromptIds.add(item.promptId)
return true
}
return false
})
historyTasks.value = deduplicatedTasks
} finally {
isLoading.value = false
}
@@ -516,7 +536,6 @@ export const useQueueStore = defineStore('queue', () => {
tasks,
flatTasks,
lastHistoryQueueIndex,
hasPendingTasks,
update,

View File

@@ -1,3 +1,4 @@
import axios, { type AxiosError } from 'axios'
import { groupBy } from 'es-toolkit/compat'
import { defineStore } from 'pinia'
import { computed, ref, shallowRef } from 'vue'
@@ -205,6 +206,12 @@ export const useWorkflowTemplatesStore = defineStore(
}
} catch (error) {
console.error('Error fetching workflow templates:', error)
if (axios.isAxiosError(error)) {
const axiosError = error as AxiosError
if (axiosError.response?.data) {
console.error('Template error details:', axiosError.response.data)
}
}
}
}

View File

@@ -0,0 +1,27 @@
import { FirebaseError } from 'firebase/app'
import { t, te } from '@/i18n'
/**
 * Maps an authentication error to a user-facing message.
 *
 * Firebase errors are looked up in the `auth.errors.*` translation table
 * first; any other `Error` falls back to its own message, and anything
 * else yields the generic unknown-error translation.
 * @param error - Any error object from authentication flows
 * @returns User-friendly (localized when possible) error message
 */
export function translateAuthError(error: unknown): string {
  // Prefer a specific translation keyed by the Firebase error code.
  const firebaseKey =
    error instanceof FirebaseError ? `auth.errors.${error.code}` : undefined
  if (firebaseKey !== undefined && te(firebaseKey)) {
    return t(firebaseKey)
  }
  // Fallback to the raw error message, then to a generic message.
  const fallbackMessage = error instanceof Error ? error.message : ''
  return fallbackMessage || t('g.unknownError')
}

View File

@@ -43,10 +43,31 @@ export function isAudioNode(node: LGraphNode | undefined): boolean {
export function addToComboValues(widget: IComboWidget, value: string) {
if (!widget.options) widget.options = { values: [] }
if (!widget.options.values) widget.options.values = []
// @ts-expect-error Combo widget values may be a dictionary or legacy function type
if (!widget.options.values.includes(value)) {
// Check if this widget has our filename mapping (has getRawValues method)
const mappingWidget = widget as any
if (
mappingWidget.getRawValues &&
typeof mappingWidget.getRawValues === 'function'
) {
// This is a filename mapping widget - work with raw values directly
const rawValues = mappingWidget.getRawValues()
if (!rawValues.includes(value)) {
console.debug('[FilenameMapping] Adding to raw values:', value)
rawValues.push(value)
// Trigger refresh
if (mappingWidget.refreshMappings) {
mappingWidget.refreshMappings()
}
}
} else {
// Regular widget without mapping
// @ts-expect-error Combo widget values may be a dictionary or legacy function type
widget.options.values.push(value)
if (!widget.options.values.includes(value)) {
// @ts-expect-error Combo widget values may be a dictionary or legacy function type
widget.options.values.push(value)
}
}
}

View File

@@ -2,16 +2,189 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'
import { useComboWidget } from '@/composables/widgets/useComboWidget'
import type { InputSpec } from '@/schemas/nodeDef/nodeDefSchemaV2'
import { fileNameMappingService } from '@/services/fileNameMappingService'
// Mock api to prevent app initialization
vi.mock('@/scripts/api', () => ({
api: {
fetchApi: vi.fn(),
addEventListener: vi.fn(),
apiURL: vi.fn((path) => `/api${path}`),
fileURL: vi.fn((path) => path)
}
}))
vi.mock('@/scripts/widgets', () => ({
addValueControlWidgets: vi.fn()
}))
vi.mock('@/services/fileNameMappingService', () => ({
fileNameMappingService: {
getMapping: vi.fn().mockResolvedValue({}),
getCachedMapping: vi.fn().mockReturnValue({}),
getCachedReverseMapping: vi.fn().mockReturnValue({}),
refreshMapping: vi.fn().mockResolvedValue({}),
invalidateCache: vi.fn()
}
}))
describe('useComboWidget', () => {
beforeEach(() => {
vi.clearAllMocks()
})
describe('deduplication', () => {
it('should display deduplicated names in dropdown', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'hash1.png',
options: {
values: ['hash1.png', 'hash2.png', 'hash3.png']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget)
}
// Mock deduplicated mapping
vi.mocked(fileNameMappingService.getCachedMapping).mockImplementation(
(_fileType, deduplicated) => {
if (deduplicated) {
return {
'hash1.png': 'vacation_hash1.png',
'hash2.png': 'vacation_hash2.png',
'hash3.png': 'landscape.png'
}
}
return {
'hash1.png': 'vacation.png',
'hash2.png': 'vacation.png',
'hash3.png': 'landscape.png'
}
}
)
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['hash1.png', 'hash2.png', 'hash3.png']
}
const widget = constructor(mockNode as any, inputSpec)
// Check that dropdown values are deduplicated
const dropdownValues = widget.options.values
expect(dropdownValues).toEqual([
'vacation_hash1.png',
'vacation_hash2.png',
'landscape.png'
])
})
it('should correctly handle selection of deduplicated names', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'hash1.png',
options: {
values: ['hash1.png', 'hash2.png']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget)
}
// Mock deduplicated mappings
vi.mocked(fileNameMappingService.getCachedMapping).mockImplementation(
(_fileType, deduplicated) => {
if (deduplicated) {
return {
'hash1.png': 'image_hash1.png',
'hash2.png': 'image_hash2.png'
}
}
return {
'hash1.png': 'image.png',
'hash2.png': 'image.png'
}
}
)
vi.mocked(
fileNameMappingService.getCachedReverseMapping
).mockImplementation((_fileType, deduplicated) => {
if (deduplicated) {
return {
'image_hash1.png': 'hash1.png',
'image_hash2.png': 'hash2.png'
} as Record<string, string>
}
return {
'image.png': 'hash2.png' // Last one wins in non-dedup
} as Record<string, string>
})
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['hash1.png', 'hash2.png']
}
const widget = constructor(mockNode as any, inputSpec)
// Select deduplicated name
;(widget as any).setValue('image_hash1.png')
// Should set the correct hash value
expect(widget.value).toBe('hash1.png')
})
it('should display correct deduplicated name in _displayValue', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'abc123.png',
options: {
values: ['abc123.png', 'def456.png']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget)
}
// Mock deduplicated mapping
vi.mocked(fileNameMappingService.getCachedMapping).mockImplementation(
(_fileType, deduplicated) => {
if (deduplicated) {
return {
'abc123.png': 'photo_abc123.png',
'def456.png': 'photo_def456.png'
}
}
return {
'abc123.png': 'photo.png',
'def456.png': 'photo.png'
}
}
)
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['abc123.png', 'def456.png']
}
const widget = constructor(mockNode as any, inputSpec)
// Check display value shows deduplicated name
expect((widget as any)._displayValue).toBe('photo_abc123.png')
})
})
it('should handle undefined spec', () => {
const constructor = useComboWidget()
const mockNode = {
@@ -36,4 +209,498 @@ describe('useComboWidget', () => {
)
expect(widget).toEqual({ options: {} })
})
describe('filename mapping', () => {
it('should apply filename mapping to widgets with file extensions', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'abc123.png',
options: {
values: ['abc123.png', 'def456.jpg']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget),
setDirtyCanvas: vi.fn(),
graph: {
setDirtyCanvas: vi.fn()
}
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['abc123.png', 'def456.jpg', 'xyz789.webp']
}
// Setup mapping service mocks
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({
'abc123.png': 'vacation_photo.png',
'def456.jpg': 'profile_picture.jpg',
'xyz789.webp': 'animated_logo.webp'
})
vi.mocked(fileNameMappingService.getCachedReverseMapping).mockReturnValue(
{
'vacation_photo.png': 'abc123.png',
'profile_picture.jpg': 'def456.jpg',
'animated_logo.webp': 'xyz789.webp'
}
)
vi.mocked(fileNameMappingService.getMapping).mockResolvedValue({
'abc123.png': 'vacation_photo.png',
'def456.jpg': 'profile_picture.jpg',
'xyz789.webp': 'animated_logo.webp'
})
const widget = constructor(mockNode as any, inputSpec)
// Widget should have mapping methods
expect(widget).toBeDefined()
expect(typeof (widget as any).refreshMappings).toBe('function')
expect(typeof (widget as any).serializeValue).toBe('function')
})
it('should display human-readable names in dropdown', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'abc123.png',
options: {
values: ['abc123.png', 'def456.jpg']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget),
setDirtyCanvas: vi.fn(),
graph: {
setDirtyCanvas: vi.fn()
}
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['abc123.png', 'def456.jpg']
}
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({
'abc123.png': 'vacation_photo.png',
'def456.jpg': 'profile_picture.jpg'
})
const widget = constructor(mockNode as any, inputSpec) as any
// Access options.values through the proxy
const dropdownValues = widget.options.values
// Should return human-readable names
expect(dropdownValues).toEqual([
'vacation_photo.png',
'profile_picture.jpg'
])
})
it('should handle selection of human-readable name and convert to hash', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'abc123.png',
options: {
values: ['abc123.png']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget),
setDirtyCanvas: vi.fn(),
graph: {
setDirtyCanvas: vi.fn()
}
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['abc123.png']
}
vi.mocked(fileNameMappingService.getCachedReverseMapping).mockReturnValue(
{
'vacation_photo.png': 'abc123.png'
}
)
const widget = constructor(mockNode as any, inputSpec) as any
// Simulate selecting human-readable name
widget.callback('vacation_photo.png')
// Should store hash value
expect(widget.value).toBe('abc123.png')
})
it('should not apply mapping to non-file widgets', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'mode',
value: 'linear',
options: {
values: ['linear', 'cubic', 'nearest']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget)
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'mode',
options: ['linear', 'cubic', 'nearest']
}
const widget = constructor(mockNode as any, inputSpec)
// Should not have mapping methods
expect((widget as any).refreshMappings).toBeUndefined()
expect((widget as any).serializeValue).toBeUndefined()
})
it('should show newly uploaded file in dropdown even without mapping', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'abc123.png',
options: {
values: ['abc123.png']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget),
setDirtyCanvas: vi.fn(),
graph: {
setDirtyCanvas: vi.fn()
}
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['abc123.png']
}
// Start with mapping for existing file only
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({
'abc123.png': 'vacation_photo.png'
})
const widget = constructor(mockNode as any, inputSpec) as any
// Simulate adding new file without mapping yet
const newValues = [...mockWidget.options.values, 'new789.png']
mockWidget.options.values = newValues
// Mapping still doesn't have the new file
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({
'abc123.png': 'vacation_photo.png'
})
// Force refresh
widget.refreshMappings()
// Access updated dropdown values
const dropdownValues = widget.options.values
// Should show human name for mapped file and hash for unmapped file
expect(dropdownValues).toEqual(['vacation_photo.png', 'new789.png'])
})
it('should handle dropdown update after new file upload', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'abc123.png',
options: {
values: ['abc123.png']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget),
setDirtyCanvas: vi.fn(),
graph: {
setDirtyCanvas: vi.fn()
}
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['abc123.png']
}
// Initial mapping
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({
'abc123.png': 'vacation_photo.png'
})
const widget = constructor(mockNode as any, inputSpec) as any
// The proxy should initially return mapped values
expect(widget.options.values).toEqual(['vacation_photo.png'])
// Simulate adding new file by replacing the values array (as happens in practice)
// This is how addToComboValues would modify it
const newValues = [...mockWidget.options.values, 'new789.png']
mockWidget.options.values = newValues
// Update mapping to include the new file
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({
'abc123.png': 'vacation_photo.png',
'new789.png': 'new_upload.png'
})
// Force refresh of cached values
widget.refreshMappings()
// Access updated dropdown values - proxy should recompute with new mapping
const dropdownValues = widget.options.values
// Should include both mapped names
expect(dropdownValues).toEqual(['vacation_photo.png', 'new_upload.png'])
})
it('should display hash as fallback when no mapping exists', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'unmapped123.png',
options: {
values: ['unmapped123.png']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget),
setDirtyCanvas: vi.fn(),
graph: {
setDirtyCanvas: vi.fn()
}
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['unmapped123.png']
}
// Return empty mapping
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({})
const widget = constructor(mockNode as any, inputSpec) as any
// Access _displayValue
const displayValue = widget._displayValue
// Should show hash when no mapping exists
expect(displayValue).toBe('unmapped123.png')
// Dropdown should also show hash
const dropdownValues = widget.options.values
expect(dropdownValues).toEqual(['unmapped123.png'])
})
it('should serialize widget value as hash for API calls', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'abc123.png',
options: {
values: ['abc123.png']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget),
setDirtyCanvas: vi.fn(),
graph: {
setDirtyCanvas: vi.fn()
}
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['abc123.png']
}
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({
'abc123.png': 'vacation_photo.png'
})
const widget = constructor(mockNode as any, inputSpec) as any
// serializeValue should always return hash
const serialized = widget.serializeValue()
expect(serialized).toBe('abc123.png')
})
it('should ensure widget.value always contains hash for API calls', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'abc123.png',
options: {
values: ['abc123.png']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget),
setDirtyCanvas: vi.fn(),
graph: {
setDirtyCanvas: vi.fn()
}
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['abc123.png']
}
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({
'abc123.png': 'vacation.png'
})
vi.mocked(fileNameMappingService.getCachedReverseMapping).mockReturnValue(
{
'vacation.png': 'abc123.png'
}
)
const widget = constructor(mockNode as any, inputSpec) as any
// Simulate user selecting from dropdown (human name)
widget.setValue('vacation.png')
// Widget.value should contain the hash for API calls
expect(widget.value).toBe('abc123.png')
// Callback should also convert human name to hash
widget.callback('vacation.png')
expect(widget.value).toBe('abc123.png')
// The value used for API calls should always be the hash
// This is what would be used in /view?filename=...
const apiValue = widget.value
expect(apiValue).toBe('abc123.png')
})
it('should handle arrow key navigation with filename mapping', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'image',
value: 'abc123.png',
options: {
values: ['abc123.png', 'def456.jpg', 'xyz789.webp']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget),
setDirtyCanvas: vi.fn(),
graph: {
setDirtyCanvas: vi.fn()
}
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'image',
options: ['abc123.png', 'def456.jpg', 'xyz789.webp']
}
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({
'abc123.png': 'vacation.png',
'def456.jpg': 'profile.jpg',
'xyz789.webp': 'banner.webp'
})
vi.mocked(fileNameMappingService.getCachedReverseMapping).mockReturnValue(
{
'vacation.png': 'abc123.png',
'profile.jpg': 'def456.jpg',
'banner.webp': 'xyz789.webp'
}
)
const widget = constructor(mockNode as any, inputSpec) as any
// Test increment (arrow right/up)
widget.incrementValue({ canvas: { last_mouseclick: 0 } })
// Should move from abc123.png to def456.jpg
expect(widget.value).toBe('def456.jpg')
// Test decrement (arrow left/down)
widget.decrementValue({ canvas: { last_mouseclick: 0 } })
// Should move back to abc123.png
expect(widget.value).toBe('abc123.png')
})
it('should handle mixed file and non-file options', () => {
const constructor = useComboWidget()
const mockWidget = {
name: 'source',
value: 'abc123.png',
options: {
values: ['abc123.png', 'none', 'default']
},
callback: vi.fn()
}
const mockNode = {
addWidget: vi.fn().mockReturnValue(mockWidget),
setDirtyCanvas: vi.fn(),
graph: {
setDirtyCanvas: vi.fn()
}
}
const inputSpec: InputSpec = {
type: 'COMBO',
name: 'source',
options: ['abc123.png', 'none', 'default']
}
vi.mocked(fileNameMappingService.getCachedMapping).mockReturnValue({
'abc123.png': 'background.png'
})
const widget = constructor(mockNode as any, inputSpec) as any
const dropdownValues = widget.options.values
// Should map file, but leave non-files unchanged
expect(dropdownValues).toEqual(['background.png', 'none', 'default'])
})
})
})

View File

@@ -0,0 +1,248 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
import type {
HistoryResponse,
RawHistoryItem
} from '../../../src/schemas/apiSchema'
import type { ComfyWorkflowJSON } from '../../../src/schemas/comfyWorkflowSchema'
import { ComfyApi } from '../../../src/scripts/api'
// Unit tests for ComfyApi.getHistory: verifies handling of the /history_v2
// response shape, error fallback, and the max_items query parameter.
describe('ComfyApi getHistory', () => {
let api: ComfyApi
// Fresh ComfyApi per test so fetchApi mocks don't leak between tests.
beforeEach(() => {
api = new ComfyApi()
})
// Minimal RawHistoryItem fixture matching the v2 prompt object shape
// (priority / prompt_id / extra_data keys instead of the legacy tuple).
const mockHistoryItem: RawHistoryItem = {
prompt_id: 'test_prompt_id',
prompt: {
priority: 0,
prompt_id: 'test_prompt_id',
extra_data: {
extra_pnginfo: {
workflow: {
last_node_id: 1,
last_link_id: 0,
nodes: [],
links: [],
groups: [],
config: {},
extra: {},
version: 0.4
}
},
client_id: 'test_client_id'
}
},
outputs: {},
status: {
status_str: 'success',
completed: true,
messages: []
}
}
describe('history v2 API format', () => {
it('should handle history array format from /history_v2', async () => {
const historyResponse: HistoryResponse = {
history: [
{ ...mockHistoryItem, prompt_id: 'prompt_id_1' },
{ ...mockHistoryItem, prompt_id: 'prompt_id_2' }
]
}
// Mock fetchApi to return the v2 format
const mockFetchApi = vi.fn().mockResolvedValue({
json: vi.fn().mockResolvedValue(historyResponse)
})
api.fetchApi = mockFetchApi
const result = await api.getHistory(10)
// Each item is passed through with a taskType tag added.
expect(result.History).toHaveLength(2)
expect(result.History[0]).toEqual({
...mockHistoryItem,
prompt_id: 'prompt_id_1',
taskType: 'History'
})
expect(result.History[1]).toEqual({
...mockHistoryItem,
prompt_id: 'prompt_id_2',
taskType: 'History'
})
})
it('should handle empty history array', async () => {
const historyResponse: HistoryResponse = {
history: []
}
const mockFetchApi = vi.fn().mockResolvedValue({
json: vi.fn().mockResolvedValue(historyResponse)
})
api.fetchApi = mockFetchApi
const result = await api.getHistory(10)
expect(result.History).toHaveLength(0)
expect(result.History).toEqual([])
})
})
describe('error handling', () => {
// Network failures must degrade to an empty history, not throw.
it('should return empty history on error', async () => {
const mockFetchApi = vi.fn().mockRejectedValue(new Error('Network error'))
api.fetchApi = mockFetchApi
const result = await api.getHistory()
expect(result.History).toEqual([])
})
})
describe('API call parameters', () => {
it('should call fetchApi with correct v2 endpoint and parameters', async () => {
const mockFetchApi = vi.fn().mockResolvedValue({
json: vi.fn().mockResolvedValue({ history: [] })
})
api.fetchApi = mockFetchApi
await api.getHistory(50)
expect(mockFetchApi).toHaveBeenCalledWith('/history_v2?max_items=50')
})
// Omitting max_items should fall back to the default of 200.
it('should use default max_items parameter with v2 endpoint', async () => {
const mockFetchApi = vi.fn().mockResolvedValue({
json: vi.fn().mockResolvedValue({ history: [] })
})
api.fetchApi = mockFetchApi
await api.getHistory()
expect(mockFetchApi).toHaveBeenCalledWith('/history_v2?max_items=200')
})
})
})
// Unit tests for ComfyApi.getWorkflowFromHistory: fetching a single
// prompt's workflow from /history_v2/{prompt_id} and the null fallbacks.
describe('ComfyApi getWorkflowFromHistory', () => {
let api: ComfyApi
// Fresh ComfyApi per test so fetchApi mocks don't leak between tests.
beforeEach(() => {
api = new ComfyApi()
})
// Minimal empty-but-valid workflow fixture.
const mockWorkflow: ComfyWorkflowJSON = {
last_node_id: 1,
last_link_id: 0,
nodes: [],
links: [],
groups: [],
config: {},
extra: {},
version: 0.4
}
it('should fetch workflow data for a specific prompt', async () => {
const promptId = 'test_prompt_id'
// Response is keyed by prompt id; the workflow lives under
// prompt.extra_data.extra_pnginfo.workflow.
const mockResponse = {
[promptId]: {
prompt: {
priority: 0,
prompt_id: promptId,
extra_data: {
extra_pnginfo: {
workflow: mockWorkflow
}
}
},
outputs: {},
status: {
status_str: 'success',
completed: true,
messages: []
}
}
}
const mockFetchApi = vi.fn().mockResolvedValue({
json: vi.fn().mockResolvedValue(mockResponse)
})
api.fetchApi = mockFetchApi
const result = await api.getWorkflowFromHistory(promptId)
expect(mockFetchApi).toHaveBeenCalledWith(`/history_v2/${promptId}`)
expect(result).toEqual(mockWorkflow)
})
it('should return null when prompt_id is not found', async () => {
const promptId = 'non_existent_prompt'
const mockResponse = {}
const mockFetchApi = vi.fn().mockResolvedValue({
json: vi.fn().mockResolvedValue(mockResponse)
})
api.fetchApi = mockFetchApi
const result = await api.getWorkflowFromHistory(promptId)
expect(mockFetchApi).toHaveBeenCalledWith(`/history_v2/${promptId}`)
expect(result).toBeNull()
})
// Entry exists but extra_data carries no embedded workflow.
it('should return null when workflow data is missing', async () => {
const promptId = 'test_prompt_id'
const mockResponse = {
[promptId]: {
prompt: {
priority: 0,
prompt_id: promptId,
extra_data: {}
},
outputs: {},
status: {
status_str: 'success',
completed: true,
messages: []
}
}
}
const mockFetchApi = vi.fn().mockResolvedValue({
json: vi.fn().mockResolvedValue(mockResponse)
})
api.fetchApi = mockFetchApi
const result = await api.getWorkflowFromHistory(promptId)
expect(result).toBeNull()
})
// A rejected fetch must resolve to null rather than propagate.
it('should handle API errors gracefully', async () => {
const promptId = 'test_prompt_id'
const mockFetchApi = vi.fn().mockRejectedValue(new Error('Network error'))
api.fetchApi = mockFetchApi
const result = await api.getWorkflowFromHistory(promptId)
expect(result).toBeNull()
})
// A null entry for the prompt id must not crash the accessor chain.
it('should handle malformed response gracefully', async () => {
const promptId = 'test_prompt_id'
const mockResponse = {
[promptId]: null
}
const mockFetchApi = vi.fn().mockResolvedValue({
json: vi.fn().mockResolvedValue(mockResponse)
})
api.fetchApi = mockFetchApi
const result = await api.getWorkflowFromHistory(promptId)
expect(result).toBeNull()
})
})

View File

@@ -0,0 +1,571 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { api } from '@/scripts/api'
import {
type FileNameMapping,
FileNameMappingService
} from '@/services/fileNameMappingService'
// Mock api module
vi.mock('@/scripts/api', () => ({
api: {
fetchApi: vi.fn()
}
}))
describe('FileNameMappingService', () => {
let service: FileNameMappingService
beforeEach(() => {
vi.clearAllMocks()
// Create a new instance for each test to avoid cache pollution
service = new FileNameMappingService()
})
describe('deduplication', () => {
it('should not modify unique names', async () => {
const mockData: FileNameMapping = {
'abc123.png': 'vacation.png',
'def456.jpg': 'profile.jpg',
'ghi789.gif': 'animation.gif'
}
vi.mocked(api.fetchApi).mockResolvedValue({
ok: true,
status: 200,
json: async () => mockData
} as any)
await service.getMapping('input')
const dedupMapping = service.getCachedMapping('input', true)
// All unique names should remain unchanged
expect(dedupMapping['abc123.png']).toBe('vacation.png')
expect(dedupMapping['def456.jpg']).toBe('profile.jpg')
expect(dedupMapping['ghi789.gif']).toBe('animation.gif')
})
it('should add hash suffix to duplicate names', async () => {
const mockData: FileNameMapping = {
'abc123def456.png': 'vacation.png',
'xyz789uvw012.png': 'vacation.png',
'mno345pqr678.png': 'vacation.png'
}
vi.mocked(api.fetchApi).mockResolvedValue({
ok: true,
status: 200,
json: async () => mockData
} as any)
await service.getMapping('input')
const dedupMapping = service.getCachedMapping('input', true)
// Check that all values are unique
const values = Object.values(dedupMapping)
const uniqueValues = new Set(values)
expect(uniqueValues.size).toBe(values.length)
// Check that suffixes are added correctly
expect(dedupMapping['abc123def456.png']).toBe('vacation_abc123de.png')
expect(dedupMapping['xyz789uvw012.png']).toBe('vacation_xyz789uv.png')
expect(dedupMapping['mno345pqr678.png']).toBe('vacation_mno345pq.png')
})
it('should preserve file extensions when deduplicating', async () => {
const mockData: FileNameMapping = {
'hash1234.safetensors': 'model.safetensors',
'hash5678.safetensors': 'model.safetensors'
}
vi.mocked(api.fetchApi).mockResolvedValue({
ok: true,
status: 200,
json: async () => mockData
} as any)
await service.getMapping('input')
const dedupMapping = service.getCachedMapping('input', true)
// Extensions should be preserved
expect(dedupMapping['hash1234.safetensors']).toBe(
'model_hash1234.safetensors'
)
expect(dedupMapping['hash5678.safetensors']).toBe(
'model_hash5678.safetensors'
)
})
it('should handle files without extensions', async () => {
const mockData: FileNameMapping = {
abc123: 'README',
def456: 'README',
ghi789: 'LICENSE'
}
vi.mocked(api.fetchApi).mockResolvedValue({
ok: true,
status: 200,
json: async () => mockData
} as any)
await service.getMapping('input')
const dedupMapping = service.getCachedMapping('input', true)
// Files without extensions should still get deduplicated
expect(dedupMapping['abc123']).toBe('README_abc123')
expect(dedupMapping['def456']).toBe('README_def456')
expect(dedupMapping['ghi789']).toBe('LICENSE') // Unique, no suffix
})
it('should build correct reverse mapping for deduplicated names', async () => {
  const payload: FileNameMapping = {
    'hash1.png': 'image.png',
    'hash2.png': 'image.png',
    'hash3.jpg': 'photo.jpg'
  }
  vi.mocked(api.fetchApi).mockResolvedValue({
    json: async () => payload,
    ok: true,
    status: 200
  } as any)

  await service.getMapping('input')
  const reverse = service.getCachedReverseMapping('input', true)

  // Suffixed (deduplicated) names resolve back to their hashes...
  expect(reverse['image_hash1.png']).toBe('hash1.png')
  expect(reverse['image_hash2.png']).toBe('hash2.png')
  expect(reverse['photo.jpg']).toBe('hash3.jpg')
  // ...while the ambiguous original name is absent from the reverse map.
  expect(reverse['image.png']).toBeUndefined()
})
it('should handle mixed duplicate and unique names', async () => {
  // Three hashes share "sunset.png"; the other two names are unique.
  const payload: FileNameMapping = {
    'a1.png': 'sunset.png',
    'b2.png': 'sunset.png',
    'c3.jpg': 'portrait.jpg',
    'd4.gif': 'animation.gif',
    'e5.png': 'sunset.png'
  }
  vi.mocked(api.fetchApi).mockResolvedValue({
    json: async () => payload,
    ok: true,
    status: 200
  } as any)

  await service.getMapping('input')
  const deduped = service.getCachedMapping('input', true)

  // Colliding names are suffixed with their hash stem.
  expect(deduped['a1.png']).toBe('sunset_a1.png')
  expect(deduped['b2.png']).toBe('sunset_b2.png')
  expect(deduped['e5.png']).toBe('sunset_e5.png')
  // Unique names remain exactly as served.
  expect(deduped['c3.jpg']).toBe('portrait.jpg')
  expect(deduped['d4.gif']).toBe('animation.gif')
})
it('should return non-deduplicated mapping when deduplicated=false', async () => {
  const payload: FileNameMapping = {
    'hash1.png': 'image.png',
    'hash2.png': 'image.png'
  }
  vi.mocked(api.fetchApi).mockResolvedValue({
    json: async () => payload,
    ok: true,
    status: 200
  } as any)

  await service.getMapping('input')

  // deduplicated=false: the raw server mapping, collisions and all.
  const raw = service.getCachedMapping('input', false)
  expect(raw['hash1.png']).toBe('image.png')
  expect(raw['hash2.png']).toBe('image.png')

  // deduplicated=true: collisions resolved with hash suffixes.
  const deduped = service.getCachedMapping('input', true)
  expect(deduped['hash1.png']).toBe('image_hash1.png')
  expect(deduped['hash2.png']).toBe('image_hash2.png')
})
})
describe('getMapping', () => {
  it('should fetch mappings from API', async () => {
    const payload: FileNameMapping = {
      'abc123.png': 'vacation_photo.png',
      'def456.jpg': 'profile_picture.jpg'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => payload,
      ok: true,
      status: 200
    } as any)

    const result = await service.getMapping('input')

    // Fetch goes to the mappings endpoint and its body is returned verbatim.
    expect(api.fetchApi).toHaveBeenCalledWith('/files/mappings')
    expect(result).toEqual(payload)
  })

  it('should cache mappings and not refetch within TTL', async () => {
    const payload: FileNameMapping = {
      'abc123.png': 'vacation_photo.png'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => payload,
      ok: true,
      status: 200
    } as any)

    await service.getMapping('input')
    expect(api.fetchApi).toHaveBeenCalledTimes(1)

    // A second call inside the TTL is served from cache — no extra fetch.
    const result = await service.getMapping('input')
    expect(api.fetchApi).toHaveBeenCalledTimes(1)
    expect(result).toEqual(payload)
  })

  it('should return empty object on API failure', async () => {
    vi.mocked(api.fetchApi).mockRejectedValue(new Error('Network error'))
    // Network errors degrade to an empty mapping rather than throwing.
    await expect(service.getMapping('input')).resolves.toEqual({})
  })

  it('should return empty object on non-200 response', async () => {
    vi.mocked(api.fetchApi).mockResolvedValue({
      ok: false,
      status: 404,
      statusText: 'Not Found'
    } as any)
    await expect(service.getMapping('input')).resolves.toEqual({})
  })
})
describe('getCachedMapping', () => {
  it('should return empty object if no cached data', () => {
    // Nothing has been fetched yet.
    expect(service.getCachedMapping('input')).toEqual({})
  })

  it('should return cached data after successful fetch', async () => {
    const payload: FileNameMapping = {
      'abc123.png': 'vacation_photo.png'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => payload,
      ok: true,
      status: 200
    } as any)

    // getMapping populates the cache; the synchronous read must match it.
    await service.getMapping('input')
    expect(service.getCachedMapping('input')).toEqual(payload)
  })
})
describe('getCachedReverseMapping', () => {
  it('should return reverse mapping (human -> hash)', async () => {
    const payload: FileNameMapping = {
      'abc123.png': 'vacation_photo.png',
      'def456.jpg': 'profile_picture.jpg'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => payload,
      ok: true,
      status: 200
    } as any)

    await service.getMapping('input')

    // Keys and values are swapped relative to the fetched mapping.
    expect(service.getCachedReverseMapping('input')).toEqual({
      'vacation_photo.png': 'abc123.png',
      'profile_picture.jpg': 'def456.jpg'
    })
  })

  it('should return empty object if no cached data', () => {
    expect(service.getCachedReverseMapping('input')).toEqual({})
  })
})
describe('getHashFromHumanName', () => {
  it('should convert human name to hash', async () => {
    const payload: FileNameMapping = {
      'abc123.png': 'vacation_photo.png'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => payload,
      ok: true,
      status: 200
    } as any)
    await service.getMapping('input')

    expect(service.getHashFromHumanName('vacation_photo.png', 'input')).toBe(
      'abc123.png'
    )
  })

  it('should return original name if no mapping exists', async () => {
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => ({}),
      ok: true,
      status: 200
    } as any)
    await service.getMapping('input')

    // Unknown names fall through untouched.
    expect(service.getHashFromHumanName('unknown.png', 'input')).toBe(
      'unknown.png'
    )
  })
})
describe('getHumanReadableName', () => {
  it('should convert hash to human-readable name', async () => {
    const payload: FileNameMapping = {
      'abc123.png': 'vacation_photo.png'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => payload,
      ok: true,
      status: 200
    } as any)

    await expect(
      service.getHumanReadableName('abc123.png', 'input')
    ).resolves.toBe('vacation_photo.png')
  })

  it('should return hash if no mapping exists', async () => {
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => ({}),
      ok: true,
      status: 200
    } as any)

    // With an empty mapping, the hash itself comes back.
    await expect(
      service.getHumanReadableName('xyz789.png', 'input')
    ).resolves.toBe('xyz789.png')
  })
})
describe('refreshMapping', () => {
  it('should invalidate cache and fetch fresh data', async () => {
    // Two distinct payloads: the first primes the cache, the second is what
    // a refresh must surface after the cache entry is invalidated.
    const mockData1: FileNameMapping = {
      'abc123.png': 'old_photo.png'
    }
    const mockData2: FileNameMapping = {
      'def456.png': 'new_photo.png'
    }
    // mockResolvedValueOnce queues responses in call order:
    // fetch #1 -> mockData1, fetch #2 -> mockData2.
    vi.mocked(api.fetchApi)
      .mockResolvedValueOnce({
        ok: true,
        status: 200,
        json: async () => mockData1
      } as any)
      .mockResolvedValueOnce({
        ok: true,
        status: 200,
        json: async () => mockData2
      } as any)
    // First fetch
    await service.getMapping('input')
    expect(service.getCachedMapping('input')).toEqual(mockData1)
    // Refresh should fetch new data
    const refreshedData = await service.refreshMapping('input')
    expect(api.fetchApi).toHaveBeenCalledTimes(2)
    expect(refreshedData).toEqual(mockData2)
    expect(service.getCachedMapping('input')).toEqual(mockData2)
  })
})
describe('invalidateCache', () => {
  it('should clear cache for specific file type', async () => {
    const payload: FileNameMapping = {
      'abc123.png': 'photo.png'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => payload,
      ok: true,
      status: 200
    } as any)

    await service.getMapping('input')
    expect(service.getCachedMapping('input')).toEqual(payload)

    // Invalidating 'input' empties that cache entry.
    service.invalidateCache('input')
    expect(service.getCachedMapping('input')).toEqual({})
  })

  it('should clear all caches when no type specified', async () => {
    const payload: FileNameMapping = {
      'abc123.png': 'photo.png'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => payload,
      ok: true,
      status: 200
    } as any)

    await service.getMapping('input')
    await service.getMapping('output')

    // No argument wipes every file-type cache at once.
    service.invalidateCache()
    expect(service.getCachedMapping('input')).toEqual({})
    expect(service.getCachedMapping('output')).toEqual({})
  })
})
describe('ensureMappingsLoaded', () => {
  it('should preload mappings for immediate synchronous access', async () => {
    const payload: FileNameMapping = {
      'abc123.png': 'photo.png'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => payload,
      ok: true,
      status: 200
    } as any)

    await service.ensureMappingsLoaded('input')

    // After preloading, the mapping is readable without awaiting anything.
    expect(service.getCachedMapping('input')).toEqual(payload)
  })

  it('should not throw on API failure', async () => {
    vi.mocked(api.fetchApi).mockRejectedValue(new Error('Network error'))

    // Failures are swallowed rather than propagated...
    await expect(service.ensureMappingsLoaded('input')).resolves.not.toThrow()
    // ...leaving an empty mapping behind.
    expect(service.getCachedMapping('input')).toEqual({})
  })
})
describe('applyMappingToArray', () => {
  it('should apply mapping to array of filenames', async () => {
    const payload: FileNameMapping = {
      'abc123.png': 'vacation.png',
      'def456.jpg': 'profile.jpg'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      json: async () => payload,
      ok: true,
      status: 200
    } as any)

    // Known hashes are translated; unknown entries pass through unchanged.
    await expect(
      service.applyMappingToArray(
        ['abc123.png', 'def456.jpg', 'unknown.gif'],
        'input'
      )
    ).resolves.toEqual(['vacation.png', 'profile.jpg', 'unknown.gif'])
  })

  it('should return original array on API failure', async () => {
    vi.mocked(api.fetchApi).mockRejectedValue(new Error('Network error'))
    const filenames = ['abc123.png', 'def456.jpg']

    // When the fetch fails, the input comes back untranslated.
    await expect(
      service.applyMappingToArray(filenames, 'input')
    ).resolves.toEqual(filenames)
  })
})
describe('edge cases', () => {
  it('should handle invalid JSON response gracefully', async () => {
    // json() rejecting simulates a malformed body; getMapping must swallow
    // it and fall back to an empty mapping.
    vi.mocked(api.fetchApi).mockResolvedValue({
      ok: true,
      status: 200,
      json: async () => {
        throw new Error('Invalid JSON')
      }
    } as any)
    const result = await service.getMapping('input')
    expect(result).toEqual({})
  })
  it('should filter out invalid entries from response', async () => {
    const mockData = {
      'valid.png': 'photo.png',
      invalid: 123, // Invalid value type - will be filtered
      123: 'number_key', // Numeric key becomes string "123" in JS
      'another_valid.jpg': 'image.jpg'
    }
    vi.mocked(api.fetchApi).mockResolvedValue({
      ok: true,
      status: 200,
      json: async () => mockData
    } as any)
    const result = await service.getMapping('input')
    // Should filter out non-string values but keep string keys (including coerced numeric keys)
    expect(result).toEqual({
      'valid.png': 'photo.png',
      '123': 'number_key', // Numeric key becomes string
      'another_valid.jpg': 'image.jpg'
    })
  })
  it('should handle null or array responses', async () => {
    // Non-object payloads (null / array) must normalize to an empty mapping.
    // Test null response
    vi.mocked(api.fetchApi).mockResolvedValueOnce({
      ok: true,
      status: 200,
      json: async () => null
    } as any)
    let result = await service.getMapping('input')
    expect(result).toEqual({})
    // Test array response
    // NOTE(review): 'output' is used for the second call — presumably so the
    // result already cached for 'input' cannot short-circuit the fetch.
    vi.mocked(api.fetchApi).mockResolvedValueOnce({
      ok: true,
      status: 200,
      json: async () => []
    } as any)
    result = await service.getMapping('output')
    expect(result).toEqual({})
  })
})
})

View File

@@ -3,10 +3,94 @@ import { describe, expect, it } from 'vitest'
import { TaskItemImpl } from '@/stores/queueStore'
describe('TaskItemImpl', () => {
describe('prompt property accessors', () => {
  // Minimal serialized-graph fixture shared by the workflow-related cases.
  const workflowFixture = {
    last_node_id: 1,
    last_link_id: 0,
    nodes: [],
    links: [],
    groups: [],
    config: {},
    extra: {},
    version: 0.4
  }

  it('should correctly access queueIndex from priority', () => {
    const item = new TaskItemImpl('Pending', {
      priority: 5,
      prompt_id: 'test-id',
      extra_data: { client_id: 'client-id' }
    })
    expect(item.queueIndex).toBe(5)
  })

  it('should correctly access promptId from prompt_id', () => {
    const item = new TaskItemImpl('History', {
      priority: 0,
      prompt_id: 'unique-prompt-id',
      extra_data: { client_id: 'client-id' }
    })
    expect(item.promptId).toBe('unique-prompt-id')
  })

  it('should correctly access extraData', () => {
    const extraData = {
      client_id: 'client-id',
      extra_pnginfo: { workflow: workflowFixture }
    }
    const item = new TaskItemImpl('Running', {
      priority: 1,
      prompt_id: 'test-id',
      extra_data: extraData
    })
    expect(item.extraData).toEqual(extraData)
  })

  it('should correctly access workflow from extraPngInfo', () => {
    const item = new TaskItemImpl('History', {
      priority: 0,
      prompt_id: 'test-id',
      extra_data: {
        client_id: 'client-id',
        extra_pnginfo: { workflow: workflowFixture }
      }
    })
    expect(item.workflow).toEqual(workflowFixture)
  })

  it('should return undefined workflow when extraPngInfo is missing', () => {
    const item = new TaskItemImpl('History', {
      priority: 0,
      prompt_id: 'test-id',
      extra_data: { client_id: 'client-id' }
    })
    expect(item.workflow).toBeUndefined()
  })
})
it('should remove animated property from outputs during construction', () => {
const taskItem = new TaskItemImpl(
'History',
[0, 'prompt-id', {}, { client_id: 'client-id' }, []],
{
priority: 0,
prompt_id: 'prompt-id',
extra_data: { client_id: 'client-id' }
},
{ status_str: 'success', messages: [], completed: true },
{
'node-1': {
@@ -26,7 +110,11 @@ describe('TaskItemImpl', () => {
it('should handle outputs without animated property', () => {
const taskItem = new TaskItemImpl(
'History',
[0, 'prompt-id', {}, { client_id: 'client-id' }, []],
{
priority: 0,
prompt_id: 'prompt-id',
extra_data: { client_id: 'client-id' }
},
{ status_str: 'success', messages: [], completed: true },
{
'node-1': {
@@ -42,7 +130,11 @@ describe('TaskItemImpl', () => {
it('should recognize webm video from core', () => {
const taskItem = new TaskItemImpl(
'History',
[0, 'prompt-id', {}, { client_id: 'client-id' }, []],
{
priority: 0,
prompt_id: 'prompt-id',
extra_data: { client_id: 'client-id' }
},
{ status_str: 'success', messages: [], completed: true },
{
'node-1': {
@@ -64,7 +156,11 @@ describe('TaskItemImpl', () => {
it('should recognize webm video from VHS', () => {
const taskItem = new TaskItemImpl(
'History',
[0, 'prompt-id', {}, { client_id: 'client-id' }, []],
{
priority: 0,
prompt_id: 'prompt-id',
extra_data: { client_id: 'client-id' }
},
{ status_str: 'success', messages: [], completed: true },
{
'node-1': {
@@ -93,7 +189,11 @@ describe('TaskItemImpl', () => {
it('should recognize mp4 video from core', () => {
const taskItem = new TaskItemImpl(
'History',
[0, 'prompt-id', {}, { client_id: 'client-id' }, []],
{
priority: 0,
prompt_id: 'prompt-id',
extra_data: { client_id: 'client-id' }
},
{ status_str: 'success', messages: [], completed: true },
{
'node-1': {
@@ -128,7 +228,11 @@ describe('TaskItemImpl', () => {
it(`should recognize ${extension} audio`, () => {
const taskItem = new TaskItemImpl(
'History',
[0, 'prompt-id', {}, { client_id: 'client-id' }, []],
{
priority: 0,
prompt_id: 'prompt-id',
extra_data: { client_id: 'client-id' }
},
{ status_str: 'success', messages: [], completed: true },
{
'node-1': {
@@ -153,4 +257,193 @@ describe('TaskItemImpl', () => {
})
})
})
describe('execution timestamp properties', () => {
  it('should extract execution start timestamp from messages', () => {
    // Status messages are [type, payload] tuples; executionStartTimestamp
    // should be read from the 'execution_start' entry, not the last message.
    const taskItem = new TaskItemImpl(
      'History',
      {
        priority: 0,
        prompt_id: 'test-id',
        extra_data: { client_id: 'client-id' }
      },
      {
        status_str: 'success',
        completed: true,
        messages: [
          [
            'execution_start',
            { prompt_id: 'test-id', timestamp: 1234567890 }
          ],
          [
            'execution_success',
            { prompt_id: 'test-id', timestamp: 1234567900 }
          ]
        ]
      }
    )
    expect(taskItem.executionStartTimestamp).toBe(1234567890)
  })
  it('should return undefined when no execution_start message exists', () => {
    // Only an 'execution_success' message is present here.
    const taskItem = new TaskItemImpl(
      'History',
      {
        priority: 0,
        prompt_id: 'test-id',
        extra_data: { client_id: 'client-id' }
      },
      {
        status_str: 'success',
        completed: true,
        messages: [
          [
            'execution_success',
            { prompt_id: 'test-id', timestamp: 1234567900 }
          ]
        ]
      }
    )
    expect(taskItem.executionStartTimestamp).toBeUndefined()
  })
  it('should return undefined when status has no messages', () => {
    const taskItem = new TaskItemImpl(
      'History',
      {
        priority: 0,
        prompt_id: 'test-id',
        extra_data: { client_id: 'client-id' }
      },
      {
        status_str: 'success',
        completed: true,
        messages: []
      }
    )
    expect(taskItem.executionStartTimestamp).toBeUndefined()
  })
  it('should return undefined when status is undefined', () => {
    // No status argument at all — the accessor must not throw.
    const taskItem = new TaskItemImpl('History', {
      priority: 0,
      prompt_id: 'test-id',
      extra_data: { client_id: 'client-id' }
    })
    expect(taskItem.executionStartTimestamp).toBeUndefined()
  })
})
describe('sorting by execution start time', () => {
  // Build a History task whose status carries the given messages.
  const makeHistoryTask = (
    priority: number,
    promptId: string,
    messages: [string, { prompt_id: string; timestamp: number }][]
  ) =>
    new TaskItemImpl(
      'History',
      {
        priority,
        prompt_id: promptId,
        extra_data: { client_id: 'client-id' }
      },
      { status_str: 'success', completed: true, messages }
    )

  // Same comparator queueStore uses: newest start first, missing
  // timestamps treated as 0 (i.e. pushed to the end).
  const byStartTimeDesc = (a: TaskItemImpl, b: TaskItemImpl) =>
    (b.executionStartTimestamp ?? 0) - (a.executionStartTimestamp ?? 0)

  it('should sort history tasks by execution start timestamp descending', () => {
    const oldTask = makeHistoryTask(1, 'old-task', [
      ['execution_start', { prompt_id: 'old-task', timestamp: 1000 }]
    ])
    const newTask = makeHistoryTask(2, 'new-task', [
      ['execution_start', { prompt_id: 'new-task', timestamp: 3000 }]
    ])
    const middleTask = makeHistoryTask(3, 'middle-task', [
      ['execution_start', { prompt_id: 'middle-task', timestamp: 2000 }]
    ])

    const ordered = [oldTask, newTask, middleTask].sort(byStartTimeDesc)

    expect(ordered[0].promptId).toBe('new-task')
    expect(ordered[1].promptId).toBe('middle-task')
    expect(ordered[2].promptId).toBe('old-task')
  })

  it('should place tasks without execution start timestamp at end', () => {
    const withTime = makeHistoryTask(1, 'with-time', [
      ['execution_start', { prompt_id: 'with-time', timestamp: 2000 }]
    ])
    const withoutTime = makeHistoryTask(2, 'without-time', [])

    const ordered = [withoutTime, withTime].sort(byStartTimeDesc)

    expect(ordered[0].promptId).toBe('with-time')
    expect(ordered[1].promptId).toBe('without-time')
  })
})
})

View File

@@ -17,10 +17,22 @@ const SHOULD_MINIFY = process.env.ENABLE_MINIFY === 'true'
// vite dev server will listen on all addresses, including LAN and public addresses
const VITE_REMOTE_DEV = process.env.VITE_REMOTE_DEV === 'true'
const DISABLE_TEMPLATES_PROXY = process.env.DISABLE_TEMPLATES_PROXY === 'true'
const DISABLE_VUE_PLUGINS = process.env.DISABLE_VUE_PLUGINS === 'true'
const DISABLE_VUE_PLUGINS = false // Always enable Vue DevTools for development
// Hardcoded to staging cloud for testing frontend changes against cloud backend
const DEV_SERVER_COMFYUI_URL =
process.env.DEV_SERVER_COMFYUI_URL || 'http://127.0.0.1:8188'
process.env.DEV_SERVER_COMFYUI_URL || 'https://stagingcloud.comfy.org'
// To use local backend, change to: 'http://127.0.0.1:8188'
// Optional: Add API key to .env as STAGING_API_KEY if needed for authentication
const addAuthHeaders = (proxy: any) => {
proxy.on('proxyReq', (proxyReq: any, _req: any, _res: any) => {
const apiKey = process.env.STAGING_API_KEY
if (apiKey) {
proxyReq.setHeader('X-API-KEY', apiKey)
}
})
}
export default defineConfig({
base: '',
@@ -28,16 +40,31 @@ export default defineConfig({
host: VITE_REMOTE_DEV ? '0.0.0.0' : undefined,
proxy: {
'/internal': {
target: DEV_SERVER_COMFYUI_URL
target: DEV_SERVER_COMFYUI_URL,
changeOrigin: true,
secure: false,
configure: addAuthHeaders
},
'/api': {
target: DEV_SERVER_COMFYUI_URL,
changeOrigin: true,
secure: false,
configure: (proxy, _options) => {
addAuthHeaders(proxy)
},
// Return empty array for extensions API as these modules
// are not on vite's dev server.
bypass: (req, res, _options) => {
if (req.url === '/api/extensions') {
res.end(JSON.stringify([]))
return false // Return false to indicate request is handled
}
// Bypass multi-user auth check from staging
if (req.url === '/api/users') {
res.setHeader('Content-Type', 'application/json')
res.end(JSON.stringify({})) // Return empty object to simulate single-user mode
return false // Return false to indicate request is handled
}
return null
}
@@ -45,29 +72,39 @@ export default defineConfig({
'/ws': {
target: DEV_SERVER_COMFYUI_URL,
ws: true
ws: true,
changeOrigin: true,
secure: false,
configure: addAuthHeaders
},
'/workflow_templates': {
target: DEV_SERVER_COMFYUI_URL
target: DEV_SERVER_COMFYUI_URL,
changeOrigin: true,
secure: false,
configure: addAuthHeaders
},
// Proxy extension assets (images/videos) under /extensions to the ComfyUI backend
'/extensions': {
target: DEV_SERVER_COMFYUI_URL,
changeOrigin: true
changeOrigin: true,
secure: false
},
// Proxy docs markdown from backend
'/docs': {
target: DEV_SERVER_COMFYUI_URL,
changeOrigin: true
changeOrigin: true,
secure: false
},
...(!DISABLE_TEMPLATES_PROXY
? {
'/templates': {
target: DEV_SERVER_COMFYUI_URL
target: DEV_SERVER_COMFYUI_URL,
changeOrigin: true,
secure: false
}
}
: {}),