mirror of
https://github.com/Comfy-Org/ComfyUI_frontend.git
synced 2026-04-26 09:19:43 +00:00
chore: migrate tests from tests-ui/ to colocate with source files (#7811)
## Summary Migrates all unit tests from `tests-ui/` to colocate with their source files in `src/`, improving discoverability and maintainability. ## Changes - **What**: Relocated all unit tests to be adjacent to the code they test, following the `<source>.test.ts` naming convention - **Config**: Updated `vitest.config.ts` to remove `tests-ui` include pattern and `@tests-ui` alias - **Docs**: Moved testing documentation to `docs/testing/` with updated paths and patterns ## Review Focus - Migration patterns documented in `temp/plans/migrate-tests-ui-to-src.md` - Tests use `@/` path aliases instead of relative imports - Shared fixtures placed in `__fixtures__/` directories ┆Issue is synchronized with this [Notion page](https://www.notion.so/PR-7811-chore-migrate-tests-from-tests-ui-to-colocate-with-source-files-2da6d73d36508147a4cce85365dee614) by [Unito](https://www.unito.io) --------- Co-authored-by: Amp <amp@ampcode.com> Co-authored-by: GitHub Action <action@github.com>
This commit is contained in:
519
src/stores/assetsStore.test.ts
Normal file
519
src/stores/assetsStore.test.ts
Normal file
@@ -0,0 +1,519 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { useAssetsStore } from '@/stores/assetsStore'
|
||||
import { api } from '@/scripts/api'
|
||||
import type {
|
||||
HistoryTaskItem,
|
||||
TaskPrompt,
|
||||
TaskStatus,
|
||||
TaskOutput
|
||||
} from '@/schemas/apiSchema'
|
||||
|
||||
// Mock the api module
|
||||
vi.mock('@/scripts/api', () => ({
|
||||
api: {
|
||||
getHistory: vi.fn(),
|
||||
internalURL: vi.fn((path) => `http://localhost:3000${path}`),
|
||||
user: 'test-user'
|
||||
}
|
||||
}))
|
||||
|
||||
// Mock the asset service
|
||||
vi.mock('@/platform/assets/services/assetService', () => ({
|
||||
assetService: {
|
||||
getAssetsByTag: vi.fn()
|
||||
}
|
||||
}))
|
||||
|
||||
// Mock distribution type
|
||||
vi.mock('@/platform/distribution/types', () => ({
|
||||
isCloud: false
|
||||
}))
|
||||
|
||||
// Mock TaskItemImpl
|
||||
vi.mock('@/stores/queueStore', () => ({
|
||||
TaskItemImpl: class {
|
||||
public flatOutputs: Array<{
|
||||
supportsPreview: boolean
|
||||
filename: string
|
||||
subfolder: string
|
||||
type: string
|
||||
url: string
|
||||
}>
|
||||
public previewOutput:
|
||||
| {
|
||||
supportsPreview: boolean
|
||||
filename: string
|
||||
subfolder: string
|
||||
type: string
|
||||
url: string
|
||||
}
|
||||
| undefined
|
||||
|
||||
constructor(
|
||||
public taskType: string,
|
||||
public prompt: TaskPrompt,
|
||||
public status: TaskStatus | undefined,
|
||||
public outputs: TaskOutput
|
||||
) {
|
||||
this.flatOutputs = this.outputs
|
||||
? [
|
||||
{
|
||||
supportsPreview: true,
|
||||
filename: 'test.png',
|
||||
subfolder: '',
|
||||
type: 'output',
|
||||
url: 'http://test.com/test.png'
|
||||
}
|
||||
]
|
||||
: []
|
||||
this.previewOutput = this.flatOutputs[0]
|
||||
}
|
||||
}
|
||||
}))
|
||||
|
||||
// Mock asset mappers - add unique timestamps
|
||||
vi.mock('@/platform/assets/composables/media/assetMappers', () => ({
|
||||
mapInputFileToAssetItem: vi.fn((name, index, type) => ({
|
||||
id: `${type}-${index}`,
|
||||
name,
|
||||
size: 0,
|
||||
created_at: new Date(Date.now() - index * 1000).toISOString(), // Unique timestamps
|
||||
tags: [type],
|
||||
preview_url: `http://test.com/${name}`
|
||||
})),
|
||||
mapTaskOutputToAssetItem: vi.fn((task, output) => {
|
||||
const index = parseInt(task.prompt[1].split('_')[1]) || 0
|
||||
return {
|
||||
id: task.prompt[1], // Use promptId as asset ID
|
||||
name: output.filename,
|
||||
size: 0,
|
||||
created_at: new Date(Date.now() - index * 1000).toISOString(), // Unique timestamps
|
||||
tags: ['output'],
|
||||
preview_url: output.url,
|
||||
user_metadata: {}
|
||||
}
|
||||
})
|
||||
}))
|
||||
|
||||
describe('assetsStore - Refactored (Option A)', () => {
|
||||
let store: ReturnType<typeof useAssetsStore>
|
||||
|
||||
// Helper function to create mock history items
|
||||
const createMockHistoryItem = (index: number): HistoryTaskItem => ({
|
||||
taskType: 'History' as const,
|
||||
prompt: [
|
||||
1000 + index, // queueIndex
|
||||
`prompt_${index}`, // promptId
|
||||
{}, // promptInputs
|
||||
{
|
||||
extra_pnginfo: {
|
||||
workflow: {
|
||||
last_node_id: 1,
|
||||
last_link_id: 1,
|
||||
nodes: [],
|
||||
links: [],
|
||||
groups: [],
|
||||
config: {},
|
||||
version: 1
|
||||
}
|
||||
}
|
||||
}, // extraData
|
||||
[] // outputsToExecute
|
||||
],
|
||||
status: {
|
||||
status_str: 'success' as const,
|
||||
completed: true,
|
||||
messages: []
|
||||
},
|
||||
outputs: {
|
||||
'1': {
|
||||
images: [
|
||||
{
|
||||
filename: `output_${index}.png`,
|
||||
subfolder: '',
|
||||
type: 'output' as const
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
store = useAssetsStore()
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('Initial Load', () => {
|
||||
it('should load initial history items', async () => {
|
||||
const mockHistory = Array.from({ length: 10 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValue({
|
||||
History: mockHistory
|
||||
})
|
||||
|
||||
await store.updateHistory()
|
||||
|
||||
expect(api.getHistory).toHaveBeenCalledWith(200, { offset: 0 })
|
||||
expect(store.historyAssets).toHaveLength(10)
|
||||
expect(store.hasMoreHistory).toBe(false) // Less than BATCH_SIZE
|
||||
expect(store.historyLoading).toBe(false)
|
||||
expect(store.historyError).toBe(null)
|
||||
})
|
||||
|
||||
it('should set hasMoreHistory to true when batch is full', async () => {
|
||||
const mockHistory = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValue({
|
||||
History: mockHistory
|
||||
})
|
||||
|
||||
await store.updateHistory()
|
||||
|
||||
expect(store.historyAssets).toHaveLength(200)
|
||||
expect(store.hasMoreHistory).toBe(true) // Exactly BATCH_SIZE
|
||||
})
|
||||
|
||||
it('should handle errors during initial load', async () => {
|
||||
const error = new Error('Failed to fetch')
|
||||
vi.mocked(api.getHistory).mockRejectedValue(error)
|
||||
|
||||
await store.updateHistory()
|
||||
|
||||
expect(store.historyAssets).toHaveLength(0)
|
||||
expect(store.historyError).toBe(error)
|
||||
expect(store.historyLoading).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Pagination', () => {
|
||||
it('should accumulate items when loading more', async () => {
|
||||
// First batch - full BATCH_SIZE
|
||||
const firstBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: firstBatch
|
||||
})
|
||||
|
||||
await store.updateHistory()
|
||||
expect(store.historyAssets).toHaveLength(200)
|
||||
expect(store.hasMoreHistory).toBe(true)
|
||||
|
||||
// Second batch - different items
|
||||
const secondBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(200 + i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: secondBatch
|
||||
})
|
||||
|
||||
await store.loadMoreHistory()
|
||||
|
||||
expect(api.getHistory).toHaveBeenCalledWith(200, { offset: 200 })
|
||||
expect(store.historyAssets).toHaveLength(400) // Accumulated
|
||||
expect(store.hasMoreHistory).toBe(true)
|
||||
})
|
||||
|
||||
it('should prevent duplicate items during pagination', async () => {
|
||||
// First batch - full BATCH_SIZE
|
||||
const firstBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: firstBatch
|
||||
})
|
||||
|
||||
await store.updateHistory()
|
||||
expect(store.historyAssets).toHaveLength(200)
|
||||
|
||||
// Second batch with some duplicates
|
||||
const secondBatch = [
|
||||
createMockHistoryItem(2), // Duplicate
|
||||
createMockHistoryItem(5), // Duplicate
|
||||
...Array.from({ length: 198 }, (_, i) => createMockHistoryItem(200 + i)) // New
|
||||
]
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: secondBatch
|
||||
})
|
||||
|
||||
await store.loadMoreHistory()
|
||||
|
||||
// Should only add new items (198 new, 2 duplicates filtered)
|
||||
expect(store.historyAssets).toHaveLength(398)
|
||||
|
||||
// Verify no duplicates
|
||||
const assetIds = store.historyAssets.map((a) => a.id)
|
||||
const uniqueAssetIds = new Set(assetIds)
|
||||
expect(uniqueAssetIds.size).toBe(store.historyAssets.length)
|
||||
})
|
||||
|
||||
it('should stop loading when no more items', async () => {
|
||||
// First batch - less than BATCH_SIZE
|
||||
const firstBatch = Array.from({ length: 50 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: firstBatch
|
||||
})
|
||||
|
||||
await store.updateHistory()
|
||||
expect(store.hasMoreHistory).toBe(false)
|
||||
|
||||
// Try to load more - should return early
|
||||
await store.loadMoreHistory()
|
||||
|
||||
// Should only have been called once (initial load)
|
||||
expect(api.getHistory).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should handle race conditions with concurrent loads', async () => {
|
||||
// Setup initial state with full batch
|
||||
const initialBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: initialBatch
|
||||
})
|
||||
await store.updateHistory()
|
||||
expect(store.hasMoreHistory).toBe(true)
|
||||
|
||||
// Clear mock to count only loadMore calls
|
||||
vi.mocked(api.getHistory).mockClear()
|
||||
|
||||
// Setup slow API response
|
||||
let resolveLoadMore: (value: { History: HistoryTaskItem[] }) => void
|
||||
const loadMorePromise = new Promise<{ History: HistoryTaskItem[] }>(
|
||||
(resolve) => {
|
||||
resolveLoadMore = resolve
|
||||
}
|
||||
)
|
||||
vi.mocked(api.getHistory).mockReturnValueOnce(loadMorePromise)
|
||||
|
||||
// Start first loadMore
|
||||
const firstLoad = store.loadMoreHistory()
|
||||
|
||||
// Try concurrent load - should be ignored
|
||||
const secondLoad = store.loadMoreHistory()
|
||||
|
||||
// Resolve
|
||||
const secondBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(200 + i)
|
||||
)
|
||||
resolveLoadMore!({ History: secondBatch })
|
||||
|
||||
await Promise.all([firstLoad, secondLoad])
|
||||
|
||||
// Only one API call
|
||||
expect(api.getHistory).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should respect MAX_HISTORY_ITEMS limit', async () => {
|
||||
const BATCH_COUNT = 6 // 6 × 200 = 1200 items
|
||||
|
||||
// Initial load
|
||||
const firstBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: firstBatch
|
||||
})
|
||||
await store.updateHistory()
|
||||
|
||||
// Load additional batches
|
||||
for (let batch = 1; batch < BATCH_COUNT; batch++) {
|
||||
const items = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(batch * 200 + i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: items
|
||||
})
|
||||
await store.loadMoreHistory()
|
||||
}
|
||||
|
||||
// Should be capped at MAX_HISTORY_ITEMS (1000)
|
||||
expect(store.historyAssets).toHaveLength(1000)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Sorting', () => {
|
||||
it('should maintain date sorting after pagination', async () => {
|
||||
// First batch
|
||||
const firstBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: firstBatch
|
||||
})
|
||||
|
||||
await store.updateHistory()
|
||||
|
||||
// Second batch
|
||||
const secondBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(200 + i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: secondBatch
|
||||
})
|
||||
|
||||
await store.loadMoreHistory()
|
||||
|
||||
// Verify sorting (newest first - lower index = newer)
|
||||
for (let i = 1; i < store.historyAssets.length; i++) {
|
||||
const prevDate = new Date(store.historyAssets[i - 1].created_at)
|
||||
const currDate = new Date(store.historyAssets[i].created_at)
|
||||
expect(prevDate.getTime()).toBeGreaterThanOrEqual(currDate.getTime())
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('Error Handling', () => {
|
||||
it('should preserve existing data when loadMore fails', async () => {
|
||||
// First successful load - full batch
|
||||
const firstBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: firstBatch
|
||||
})
|
||||
|
||||
await store.updateHistory()
|
||||
expect(store.historyAssets).toHaveLength(200)
|
||||
|
||||
// Second load fails
|
||||
const error = new Error('Network error')
|
||||
vi.mocked(api.getHistory).mockRejectedValueOnce(error)
|
||||
|
||||
await store.loadMoreHistory()
|
||||
|
||||
// Should keep existing data
|
||||
expect(store.historyAssets).toHaveLength(200)
|
||||
expect(store.historyError).toBe(error)
|
||||
expect(store.isLoadingMore).toBe(false)
|
||||
})
|
||||
|
||||
it('should clear error state on successful retry', async () => {
|
||||
// First load succeeds
|
||||
const firstBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: firstBatch
|
||||
})
|
||||
|
||||
await store.updateHistory()
|
||||
|
||||
// Second load fails
|
||||
const error = new Error('Network error')
|
||||
vi.mocked(api.getHistory).mockRejectedValueOnce(error)
|
||||
|
||||
await store.loadMoreHistory()
|
||||
expect(store.historyError).toBe(error)
|
||||
|
||||
// Third load succeeds
|
||||
const thirdBatch = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(200 + i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: thirdBatch
|
||||
})
|
||||
|
||||
await store.loadMoreHistory()
|
||||
|
||||
// Error should be cleared
|
||||
expect(store.historyError).toBe(null)
|
||||
expect(store.historyAssets).toHaveLength(400)
|
||||
})
|
||||
|
||||
it('should handle errors with proper loading state', async () => {
|
||||
const error = new Error('API error')
|
||||
vi.mocked(api.getHistory).mockRejectedValue(error)
|
||||
|
||||
await store.updateHistory()
|
||||
|
||||
expect(store.historyLoading).toBe(false)
|
||||
expect(store.historyError).toBe(error)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Memory Management', () => {
|
||||
it('should cleanup when exceeding MAX_HISTORY_ITEMS', async () => {
|
||||
// Load 1200 items (exceeds 1000 limit)
|
||||
const batches = 6
|
||||
|
||||
for (let batch = 0; batch < batches; batch++) {
|
||||
const items = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(batch * 200 + i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: items
|
||||
})
|
||||
|
||||
if (batch === 0) {
|
||||
await store.updateHistory()
|
||||
} else {
|
||||
await store.loadMoreHistory()
|
||||
}
|
||||
}
|
||||
|
||||
// Should be limited to 1000
|
||||
expect(store.historyAssets).toHaveLength(1000)
|
||||
|
||||
// All items should be unique (Set cleanup works)
|
||||
const assetIds = store.historyAssets.map((a) => a.id)
|
||||
const uniqueAssetIds = new Set(assetIds)
|
||||
expect(uniqueAssetIds.size).toBe(1000)
|
||||
})
|
||||
|
||||
it('should maintain correct state after cleanup', async () => {
|
||||
// Load items beyond limit
|
||||
for (let batch = 0; batch < 6; batch++) {
|
||||
const items = Array.from({ length: 200 }, (_, i) =>
|
||||
createMockHistoryItem(batch * 200 + i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValueOnce({
|
||||
History: items
|
||||
})
|
||||
|
||||
if (batch === 0) {
|
||||
await store.updateHistory()
|
||||
} else {
|
||||
await store.loadMoreHistory()
|
||||
}
|
||||
}
|
||||
|
||||
expect(store.historyAssets).toHaveLength(1000)
|
||||
|
||||
// Should still maintain sorting
|
||||
for (let i = 1; i < store.historyAssets.length; i++) {
|
||||
const prevDate = new Date(store.historyAssets[i - 1].created_at)
|
||||
const currDate = new Date(store.historyAssets[i].created_at)
|
||||
expect(prevDate.getTime()).toBeGreaterThanOrEqual(currDate.getTime())
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
describe('jobDetailView Support', () => {
|
||||
it('should include outputCount and allOutputs in user_metadata', async () => {
|
||||
const mockHistory = Array.from({ length: 5 }, (_, i) =>
|
||||
createMockHistoryItem(i)
|
||||
)
|
||||
vi.mocked(api.getHistory).mockResolvedValue({
|
||||
History: mockHistory
|
||||
})
|
||||
|
||||
await store.updateHistory()
|
||||
|
||||
// Check first asset
|
||||
const asset = store.historyAssets[0]
|
||||
expect(asset.user_metadata).toBeDefined()
|
||||
expect(asset.user_metadata).toHaveProperty('outputCount')
|
||||
expect(asset.user_metadata).toHaveProperty('allOutputs')
|
||||
expect(Array.isArray(asset.user_metadata!.allOutputs)).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
259
src/stores/comfyRegistryStore.test.ts
Normal file
259
src/stores/comfyRegistryStore.test.ts
Normal file
@@ -0,0 +1,259 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { ref } from 'vue'
|
||||
|
||||
import { useComfyRegistryService } from '@/services/comfyRegistryService'
|
||||
import { useComfyRegistryStore } from '@/stores/comfyRegistryStore'
|
||||
import type { components, operations } from '@/types/comfyRegistryTypes'
|
||||
|
||||
vi.mock('@/services/comfyRegistryService', () => ({
|
||||
useComfyRegistryService: vi.fn()
|
||||
}))
|
||||
|
||||
const mockNodePack: components['schemas']['Node'] = {
|
||||
id: 'test-pack-id',
|
||||
name: 'Test Pack',
|
||||
description: 'A test node pack',
|
||||
downloads: 1000,
|
||||
publisher: {
|
||||
id: 'test-publisher',
|
||||
name: 'Test Publisher'
|
||||
},
|
||||
latest_version: {
|
||||
id: 'test-version',
|
||||
version: '1.0.0',
|
||||
createdAt: '2023-01-01T00:00:00Z'
|
||||
}
|
||||
}
|
||||
|
||||
const mockNodePack2: components['schemas']['Node'] = {
|
||||
id: 'test-pack-id-2',
|
||||
name: 'Test Pack 2',
|
||||
description: 'A second test node pack',
|
||||
downloads: 1000,
|
||||
publisher: {
|
||||
id: 'test-publisher',
|
||||
name: 'Test Publisher'
|
||||
},
|
||||
latest_version: {
|
||||
id: 'test-version',
|
||||
version: '1.0.0',
|
||||
createdAt: '2023-01-01T00:00:00Z'
|
||||
}
|
||||
}
|
||||
|
||||
const mockNodePack3: components['schemas']['Node'] = {
|
||||
id: 'test-pack-id-3',
|
||||
name: 'Test Pack 3',
|
||||
description: 'A third test node pack',
|
||||
downloads: 1000,
|
||||
publisher: {
|
||||
id: 'test-publisher',
|
||||
name: 'Test Publisher'
|
||||
},
|
||||
latest_version: {
|
||||
id: 'test-version',
|
||||
version: '1.0.0',
|
||||
createdAt: '2023-01-01T00:00:00Z'
|
||||
}
|
||||
}
|
||||
|
||||
const mockListResult: operations['listAllNodes']['responses'][200]['content']['application/json'] =
|
||||
{
|
||||
nodes: [mockNodePack],
|
||||
total: 1,
|
||||
page: 1,
|
||||
limit: 10
|
||||
}
|
||||
|
||||
describe('useComfyRegistryStore', () => {
|
||||
let mockRegistryService: {
|
||||
isLoading: ReturnType<typeof ref<boolean>>
|
||||
error: ReturnType<typeof ref<string | null>>
|
||||
listAllPacks: ReturnType<typeof vi.fn>
|
||||
getPackById: ReturnType<typeof vi.fn>
|
||||
inferPackFromNodeName: ReturnType<typeof vi.fn>
|
||||
search: ReturnType<typeof vi.fn>
|
||||
getPackVersions: ReturnType<typeof vi.fn>
|
||||
getPackByVersion: ReturnType<typeof vi.fn>
|
||||
getPublisherById: ReturnType<typeof vi.fn>
|
||||
listPacksForPublisher: ReturnType<typeof vi.fn>
|
||||
getNodeDefs: ReturnType<typeof vi.fn>
|
||||
postPackReview: ReturnType<typeof vi.fn>
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
vi.clearAllMocks()
|
||||
mockRegistryService = {
|
||||
isLoading: ref(false),
|
||||
error: ref(null),
|
||||
listAllPacks: vi.fn().mockImplementation((params) => {
|
||||
// If node_id is provided, return specific nodes
|
||||
if (params.node_id) {
|
||||
return Promise.resolve({
|
||||
nodes: params.node_id
|
||||
.map((id: string) => {
|
||||
switch (id) {
|
||||
case 'test-pack-id':
|
||||
return mockNodePack
|
||||
case 'test-pack-id-2':
|
||||
return mockNodePack2
|
||||
case 'test-pack-id-3':
|
||||
return mockNodePack3
|
||||
default:
|
||||
return null
|
||||
}
|
||||
})
|
||||
.filter(Boolean),
|
||||
total: params.node_id.length,
|
||||
page: 1,
|
||||
limit: 10
|
||||
})
|
||||
}
|
||||
// Otherwise return paginated results
|
||||
return Promise.resolve(mockListResult)
|
||||
}),
|
||||
getPackById: vi.fn().mockResolvedValue(mockNodePack),
|
||||
inferPackFromNodeName: vi.fn().mockResolvedValue(mockNodePack),
|
||||
search: vi.fn().mockResolvedValue(mockListResult),
|
||||
getPackVersions: vi.fn().mockResolvedValue([]),
|
||||
getPackByVersion: vi.fn().mockResolvedValue({}),
|
||||
getPublisherById: vi.fn().mockResolvedValue({}),
|
||||
listPacksForPublisher: vi.fn().mockResolvedValue([]),
|
||||
getNodeDefs: vi.fn().mockResolvedValue({}),
|
||||
postPackReview: vi.fn().mockResolvedValue({})
|
||||
}
|
||||
|
||||
vi.mocked(useComfyRegistryService).mockReturnValue(
|
||||
mockRegistryService as any
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
useComfyRegistryStore().clearCache()
|
||||
})
|
||||
|
||||
it('should fetch and store packs', async () => {
|
||||
const store = useComfyRegistryStore()
|
||||
const params = { page: 1, limit: 10 }
|
||||
|
||||
const result = await store.listAllPacks.call(params)
|
||||
|
||||
expect(result).toEqual(mockListResult)
|
||||
expect(mockRegistryService.listAllPacks).toHaveBeenCalledWith(
|
||||
params,
|
||||
expect.any(Object) // abort signal
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle empty nodes array in response', async () => {
|
||||
const emptyResult = {
|
||||
nodes: undefined,
|
||||
total: 0,
|
||||
page: 1,
|
||||
limit: 10
|
||||
}
|
||||
mockRegistryService.listAllPacks.mockResolvedValueOnce(emptyResult)
|
||||
|
||||
const store = useComfyRegistryStore()
|
||||
const result = await store.listAllPacks.call({ page: 1, limit: 10 })
|
||||
|
||||
expect(result).toEqual(emptyResult)
|
||||
})
|
||||
|
||||
it('should fetch a pack by ID', async () => {
|
||||
const store = useComfyRegistryStore()
|
||||
const packId = 'test-pack-id'
|
||||
|
||||
const result = await store.getPackById.call(packId)
|
||||
|
||||
expect(result).toEqual(mockNodePack)
|
||||
expect(mockRegistryService.getPackById).toHaveBeenCalledWith(packId)
|
||||
})
|
||||
|
||||
it('should return null when fetching a pack with null ID', async () => {
|
||||
const store = useComfyRegistryStore()
|
||||
vi.spyOn(store.getPackById, 'call').mockResolvedValueOnce(null)
|
||||
|
||||
const result = await store.getPackById.call(null as any)
|
||||
|
||||
expect(result).toBeNull()
|
||||
expect(mockRegistryService.getPackById).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle service errors gracefully', async () => {
|
||||
mockRegistryService.listAllPacks.mockResolvedValueOnce(null)
|
||||
|
||||
const store = useComfyRegistryStore()
|
||||
const result = await store.listAllPacks.call({ page: 1, limit: 10 })
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
|
||||
it('should fetch packs by IDs', async () => {
|
||||
const store = useComfyRegistryStore()
|
||||
const packIds = ['test-pack-id', 'test-pack-id-2', 'test-pack-id-3']
|
||||
const result = await store.getPacksByIds.call(packIds)
|
||||
|
||||
expect(result).toEqual([mockNodePack, mockNodePack2, mockNodePack3])
|
||||
expect(mockRegistryService.listAllPacks).toHaveBeenCalledWith(
|
||||
{ node_id: packIds },
|
||||
expect.any(Object) // abort signal
|
||||
)
|
||||
})
|
||||
|
||||
describe('inferPackFromNodeName', () => {
|
||||
it('should fetch a pack by comfy node name', async () => {
|
||||
const store = useComfyRegistryStore()
|
||||
const nodeName = 'KSampler'
|
||||
|
||||
const result = await store.inferPackFromNodeName.call(nodeName)
|
||||
|
||||
expect(result).toEqual(mockNodePack)
|
||||
expect(mockRegistryService.inferPackFromNodeName).toHaveBeenCalledWith(
|
||||
nodeName,
|
||||
expect.any(Object) // abort signal
|
||||
)
|
||||
})
|
||||
|
||||
it('should cache results', async () => {
|
||||
const store = useComfyRegistryStore()
|
||||
const nodeName = 'KSampler'
|
||||
|
||||
// First call
|
||||
const result1 = await store.inferPackFromNodeName.call(nodeName)
|
||||
expect(mockRegistryService.inferPackFromNodeName).toHaveBeenCalledTimes(1)
|
||||
|
||||
// Second call - should use cache
|
||||
const result2 = await store.inferPackFromNodeName.call(nodeName)
|
||||
expect(mockRegistryService.inferPackFromNodeName).toHaveBeenCalledTimes(1)
|
||||
expect(result2).toEqual(result1)
|
||||
})
|
||||
|
||||
it('should handle null results when node is not found', async () => {
|
||||
mockRegistryService.inferPackFromNodeName.mockResolvedValueOnce(null)
|
||||
|
||||
const store = useComfyRegistryStore()
|
||||
const result = await store.inferPackFromNodeName.call('NonExistentNode')
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
|
||||
it('should clear cache when clearCache is called', async () => {
|
||||
const store = useComfyRegistryStore()
|
||||
const nodeName = 'KSampler'
|
||||
|
||||
// First call to populate cache
|
||||
await store.inferPackFromNodeName.call(nodeName)
|
||||
expect(mockRegistryService.inferPackFromNodeName).toHaveBeenCalledTimes(1)
|
||||
|
||||
// Clear cache
|
||||
store.clearCache()
|
||||
|
||||
// Call again - should hit the service again
|
||||
await store.inferPackFromNodeName.call(nodeName)
|
||||
expect(mockRegistryService.inferPackFromNodeName).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
})
|
||||
})
|
||||
229
src/stores/dialogStore.test.ts
Normal file
229
src/stores/dialogStore.test.ts
Normal file
@@ -0,0 +1,229 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it } from 'vitest'
|
||||
import { defineComponent } from 'vue'
|
||||
|
||||
import { useDialogStore } from '@/stores/dialogStore'
|
||||
|
||||
const MockComponent = defineComponent({
|
||||
name: 'MockComponent',
|
||||
template: '<div>Mock</div>'
|
||||
})
|
||||
|
||||
describe('dialogStore', () => {
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
})
|
||||
|
||||
describe('priority system', () => {
|
||||
it('should create dialogs in correct priority order', () => {
|
||||
const store = useDialogStore()
|
||||
|
||||
// Create dialogs with different priorities
|
||||
store.showDialog({
|
||||
key: 'low-priority',
|
||||
component: MockComponent,
|
||||
priority: 0
|
||||
})
|
||||
|
||||
store.showDialog({
|
||||
key: 'high-priority',
|
||||
component: MockComponent,
|
||||
priority: 10
|
||||
})
|
||||
|
||||
store.showDialog({
|
||||
key: 'medium-priority',
|
||||
component: MockComponent,
|
||||
priority: 5
|
||||
})
|
||||
|
||||
store.showDialog({
|
||||
key: 'no-priority',
|
||||
component: MockComponent
|
||||
})
|
||||
|
||||
// Check order: high (2) -> medium (1) -> low (0)
|
||||
expect(store.dialogStack.map((d) => d.key)).toEqual([
|
||||
'high-priority',
|
||||
'medium-priority',
|
||||
'no-priority',
|
||||
'low-priority'
|
||||
])
|
||||
})
|
||||
|
||||
it('should maintain priority order when rising dialogs', () => {
|
||||
const store = useDialogStore()
|
||||
|
||||
// Create dialogs with different priorities
|
||||
store.showDialog({
|
||||
key: 'priority-2',
|
||||
component: MockComponent,
|
||||
priority: 2
|
||||
})
|
||||
|
||||
store.showDialog({
|
||||
key: 'priority-1',
|
||||
component: MockComponent,
|
||||
priority: 1
|
||||
})
|
||||
|
||||
store.showDialog({
|
||||
key: 'priority-0',
|
||||
component: MockComponent,
|
||||
priority: 0
|
||||
})
|
||||
|
||||
// Try to rise the lowest priority dialog
|
||||
store.riseDialog({ key: 'priority-0' })
|
||||
|
||||
// Should still be at the bottom because of its priority
|
||||
expect(store.dialogStack.map((d) => d.key)).toEqual([
|
||||
'priority-2',
|
||||
'priority-1',
|
||||
'priority-0'
|
||||
])
|
||||
|
||||
// Rise the medium priority dialog
|
||||
store.riseDialog({ key: 'priority-1' })
|
||||
|
||||
// Should be above priority-0 but below priority-2
|
||||
expect(store.dialogStack.map((d) => d.key)).toEqual([
|
||||
'priority-2',
|
||||
'priority-1',
|
||||
'priority-0'
|
||||
])
|
||||
})
|
||||
|
||||
it('should keep high priority dialogs on top when creating new lower priority dialogs', () => {
|
||||
const store = useDialogStore()
|
||||
|
||||
// Create a high priority dialog (like manager progress)
|
||||
store.showDialog({
|
||||
key: 'manager-progress',
|
||||
component: MockComponent,
|
||||
priority: 10
|
||||
})
|
||||
|
||||
store.showDialog({
|
||||
key: 'dialog-2',
|
||||
component: MockComponent,
|
||||
priority: 0
|
||||
})
|
||||
|
||||
store.showDialog({
|
||||
key: 'dialog-3',
|
||||
component: MockComponent
|
||||
// Default priority is 1
|
||||
})
|
||||
|
||||
// Manager progress should still be on top
|
||||
expect(store.dialogStack[0].key).toBe('manager-progress')
|
||||
|
||||
// Check full order
|
||||
expect(store.dialogStack.map((d) => d.key)).toEqual([
|
||||
'manager-progress', // priority 2
|
||||
'dialog-3', // priority 1 (default)
|
||||
'dialog-2' // priority 0
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe('basic dialog operations', () => {
|
||||
it('should show and close dialogs', () => {
|
||||
const store = useDialogStore()
|
||||
|
||||
store.showDialog({
|
||||
key: 'test-dialog',
|
||||
component: MockComponent
|
||||
})
|
||||
|
||||
expect(store.dialogStack).toHaveLength(1)
|
||||
expect(store.isDialogOpen('test-dialog')).toBe(true)
|
||||
|
||||
store.closeDialog({ key: 'test-dialog' })
|
||||
|
||||
expect(store.dialogStack).toHaveLength(0)
|
||||
expect(store.isDialogOpen('test-dialog')).toBe(false)
|
||||
})
|
||||
|
||||
it('should reuse existing dialog when showing with same key', () => {
|
||||
const store = useDialogStore()
|
||||
|
||||
store.showDialog({
|
||||
key: 'reusable-dialog',
|
||||
component: MockComponent,
|
||||
title: 'Original Title'
|
||||
})
|
||||
|
||||
// First call should create the dialog
|
||||
expect(store.dialogStack).toHaveLength(1)
|
||||
expect(store.dialogStack[0].title).toBe('Original Title')
|
||||
|
||||
// Second call with same key should reuse the dialog
|
||||
store.showDialog({
|
||||
key: 'reusable-dialog',
|
||||
component: MockComponent,
|
||||
title: 'New Title' // This should be ignored
|
||||
})
|
||||
|
||||
// Should still have only one dialog with original title
|
||||
expect(store.dialogStack).toHaveLength(1)
|
||||
expect(store.dialogStack[0].key).toBe('reusable-dialog')
|
||||
expect(store.dialogStack[0].title).toBe('Original Title')
|
||||
})
|
||||
})
|
||||
|
||||
describe('ESC key behavior with multiple dialogs', () => {
|
||||
it('should only allow the active dialog to close with ESC key', () => {
|
||||
const store = useDialogStore()
|
||||
|
||||
// Create dialogs with different priorities
|
||||
store.showDialog({
|
||||
key: 'dialog-1',
|
||||
component: MockComponent,
|
||||
priority: 1
|
||||
})
|
||||
|
||||
store.showDialog({
|
||||
key: 'dialog-2',
|
||||
component: MockComponent,
|
||||
priority: 2
|
||||
})
|
||||
|
||||
store.showDialog({
|
||||
key: 'dialog-3',
|
||||
component: MockComponent,
|
||||
priority: 3
|
||||
})
|
||||
|
||||
// Only the active dialog should be closable with ESC
|
||||
const activeDialog = store.dialogStack.find(
|
||||
(d) => d.key === store.activeKey
|
||||
)
|
||||
const inactiveDialogs = store.dialogStack.filter(
|
||||
(d) => d.key !== store.activeKey
|
||||
)
|
||||
|
||||
expect(activeDialog?.dialogComponentProps.closeOnEscape).toBe(true)
|
||||
inactiveDialogs.forEach((dialog) => {
|
||||
expect(dialog.dialogComponentProps.closeOnEscape).toBe(false)
|
||||
})
|
||||
|
||||
// Close the active dialog
|
||||
store.closeDialog({ key: store.activeKey! })
|
||||
|
||||
// The new active dialog should now be closable with ESC
|
||||
const newActiveDialog = store.dialogStack.find(
|
||||
(d) => d.key === store.activeKey
|
||||
)
|
||||
const newInactiveDialogs = store.dialogStack.filter(
|
||||
(d) => d.key !== store.activeKey
|
||||
)
|
||||
|
||||
expect(newActiveDialog?.dialogComponentProps.closeOnEscape).toBe(true)
|
||||
newInactiveDialogs.forEach((dialog) => {
|
||||
expect(dialog.dialogComponentProps.closeOnEscape).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
151
src/stores/domWidgetStore.test.ts
Normal file
151
src/stores/domWidgetStore.test.ts
Normal file
@@ -0,0 +1,151 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it } from 'vitest'
|
||||
|
||||
import { useDomWidgetStore } from '@/stores/domWidgetStore'
|
||||
|
||||
// Mock DOM widget for testing
|
||||
const createMockDOMWidget = (id: string) => {
|
||||
const element = document.createElement('input')
|
||||
return {
|
||||
id,
|
||||
element,
|
||||
node: {
|
||||
id: 'node-1',
|
||||
title: 'Test Node',
|
||||
pos: [0, 0],
|
||||
size: [200, 100]
|
||||
} as any,
|
||||
name: 'test_widget',
|
||||
type: 'text',
|
||||
value: 'test',
|
||||
options: {},
|
||||
y: 0,
|
||||
margin: 10,
|
||||
isVisible: () => true,
|
||||
containerNode: undefined as any
|
||||
}
|
||||
}
|
||||
|
||||
describe('domWidgetStore', () => {
|
||||
let store: ReturnType<typeof useDomWidgetStore>
|
||||
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
store = useDomWidgetStore()
|
||||
})
|
||||
|
||||
describe('widget registration', () => {
|
||||
it('should register a widget with default state', () => {
|
||||
const widget = createMockDOMWidget('widget-1')
|
||||
|
||||
store.registerWidget(widget)
|
||||
|
||||
expect(store.widgetStates.has('widget-1')).toBe(true)
|
||||
const state = store.widgetStates.get('widget-1')
|
||||
expect(state).toBeDefined()
|
||||
expect(state!.widget).toBe(widget)
|
||||
expect(state!.visible).toBe(true)
|
||||
expect(state!.active).toBe(true)
|
||||
expect(state!.readonly).toBe(false)
|
||||
expect(state!.zIndex).toBe(0)
|
||||
expect(state!.pos).toEqual([0, 0])
|
||||
expect(state!.size).toEqual([0, 0])
|
||||
})
|
||||
|
||||
it('should not register the same widget twice', () => {
|
||||
const widget = createMockDOMWidget('widget-1')
|
||||
|
||||
store.registerWidget(widget)
|
||||
store.registerWidget(widget)
|
||||
|
||||
// Should still only have one entry
|
||||
const states = Array.from(store.widgetStates.values())
|
||||
expect(states.length).toBe(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('widget unregistration', () => {
|
||||
it('should unregister a widget by id', () => {
|
||||
const widget = createMockDOMWidget('widget-1')
|
||||
|
||||
store.registerWidget(widget)
|
||||
expect(store.widgetStates.has('widget-1')).toBe(true)
|
||||
|
||||
store.unregisterWidget('widget-1')
|
||||
expect(store.widgetStates.has('widget-1')).toBe(false)
|
||||
})
|
||||
|
||||
it('should handle unregistering non-existent widget gracefully', () => {
|
||||
// Should not throw
|
||||
expect(() => {
|
||||
store.unregisterWidget('non-existent')
|
||||
}).not.toThrow()
|
||||
})
|
||||
})
|
||||
|
||||
describe('widget state management', () => {
|
||||
it('should activate a widget', () => {
|
||||
const widget = createMockDOMWidget('widget-1')
|
||||
store.registerWidget(widget)
|
||||
|
||||
// Set to inactive first
|
||||
const state = store.widgetStates.get('widget-1')!
|
||||
state.active = false
|
||||
|
||||
store.activateWidget('widget-1')
|
||||
expect(state.active).toBe(true)
|
||||
})
|
||||
|
||||
it('should deactivate a widget', () => {
|
||||
const widget = createMockDOMWidget('widget-1')
|
||||
store.registerWidget(widget)
|
||||
|
||||
store.deactivateWidget('widget-1')
|
||||
const state = store.widgetStates.get('widget-1')
|
||||
expect(state!.active).toBe(false)
|
||||
})
|
||||
|
||||
it('should handle activating non-existent widget gracefully', () => {
|
||||
expect(() => {
|
||||
store.activateWidget('non-existent')
|
||||
}).not.toThrow()
|
||||
})
|
||||
})
|
||||
|
||||
describe('computed states', () => {
|
||||
it('should separate active and inactive widget states', () => {
|
||||
const widget1 = createMockDOMWidget('widget-1')
|
||||
const widget2 = createMockDOMWidget('widget-2')
|
||||
|
||||
store.registerWidget(widget1)
|
||||
store.registerWidget(widget2)
|
||||
|
||||
// Deactivate widget2
|
||||
store.deactivateWidget('widget-2')
|
||||
|
||||
expect(store.activeWidgetStates.length).toBe(1)
|
||||
expect(store.activeWidgetStates[0].widget.id).toBe('widget-1')
|
||||
|
||||
expect(store.inactiveWidgetStates.length).toBe(1)
|
||||
expect(store.inactiveWidgetStates[0].widget.id).toBe('widget-2')
|
||||
})
|
||||
})
|
||||
|
||||
describe('clear functionality', () => {
|
||||
it('should clear all widget states', () => {
|
||||
const widget1 = createMockDOMWidget('widget-1')
|
||||
const widget2 = createMockDOMWidget('widget-2')
|
||||
|
||||
store.registerWidget(widget1)
|
||||
store.registerWidget(widget2)
|
||||
|
||||
expect(store.widgetStates.size).toBe(2)
|
||||
|
||||
store.clear()
|
||||
|
||||
expect(store.widgetStates.size).toBe(0)
|
||||
expect(store.activeWidgetStates.length).toBe(0)
|
||||
expect(store.inactiveWidgetStates.length).toBe(0)
|
||||
})
|
||||
})
|
||||
})
|
||||
301
src/stores/executionStore.test.ts
Normal file
301
src/stores/executionStore.test.ts
Normal file
@@ -0,0 +1,301 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { app } from '@/scripts/app'
|
||||
import { useExecutionStore } from '@/stores/executionStore'
|
||||
|
||||
// Create mock functions that will be shared
|
||||
const mockNodeExecutionIdToNodeLocatorId = vi.fn()
|
||||
const mockNodeIdToNodeLocatorId = vi.fn()
|
||||
const mockNodeLocatorIdToNodeExecutionId = vi.fn()
|
||||
|
||||
import type * as WorkflowStoreModule from '@/platform/workflow/management/stores/workflowStore'
|
||||
|
||||
// Mock the workflowStore
|
||||
vi.mock('@/platform/workflow/management/stores/workflowStore', async () => {
|
||||
const { ComfyWorkflow } = await vi.importActual<typeof WorkflowStoreModule>(
|
||||
'@/platform/workflow/management/stores/workflowStore'
|
||||
)
|
||||
return {
|
||||
ComfyWorkflow,
|
||||
useWorkflowStore: vi.fn(() => ({
|
||||
nodeExecutionIdToNodeLocatorId: mockNodeExecutionIdToNodeLocatorId,
|
||||
nodeIdToNodeLocatorId: mockNodeIdToNodeLocatorId,
|
||||
nodeLocatorIdToNodeExecutionId: mockNodeLocatorIdToNodeExecutionId
|
||||
}))
|
||||
}
|
||||
})
|
||||
|
||||
// Remove any previous global types
|
||||
declare global {
|
||||
interface Window {}
|
||||
}
|
||||
|
||||
vi.mock('@/composables/node/useNodeProgressText', () => ({
|
||||
useNodeProgressText: () => ({
|
||||
showTextPreview: vi.fn()
|
||||
})
|
||||
}))
|
||||
|
||||
// Mock the app import with proper implementation
|
||||
vi.mock('@/scripts/app', () => ({
|
||||
app: {
|
||||
rootGraph: {
|
||||
getNodeById: vi.fn(),
|
||||
nodes: [] // Add nodes array for workflowStore iteration
|
||||
},
|
||||
revokePreviews: vi.fn(),
|
||||
nodePreviewImages: {}
|
||||
}
|
||||
}))
|
||||
|
||||
describe('useExecutionStore - NodeLocatorId conversions', () => {
|
||||
let store: ReturnType<typeof useExecutionStore>
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
// Reset mock implementations
|
||||
mockNodeExecutionIdToNodeLocatorId.mockReset()
|
||||
mockNodeIdToNodeLocatorId.mockReset()
|
||||
mockNodeLocatorIdToNodeExecutionId.mockReset()
|
||||
|
||||
setActivePinia(createPinia())
|
||||
store = useExecutionStore()
|
||||
})
|
||||
|
||||
describe('executionIdToNodeLocatorId', () => {
|
||||
it('should convert execution ID to NodeLocatorId', () => {
|
||||
// Mock subgraph structure
|
||||
const mockSubgraph = {
|
||||
id: 'a1b2c3d4-e5f6-7890-abcd-ef1234567890',
|
||||
nodes: []
|
||||
}
|
||||
|
||||
const mockNode = {
|
||||
id: 123,
|
||||
isSubgraphNode: () => true,
|
||||
subgraph: mockSubgraph
|
||||
} as any
|
||||
|
||||
// Mock app.rootGraph.getNodeById to return the mock node
|
||||
vi.mocked(app.rootGraph.getNodeById).mockReturnValue(mockNode)
|
||||
|
||||
const result = store.executionIdToNodeLocatorId('123:456')
|
||||
|
||||
expect(result).toBe('a1b2c3d4-e5f6-7890-abcd-ef1234567890:456')
|
||||
})
|
||||
|
||||
it('should convert simple node ID to NodeLocatorId', () => {
|
||||
const result = store.executionIdToNodeLocatorId('123')
|
||||
|
||||
// For simple node IDs, it should return the ID as-is
|
||||
expect(result).toBe('123')
|
||||
})
|
||||
|
||||
it('should handle numeric node IDs', () => {
|
||||
const result = store.executionIdToNodeLocatorId(123)
|
||||
|
||||
// For numeric IDs, it should convert to string and return as-is
|
||||
expect(result).toBe('123')
|
||||
})
|
||||
|
||||
it('should return undefined when conversion fails', () => {
|
||||
// Mock app.rootGraph.getNodeById to return null (node not found)
|
||||
vi.mocked(app.rootGraph.getNodeById).mockReturnValue(null)
|
||||
|
||||
expect(store.executionIdToNodeLocatorId('999:456')).toBe(undefined)
|
||||
})
|
||||
})
|
||||
|
||||
describe('nodeLocatorIdToExecutionId', () => {
|
||||
it('should convert NodeLocatorId to execution ID', () => {
|
||||
const mockExecutionId = '123:456'
|
||||
mockNodeLocatorIdToNodeExecutionId.mockReturnValue(mockExecutionId)
|
||||
|
||||
const result = store.nodeLocatorIdToExecutionId(
|
||||
'a1b2c3d4-e5f6-7890-abcd-ef1234567890:456'
|
||||
)
|
||||
|
||||
expect(mockNodeLocatorIdToNodeExecutionId).toHaveBeenCalledWith(
|
||||
'a1b2c3d4-e5f6-7890-abcd-ef1234567890:456'
|
||||
)
|
||||
expect(result).toBe(mockExecutionId)
|
||||
})
|
||||
|
||||
it('should return null when conversion fails', () => {
|
||||
mockNodeLocatorIdToNodeExecutionId.mockReturnValue(null)
|
||||
|
||||
const result = store.nodeLocatorIdToExecutionId('invalid:format')
|
||||
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('useExecutionStore - Node Error Lookups', () => {
|
||||
let store: ReturnType<typeof useExecutionStore>
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
setActivePinia(createPinia())
|
||||
store = useExecutionStore()
|
||||
})
|
||||
|
||||
describe('getNodeErrors', () => {
|
||||
it('should return undefined when no errors exist', () => {
|
||||
const result = store.getNodeErrors('123')
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should return node error by locator ID for root graph node', () => {
|
||||
store.lastNodeErrors = {
|
||||
'123': {
|
||||
errors: [
|
||||
{
|
||||
type: 'validation_error',
|
||||
message: 'Invalid input',
|
||||
details: 'Width must be positive',
|
||||
extra_info: { input_name: 'width' }
|
||||
}
|
||||
],
|
||||
class_type: 'TestNode',
|
||||
dependent_outputs: []
|
||||
}
|
||||
}
|
||||
|
||||
const result = store.getNodeErrors('123')
|
||||
expect(result).toBeDefined()
|
||||
expect(result?.errors).toHaveLength(1)
|
||||
expect(result?.errors[0].message).toBe('Invalid input')
|
||||
})
|
||||
|
||||
it('should return node error by locator ID for subgraph node', () => {
|
||||
const subgraphUuid = 'a1b2c3d4-e5f6-7890-abcd-ef1234567890'
|
||||
const mockSubgraph = {
|
||||
id: subgraphUuid,
|
||||
getNodeById: vi.fn(),
|
||||
nodes: []
|
||||
}
|
||||
|
||||
const mockNode = {
|
||||
id: 123,
|
||||
isSubgraphNode: () => true,
|
||||
subgraph: mockSubgraph
|
||||
} as any
|
||||
|
||||
vi.mocked(app.rootGraph.getNodeById).mockReturnValue(mockNode)
|
||||
|
||||
store.lastNodeErrors = {
|
||||
'123:456': {
|
||||
errors: [
|
||||
{
|
||||
type: 'validation_error',
|
||||
message: 'Invalid subgraph input',
|
||||
details: 'Missing required input',
|
||||
extra_info: { input_name: 'image' }
|
||||
}
|
||||
],
|
||||
class_type: 'SubgraphNode',
|
||||
dependent_outputs: []
|
||||
}
|
||||
}
|
||||
|
||||
const locatorId = `${subgraphUuid}:456`
|
||||
const result = store.getNodeErrors(locatorId)
|
||||
expect(result).toBeDefined()
|
||||
expect(result?.errors[0].message).toBe('Invalid subgraph input')
|
||||
})
|
||||
})
|
||||
|
||||
describe('slotHasError', () => {
|
||||
it('should return false when node has no errors', () => {
|
||||
const result = store.slotHasError('123', 'width')
|
||||
expect(result).toBe(false)
|
||||
})
|
||||
|
||||
it('should return false when node has errors but slot is not mentioned', () => {
|
||||
store.lastNodeErrors = {
|
||||
'123': {
|
||||
errors: [
|
||||
{
|
||||
type: 'validation_error',
|
||||
message: 'Invalid input',
|
||||
details: 'Width must be positive',
|
||||
extra_info: { input_name: 'width' }
|
||||
}
|
||||
],
|
||||
class_type: 'TestNode',
|
||||
dependent_outputs: []
|
||||
}
|
||||
}
|
||||
|
||||
const result = store.slotHasError('123', 'height')
|
||||
expect(result).toBe(false)
|
||||
})
|
||||
|
||||
it('should return true when slot has error', () => {
|
||||
store.lastNodeErrors = {
|
||||
'123': {
|
||||
errors: [
|
||||
{
|
||||
type: 'validation_error',
|
||||
message: 'Invalid input',
|
||||
details: 'Width must be positive',
|
||||
extra_info: { input_name: 'width' }
|
||||
}
|
||||
],
|
||||
class_type: 'TestNode',
|
||||
dependent_outputs: []
|
||||
}
|
||||
}
|
||||
|
||||
const result = store.slotHasError('123', 'width')
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
|
||||
it('should return true when multiple errors exist for the same slot', () => {
|
||||
store.lastNodeErrors = {
|
||||
'123': {
|
||||
errors: [
|
||||
{
|
||||
type: 'validation_error',
|
||||
message: 'Invalid input',
|
||||
details: 'Width must be positive',
|
||||
extra_info: { input_name: 'width' }
|
||||
},
|
||||
{
|
||||
type: 'validation_error',
|
||||
message: 'Invalid range',
|
||||
details: 'Width must be less than 1000',
|
||||
extra_info: { input_name: 'width' }
|
||||
}
|
||||
],
|
||||
class_type: 'TestNode',
|
||||
dependent_outputs: []
|
||||
}
|
||||
}
|
||||
|
||||
const result = store.slotHasError('123', 'width')
|
||||
expect(result).toBe(true)
|
||||
})
|
||||
|
||||
it('should handle errors without extra_info', () => {
|
||||
store.lastNodeErrors = {
|
||||
'123': {
|
||||
errors: [
|
||||
{
|
||||
type: 'validation_error',
|
||||
message: 'General error',
|
||||
details: 'Something went wrong'
|
||||
}
|
||||
],
|
||||
class_type: 'TestNode',
|
||||
dependent_outputs: []
|
||||
}
|
||||
}
|
||||
|
||||
const result = store.slotHasError('123', 'width')
|
||||
expect(result).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
618
src/stores/firebaseAuthStore.test.ts
Normal file
618
src/stores/firebaseAuthStore.test.ts
Normal file
@@ -0,0 +1,618 @@
|
||||
import { FirebaseError } from 'firebase/app'
|
||||
import * as firebaseAuth from 'firebase/auth'
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import * as vuefire from 'vuefire'
|
||||
|
||||
import { useDialogService } from '@/services/dialogService'
|
||||
import { useFirebaseAuthStore } from '@/stores/firebaseAuthStore'
|
||||
|
||||
// Mock fetch
|
||||
const mockFetch = vi.fn()
|
||||
vi.stubGlobal('fetch', mockFetch)
|
||||
|
||||
// Mock successful API responses
|
||||
const mockCreateCustomerResponse = {
|
||||
ok: true,
|
||||
statusText: 'OK',
|
||||
json: () => Promise.resolve({ id: 'test-customer-id' })
|
||||
}
|
||||
|
||||
const mockFetchBalanceResponse = {
|
||||
ok: true,
|
||||
json: () => Promise.resolve({ balance: 0 })
|
||||
}
|
||||
|
||||
const mockAddCreditsResponse = {
|
||||
ok: true,
|
||||
statusText: 'OK'
|
||||
}
|
||||
|
||||
const mockAccessBillingPortalResponse = {
|
||||
ok: true,
|
||||
statusText: 'OK',
|
||||
json: () =>
|
||||
Promise.resolve({ billing_portal_url: 'https://billing.stripe.com/test' })
|
||||
}
|
||||
|
||||
vi.mock('vuefire', () => ({
|
||||
useFirebaseAuth: vi.fn()
|
||||
}))
|
||||
|
||||
vi.mock('vue-i18n', () => ({
|
||||
useI18n: () => ({
|
||||
t: (key: string) => key
|
||||
}),
|
||||
createI18n: () => ({
|
||||
global: {
|
||||
t: (key: string) => key
|
||||
}
|
||||
})
|
||||
}))
|
||||
|
||||
vi.mock('firebase/auth', async (importOriginal) => {
|
||||
const actual = await importOriginal<typeof firebaseAuth>()
|
||||
return {
|
||||
...actual,
|
||||
signInWithEmailAndPassword: vi.fn(),
|
||||
createUserWithEmailAndPassword: vi.fn(),
|
||||
signOut: vi.fn(),
|
||||
onAuthStateChanged: vi.fn(),
|
||||
onIdTokenChanged: vi.fn(),
|
||||
signInWithPopup: vi.fn(),
|
||||
GoogleAuthProvider: class {
|
||||
addScope = vi.fn()
|
||||
setCustomParameters = vi.fn()
|
||||
},
|
||||
GithubAuthProvider: class {
|
||||
addScope = vi.fn()
|
||||
setCustomParameters = vi.fn()
|
||||
},
|
||||
setPersistence: vi.fn().mockResolvedValue(undefined)
|
||||
}
|
||||
})
|
||||
|
||||
// Mock useToastStore
|
||||
vi.mock('@/stores/toastStore', () => ({
|
||||
useToastStore: () => ({
|
||||
add: vi.fn()
|
||||
})
|
||||
}))
|
||||
|
||||
// Mock useDialogService
|
||||
vi.mock('@/services/dialogService')
|
||||
|
||||
describe('useFirebaseAuthStore', () => {
|
||||
let store: ReturnType<typeof useFirebaseAuthStore>
|
||||
let authStateCallback: (user: any) => void
|
||||
let idTokenCallback: (user: any) => void
|
||||
|
||||
const mockAuth = {
|
||||
/* mock Auth object */
|
||||
}
|
||||
|
||||
const mockUser = {
|
||||
uid: 'test-user-id',
|
||||
email: 'test@example.com',
|
||||
getIdToken: vi.fn().mockResolvedValue('mock-id-token')
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.resetAllMocks()
|
||||
|
||||
// Setup dialog service mock
|
||||
vi.mocked(useDialogService, { partial: true }).mockReturnValue({
|
||||
showSettingsDialog: vi.fn(),
|
||||
showErrorDialog: vi.fn()
|
||||
})
|
||||
|
||||
// Mock useFirebaseAuth to return our mock auth object
|
||||
vi.mocked(vuefire.useFirebaseAuth).mockReturnValue(mockAuth as any)
|
||||
|
||||
// Mock onAuthStateChanged to capture the callback and simulate initial auth state
|
||||
vi.mocked(firebaseAuth.onAuthStateChanged).mockImplementation(
|
||||
(_, callback) => {
|
||||
authStateCallback = callback as (user: any) => void
|
||||
// Call the callback with our mock user
|
||||
;(callback as (user: any) => void)(mockUser)
|
||||
// Return an unsubscribe function
|
||||
return vi.fn()
|
||||
}
|
||||
)
|
||||
|
||||
// Mock fetch responses
|
||||
mockFetch.mockImplementation((url: string) => {
|
||||
if (url.endsWith('/customers')) {
|
||||
return Promise.resolve(mockCreateCustomerResponse)
|
||||
}
|
||||
if (url.endsWith('/customers/balance')) {
|
||||
return Promise.resolve(mockFetchBalanceResponse)
|
||||
}
|
||||
if (url.endsWith('/customers/credit')) {
|
||||
return Promise.resolve(mockAddCreditsResponse)
|
||||
}
|
||||
if (url.endsWith('/customers/billing')) {
|
||||
return Promise.resolve(mockAccessBillingPortalResponse)
|
||||
}
|
||||
return Promise.reject(new Error('Unexpected API call'))
|
||||
})
|
||||
|
||||
// Initialize Pinia
|
||||
setActivePinia(createPinia())
|
||||
store = useFirebaseAuthStore()
|
||||
|
||||
// Reset and set up getIdToken mock
|
||||
mockUser.getIdToken.mockReset()
|
||||
mockUser.getIdToken.mockResolvedValue('mock-id-token')
|
||||
})
|
||||
|
||||
describe('token refresh events', () => {
|
||||
beforeEach(async () => {
|
||||
vi.resetModules()
|
||||
vi.doMock('@/platform/distribution/types', () => ({
|
||||
isCloud: true,
|
||||
isDesktop: true
|
||||
}))
|
||||
|
||||
vi.mocked(firebaseAuth.onIdTokenChanged).mockImplementation(
|
||||
(_auth, callback) => {
|
||||
idTokenCallback = callback as (user: any) => void
|
||||
return vi.fn()
|
||||
}
|
||||
)
|
||||
|
||||
vi.mocked(vuefire.useFirebaseAuth).mockReturnValue(mockAuth as any)
|
||||
|
||||
setActivePinia(createPinia())
|
||||
const storeModule = await import('@/stores/firebaseAuthStore')
|
||||
store = storeModule.useFirebaseAuthStore()
|
||||
})
|
||||
|
||||
it("should not increment tokenRefreshTrigger on the user's first ID token event", () => {
|
||||
idTokenCallback?.(mockUser)
|
||||
expect(store.tokenRefreshTrigger).toBe(0)
|
||||
})
|
||||
|
||||
it('should increment tokenRefreshTrigger on subsequent ID token events for the same user', () => {
|
||||
idTokenCallback?.(mockUser)
|
||||
idTokenCallback?.(mockUser)
|
||||
expect(store.tokenRefreshTrigger).toBe(1)
|
||||
})
|
||||
|
||||
it('should not increment when ID token event is for a different user UID', () => {
|
||||
const otherUser = { uid: 'other-user-id' }
|
||||
idTokenCallback?.(mockUser)
|
||||
idTokenCallback?.(otherUser)
|
||||
expect(store.tokenRefreshTrigger).toBe(0)
|
||||
})
|
||||
|
||||
it('should increment after switching to a new UID and receiving a second event for that UID', () => {
|
||||
const otherUser = { uid: 'other-user-id' }
|
||||
idTokenCallback?.(mockUser)
|
||||
idTokenCallback?.(otherUser)
|
||||
idTokenCallback?.(otherUser)
|
||||
expect(store.tokenRefreshTrigger).toBe(1)
|
||||
})
|
||||
})
|
||||
|
||||
it('should initialize with the current user', () => {
|
||||
expect(store.currentUser).toEqual(mockUser)
|
||||
expect(store.isAuthenticated).toBe(true)
|
||||
expect(store.userEmail).toBe('test@example.com')
|
||||
expect(store.userId).toBe('test-user-id')
|
||||
expect(store.loading).toBe(false)
|
||||
})
|
||||
|
||||
it('should set persistence to local storage on initialization', () => {
|
||||
expect(firebaseAuth.setPersistence).toHaveBeenCalledWith(
|
||||
mockAuth,
|
||||
firebaseAuth.browserLocalPersistence
|
||||
)
|
||||
})
|
||||
|
||||
it('should properly clean up error state between operations', async () => {
|
||||
// First, cause an error
|
||||
const mockError = new Error('Invalid password')
|
||||
vi.mocked(firebaseAuth.signInWithEmailAndPassword).mockRejectedValueOnce(
|
||||
mockError
|
||||
)
|
||||
|
||||
try {
|
||||
await store.login('test@example.com', 'wrong-password')
|
||||
} catch (e) {
|
||||
// Error expected
|
||||
}
|
||||
|
||||
// Now, succeed on next attempt
|
||||
vi.mocked(firebaseAuth.signInWithEmailAndPassword).mockResolvedValueOnce({
|
||||
user: mockUser
|
||||
} as any)
|
||||
|
||||
await store.login('test@example.com', 'correct-password')
|
||||
})
|
||||
|
||||
describe('login', () => {
|
||||
it('should login with valid credentials', async () => {
|
||||
const mockUserCredential = { user: mockUser }
|
||||
vi.mocked(firebaseAuth.signInWithEmailAndPassword).mockResolvedValue(
|
||||
mockUserCredential as any
|
||||
)
|
||||
|
||||
const result = await store.login('test@example.com', 'password')
|
||||
|
||||
expect(firebaseAuth.signInWithEmailAndPassword).toHaveBeenCalledWith(
|
||||
mockAuth,
|
||||
'test@example.com',
|
||||
'password'
|
||||
)
|
||||
expect(result).toEqual(mockUserCredential)
|
||||
expect(store.loading).toBe(false)
|
||||
})
|
||||
|
||||
it('should handle login errors', async () => {
|
||||
const mockError = new Error('Invalid password')
|
||||
vi.mocked(firebaseAuth.signInWithEmailAndPassword).mockRejectedValue(
|
||||
mockError
|
||||
)
|
||||
|
||||
await expect(
|
||||
store.login('test@example.com', 'wrong-password')
|
||||
).rejects.toThrow('Invalid password')
|
||||
|
||||
expect(firebaseAuth.signInWithEmailAndPassword).toHaveBeenCalledWith(
|
||||
mockAuth,
|
||||
'test@example.com',
|
||||
'wrong-password'
|
||||
)
|
||||
expect(store.loading).toBe(false)
|
||||
})
|
||||
|
||||
it('should handle concurrent login attempts correctly', async () => {
|
||||
// Set up multiple login promises
|
||||
const mockUserCredential = { user: mockUser }
|
||||
vi.mocked(firebaseAuth.signInWithEmailAndPassword).mockResolvedValue(
|
||||
mockUserCredential as any
|
||||
)
|
||||
|
||||
const loginPromise1 = store.login('user1@example.com', 'password1')
|
||||
const loginPromise2 = store.login('user2@example.com', 'password2')
|
||||
|
||||
// Resolve both promises
|
||||
await Promise.all([loginPromise1, loginPromise2])
|
||||
|
||||
// Verify the loading state is reset
|
||||
expect(store.loading).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('register', () => {
|
||||
it('should register a new user', async () => {
|
||||
const mockUserCredential = { user: mockUser }
|
||||
vi.mocked(firebaseAuth.createUserWithEmailAndPassword).mockResolvedValue(
|
||||
mockUserCredential as any
|
||||
)
|
||||
|
||||
const result = await store.register('new@example.com', 'password')
|
||||
|
||||
expect(firebaseAuth.createUserWithEmailAndPassword).toHaveBeenCalledWith(
|
||||
mockAuth,
|
||||
'new@example.com',
|
||||
'password'
|
||||
)
|
||||
expect(result).toEqual(mockUserCredential)
|
||||
expect(store.loading).toBe(false)
|
||||
})
|
||||
|
||||
it('should handle registration errors', async () => {
|
||||
const mockError = new Error('Email already in use')
|
||||
vi.mocked(firebaseAuth.createUserWithEmailAndPassword).mockRejectedValue(
|
||||
mockError
|
||||
)
|
||||
|
||||
await expect(
|
||||
store.register('existing@example.com', 'password')
|
||||
).rejects.toThrow('Email already in use')
|
||||
|
||||
expect(firebaseAuth.createUserWithEmailAndPassword).toHaveBeenCalledWith(
|
||||
mockAuth,
|
||||
'existing@example.com',
|
||||
'password'
|
||||
)
|
||||
expect(store.loading).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('logout', () => {
|
||||
it('should sign out the user', async () => {
|
||||
vi.mocked(firebaseAuth.signOut).mockResolvedValue(undefined)
|
||||
|
||||
await store.logout()
|
||||
|
||||
expect(firebaseAuth.signOut).toHaveBeenCalledWith(mockAuth)
|
||||
})
|
||||
|
||||
it('should handle logout errors', async () => {
|
||||
const mockError = new Error('Network error')
|
||||
vi.mocked(firebaseAuth.signOut).mockRejectedValue(mockError)
|
||||
|
||||
await expect(store.logout()).rejects.toThrow('Network error')
|
||||
|
||||
expect(firebaseAuth.signOut).toHaveBeenCalledWith(mockAuth)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getIdToken', () => {
|
||||
it('should return the user ID token', async () => {
|
||||
// FIX 2: Reset the mock and set a specific return value
|
||||
mockUser.getIdToken.mockReset()
|
||||
mockUser.getIdToken.mockResolvedValue('mock-id-token')
|
||||
|
||||
const token = await store.getIdToken()
|
||||
|
||||
expect(mockUser.getIdToken).toHaveBeenCalled()
|
||||
expect(token).toBe('mock-id-token')
|
||||
})
|
||||
|
||||
it('should return null when no user is logged in', async () => {
|
||||
// Simulate logged out state
|
||||
authStateCallback(null)
|
||||
|
||||
const token = await store.getIdToken()
|
||||
|
||||
expect(token).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should return null for token after login and logout sequence', async () => {
|
||||
// Setup mock for login
|
||||
const mockUserCredential = { user: mockUser }
|
||||
vi.mocked(firebaseAuth.signInWithEmailAndPassword).mockResolvedValue(
|
||||
mockUserCredential as any
|
||||
)
|
||||
|
||||
// Login
|
||||
await store.login('test@example.com', 'password')
|
||||
|
||||
// Simulate successful auth state update after login
|
||||
authStateCallback(mockUser)
|
||||
|
||||
// Verify we're logged in and can get a token
|
||||
mockUser.getIdToken.mockReset()
|
||||
mockUser.getIdToken.mockResolvedValue('mock-id-token')
|
||||
expect(await store.getIdToken()).toBe('mock-id-token')
|
||||
|
||||
// Setup mock for logout
|
||||
vi.mocked(firebaseAuth.signOut).mockResolvedValue(undefined)
|
||||
|
||||
// Logout
|
||||
await store.logout()
|
||||
|
||||
// Simulate successful auth state update after logout
|
||||
authStateCallback(null)
|
||||
|
||||
// Verify token is null after logout
|
||||
const tokenAfterLogout = await store.getIdToken()
|
||||
expect(tokenAfterLogout).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should handle network errors gracefully when offline (reproduces issue #4468)', async () => {
|
||||
// This test reproduces the issue where Firebase Auth makes network requests when offline
|
||||
// and fails without graceful error handling, causing toast error messages
|
||||
|
||||
// Simulate a user with an expired token that requires network refresh
|
||||
mockUser.getIdToken.mockReset()
|
||||
|
||||
// Mock network failure (auth/network-request-failed error from Firebase)
|
||||
const networkError = new FirebaseError(
|
||||
firebaseAuth.AuthErrorCodes.NETWORK_REQUEST_FAILED,
|
||||
'mock error'
|
||||
)
|
||||
|
||||
mockUser.getIdToken.mockRejectedValue(networkError)
|
||||
|
||||
const token = await store.getIdToken()
|
||||
expect(token).toBeUndefined() // Should return undefined instead of throwing
|
||||
})
|
||||
|
||||
it('should show error dialog when getIdToken fails with non-network error', async () => {
|
||||
// This test verifies that non-network errors trigger the error dialog
|
||||
mockUser.getIdToken.mockReset()
|
||||
|
||||
// Mock a non-network error using actual Firebase Auth error code
|
||||
const authError = new FirebaseError(
|
||||
firebaseAuth.AuthErrorCodes.USER_DISABLED,
|
||||
'User account is disabled.'
|
||||
)
|
||||
|
||||
mockUser.getIdToken.mockRejectedValue(authError)
|
||||
|
||||
// Should call the error dialog instead of throwing
|
||||
const token = await store.getIdToken()
|
||||
const dialogService = useDialogService()
|
||||
|
||||
expect(dialogService.showErrorDialog).toHaveBeenCalledWith(authError, {
|
||||
title: 'errorDialog.defaultTitle',
|
||||
reportType: 'authenticationError'
|
||||
})
|
||||
expect(token).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe('getAuthHeader', () => {
|
||||
it('should handle network errors gracefully when getting Firebase token (reproduces issue #4468)', async () => {
|
||||
// This test reproduces the issue where getAuthHeader fails due to network errors
|
||||
// when Firebase Auth tries to refresh tokens offline
|
||||
|
||||
// Mock useApiKeyAuthStore to return null (no API key fallback)
|
||||
const mockApiKeyStore = {
|
||||
getAuthHeader: vi.fn().mockReturnValue(null)
|
||||
}
|
||||
vi.doMock('@/stores/apiKeyAuthStore', () => ({
|
||||
useApiKeyAuthStore: () => mockApiKeyStore
|
||||
}))
|
||||
|
||||
// Setup user with network error on token refresh
|
||||
mockUser.getIdToken.mockReset()
|
||||
const networkError = new FirebaseError(
|
||||
firebaseAuth.AuthErrorCodes.NETWORK_REQUEST_FAILED,
|
||||
'mock error'
|
||||
)
|
||||
mockUser.getIdToken.mockRejectedValue(networkError)
|
||||
|
||||
const authHeader = await store.getAuthHeader()
|
||||
expect(authHeader).toBeNull() // Should fallback gracefully
|
||||
})
|
||||
})
|
||||
|
||||
  // Popup-based OAuth flows. Both providers go through firebaseAuth.signInWithPopup;
  // every test also asserts `store.loading` is reset when the flow settles.
  describe('social authentication', () => {
    describe('loginWithGoogle', () => {
      it('should sign in with Google', async () => {
        const mockUserCredential = { user: mockUser }
        vi.mocked(firebaseAuth.signInWithPopup).mockResolvedValue(
          mockUserCredential as any
        )

        const result = await store.loginWithGoogle()

        // Must be invoked with the shared auth instance and a Google provider.
        expect(firebaseAuth.signInWithPopup).toHaveBeenCalledWith(
          mockAuth,
          expect.any(firebaseAuth.GoogleAuthProvider)
        )
        expect(result).toEqual(mockUserCredential)
        expect(store.loading).toBe(false)
      })

      it('should handle Google sign in errors', async () => {
        const mockError = new Error('Google authentication failed')
        vi.mocked(firebaseAuth.signInWithPopup).mockRejectedValue(mockError)

        // The error propagates to the caller...
        await expect(store.loginWithGoogle()).rejects.toThrow(
          'Google authentication failed'
        )

        // ...but the popup was still attempted and loading was reset.
        expect(firebaseAuth.signInWithPopup).toHaveBeenCalledWith(
          mockAuth,
          expect.any(firebaseAuth.GoogleAuthProvider)
        )
        expect(store.loading).toBe(false)
      })
    })

    describe('loginWithGithub', () => {
      it('should sign in with Github', async () => {
        const mockUserCredential = { user: mockUser }
        vi.mocked(firebaseAuth.signInWithPopup).mockResolvedValue(
          mockUserCredential as any
        )

        const result = await store.loginWithGithub()

        expect(firebaseAuth.signInWithPopup).toHaveBeenCalledWith(
          mockAuth,
          expect.any(firebaseAuth.GithubAuthProvider)
        )
        expect(result).toEqual(mockUserCredential)
        expect(store.loading).toBe(false)
      })

      it('should handle Github sign in errors', async () => {
        const mockError = new Error('Github authentication failed')
        vi.mocked(firebaseAuth.signInWithPopup).mockRejectedValue(mockError)

        await expect(store.loginWithGithub()).rejects.toThrow(
          'Github authentication failed'
        )

        expect(firebaseAuth.signInWithPopup).toHaveBeenCalledWith(
          mockAuth,
          expect.any(firebaseAuth.GithubAuthProvider)
        )
        expect(store.loading).toBe(false)
      })
    })

    // Two in-flight popup logins must not leave `loading` stuck true.
    it('should handle concurrent social login attempts correctly', async () => {
      const mockUserCredential = { user: mockUser }
      vi.mocked(firebaseAuth.signInWithPopup).mockResolvedValue(
        mockUserCredential as any
      )

      const googleLoginPromise = store.loginWithGoogle()
      const githubLoginPromise = store.loginWithGithub()

      await Promise.all([googleLoginPromise, githubLoginPromise])

      expect(store.loading).toBe(false)
    })
  })
|
||||
|
||||
  // Covers the /customers/billing endpoint wrapper: request shape with and
  // without a target tier, and error propagation on a non-ok response.
  describe('accessBillingPortal', () => {
    it('should call billing endpoint without body when no targetTier provided', async () => {
      const result = await store.accessBillingPortal()

      expect(mockFetch).toHaveBeenCalledWith(
        expect.stringContaining('/customers/billing'),
        expect.objectContaining({
          method: 'POST',
          headers: expect.objectContaining({
            Authorization: 'Bearer mock-id-token',
            'Content-Type': 'application/json'
          })
        })
      )

      // The request must omit `body` entirely, not send `body: undefined`.
      const callArgs = mockFetch.mock.calls.find((call) =>
        (call[0] as string).endsWith('/customers/billing')
      )
      expect(callArgs?.[1]).not.toHaveProperty('body')
      expect(result).toEqual({
        billing_portal_url: 'https://billing.stripe.com/test'
      })
    })

    it('should include target_tier in request body when targetTier provided', async () => {
      await store.accessBillingPortal('creator')

      const callArgs = mockFetch.mock.calls.find((call) =>
        (call[0] as string).endsWith('/customers/billing')
      )
      expect(callArgs?.[1]).toHaveProperty('body')
      expect(JSON.parse(callArgs?.[1]?.body as string)).toEqual({
        target_tier: 'creator'
      })
    })

    it('should handle different checkout tier formats', async () => {
      // Every accepted tier literal, monthly and yearly variants.
      const tiers = [
        'standard',
        'creator',
        'pro',
        'standard-yearly',
        'creator-yearly',
        'pro-yearly'
      ] as const

      for (const tier of tiers) {
        mockFetch.mockClear()
        await store.accessBillingPortal(tier)

        const callArgs = mockFetch.mock.calls.find((call) =>
          (call[0] as string).endsWith('/customers/billing')
        )
        expect(JSON.parse(callArgs?.[1]?.body as string)).toEqual({
          target_tier: tier
        })
      }
    })

    it('should throw error when API returns error response', async () => {
      mockFetch.mockImplementationOnce(() =>
        Promise.resolve({
          ok: false,
          json: () => Promise.resolve({ message: 'Billing portal unavailable' })
        })
      )

      await expect(store.accessBillingPortal()).rejects.toThrow()
    })
  })
|
||||
})
|
||||
98
src/stores/imagePreviewStore.test.ts
Normal file
98
src/stores/imagePreviewStore.test.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type { LGraphNode } from '@/lib/litegraph/src/litegraph'
|
||||
import type { ExecutedWsMessage } from '@/schemas/apiSchema'
|
||||
import { app } from '@/scripts/app'
|
||||
import { useNodeOutputStore } from '@/stores/imagePreviewStore'
|
||||
import * as litegraphUtil from '@/utils/litegraphUtil'
|
||||
|
||||
vi.mock('@/utils/litegraphUtil', () => ({
|
||||
isVideoNode: vi.fn()
|
||||
}))
|
||||
|
||||
vi.mock('@/scripts/app', () => ({
|
||||
app: {
|
||||
getPreviewFormatParam: vi.fn(() => '&format=test_webp')
|
||||
}
|
||||
}))
|
||||
|
||||
const createMockNode = (overrides: Partial<LGraphNode> = {}): LGraphNode =>
|
||||
({
|
||||
id: 1,
|
||||
type: 'TestNode',
|
||||
...overrides
|
||||
}) as LGraphNode
|
||||
|
||||
const createMockOutputs = (
|
||||
images?: ExecutedWsMessage['output']['images']
|
||||
): ExecutedWsMessage['output'] => ({ images })
|
||||
|
||||
describe('imagePreviewStore getPreviewParam', () => {
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
vi.clearAllMocks()
|
||||
vi.mocked(litegraphUtil.isVideoNode).mockReturnValue(false)
|
||||
})
|
||||
|
||||
it('should return empty string if node.animatedImages is true', () => {
|
||||
const store = useNodeOutputStore()
|
||||
// @ts-expect-error `animatedImages` property is not typed
|
||||
const node = createMockNode({ animatedImages: true })
|
||||
const outputs = createMockOutputs([{ filename: 'img.png' }])
|
||||
expect(store.getPreviewParam(node, outputs)).toBe('')
|
||||
expect(vi.mocked(app).getPreviewFormatParam).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return empty string if isVideoNode returns true', () => {
|
||||
const store = useNodeOutputStore()
|
||||
vi.mocked(litegraphUtil.isVideoNode).mockReturnValue(true)
|
||||
const node = createMockNode()
|
||||
const outputs = createMockOutputs([{ filename: 'img.png' }])
|
||||
expect(store.getPreviewParam(node, outputs)).toBe('')
|
||||
expect(vi.mocked(app).getPreviewFormatParam).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return empty string if outputs.images is undefined', () => {
|
||||
const store = useNodeOutputStore()
|
||||
const node = createMockNode()
|
||||
const outputs: ExecutedWsMessage['output'] = {}
|
||||
expect(store.getPreviewParam(node, outputs)).toBe('')
|
||||
expect(vi.mocked(app).getPreviewFormatParam).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return empty string if outputs.images is empty', () => {
|
||||
const store = useNodeOutputStore()
|
||||
const node = createMockNode()
|
||||
const outputs = createMockOutputs([])
|
||||
expect(store.getPreviewParam(node, outputs)).toBe('')
|
||||
expect(vi.mocked(app).getPreviewFormatParam).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return empty string if outputs.images contains SVG images', () => {
|
||||
const store = useNodeOutputStore()
|
||||
const node = createMockNode()
|
||||
const outputs = createMockOutputs([{ filename: 'img.svg' }])
|
||||
expect(store.getPreviewParam(node, outputs)).toBe('')
|
||||
expect(vi.mocked(app).getPreviewFormatParam).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should return format param for standard image outputs', () => {
|
||||
const store = useNodeOutputStore()
|
||||
const node = createMockNode()
|
||||
const outputs = createMockOutputs([{ filename: 'img.png' }])
|
||||
expect(store.getPreviewParam(node, outputs)).toBe('&format=test_webp')
|
||||
expect(vi.mocked(app).getPreviewFormatParam).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
it('should return format param for multiple standard images', () => {
|
||||
const store = useNodeOutputStore()
|
||||
const node = createMockNode()
|
||||
const outputs = createMockOutputs([
|
||||
{ filename: 'img1.png' },
|
||||
{ filename: 'img2.jpg' }
|
||||
])
|
||||
expect(store.getPreviewParam(node, outputs)).toBe('&format=test_webp')
|
||||
expect(vi.mocked(app).getPreviewFormatParam).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
})
|
||||
424
src/stores/keybindingStore.test.ts
Normal file
424
src/stores/keybindingStore.test.ts
Normal file
@@ -0,0 +1,424 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
import { beforeEach, describe, expect, it } from 'vitest'

import { KeybindingImpl, useKeybindingStore } from '@/stores/keybindingStore'

// Exercises the keybinding store's two-layer model: default (core) bindings,
// user overrides/unsets layered on top, and resetKeybindingForCommand which
// reconciles the two. A fresh Pinia instance isolates every test.
describe('useKeybindingStore', () => {
  beforeEach(() => {
    setActivePinia(createPinia())
  })

  it('should add and retrieve default keybindings', () => {
    const store = useKeybindingStore()
    const keybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'A', ctrl: true }
    })

    store.addDefaultKeybinding(keybinding)

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(keybinding.combo)).toEqual(keybinding)
  })

  it('should add and retrieve user keybindings', () => {
    const store = useKeybindingStore()
    const keybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'B', alt: true }
    })

    store.addUserKeybinding(keybinding)

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(keybinding.combo)).toEqual(keybinding)
  })

  it('should get keybindings by command id', () => {
    const store = useKeybindingStore()
    const keybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'C', ctrl: true }
    })
    store.addDefaultKeybinding(keybinding)
    expect(store.getKeybindingsByCommandId('test.command')).toEqual([
      keybinding
    ])
  })

  // A user binding on the same combo wins over the default binding.
  it('should override default keybindings with user keybindings', () => {
    const store = useKeybindingStore()
    const defaultKeybinding = new KeybindingImpl({
      commandId: 'test.command1',
      combo: { key: 'C', ctrl: true }
    })
    const userKeybinding = new KeybindingImpl({
      commandId: 'test.command2',
      combo: { key: 'C', ctrl: true }
    })

    store.addDefaultKeybinding(defaultKeybinding)
    store.addUserKeybinding(userKeybinding)

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(userKeybinding.combo)).toEqual(userKeybinding)
  })

  // A combo freed by unsetting its default can be reused by a user binding.
  it('Should allow binding to unsetted default keybindings', () => {
    const store = useKeybindingStore()
    const defaultKeybinding = new KeybindingImpl({
      commandId: 'test.command1',
      combo: { key: 'C', ctrl: true }
    })
    store.addDefaultKeybinding(defaultKeybinding)
    store.unsetKeybinding(defaultKeybinding)

    const userKeybinding = new KeybindingImpl({
      commandId: 'test.command2',
      combo: { key: 'C', ctrl: true }
    })
    store.addUserKeybinding(userKeybinding)

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(userKeybinding.combo)).toEqual(userKeybinding)
  })

  it('should unset user keybindings', () => {
    const store = useKeybindingStore()
    const keybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'D', meta: true }
    })

    store.addUserKeybinding(keybinding)
    expect(store.keybindings).toHaveLength(1)

    store.unsetKeybinding(keybinding)
    expect(store.keybindings).toHaveLength(0)
  })

  it('should unset default keybindings', () => {
    const store = useKeybindingStore()
    const keybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'E', ctrl: true, alt: true }
    })

    store.addDefaultKeybinding(keybinding)
    expect(store.keybindings).toHaveLength(1)

    store.unsetKeybinding(keybinding)
    expect(store.keybindings).toHaveLength(0)
  })

  // Defaults are strict: registering the same combo twice is a programming error.
  it('should throw an error when adding duplicate default keybindings', () => {
    const store = useKeybindingStore()
    const keybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'F', shift: true }
    })

    store.addDefaultKeybinding(keybinding)
    expect(() => store.addDefaultKeybinding(keybinding)).toThrow()
  })

  // User bindings are lenient: re-binding a combo replaces the earlier binding.
  it('should allow adding duplicate user keybindings', () => {
    const store = useKeybindingStore()
    const keybinding1 = new KeybindingImpl({
      commandId: 'test.command1',
      combo: { key: 'G', ctrl: true }
    })
    const keybinding2 = new KeybindingImpl({
      commandId: 'test.command2',
      combo: { key: 'G', ctrl: true }
    })

    store.addUserKeybinding(keybinding1)
    store.addUserKeybinding(keybinding2)

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(keybinding2.combo)).toEqual(keybinding2)
  })

  it('should not throw an error when unsetting non-existent keybindings', () => {
    const store = useKeybindingStore()
    const keybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'H', alt: true, shift: true }
    })

    expect(() => store.unsetKeybinding(keybinding)).not.toThrow()
  })

  // Unsetting a binding whose combo exists but whose commandId differs is a no-op.
  it('should not throw an error when unsetting unknown keybinding', () => {
    const store = useKeybindingStore()
    const keybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'I', ctrl: true }
    })
    store.addUserKeybinding(keybinding)

    expect(() =>
      store.unsetKeybinding(
        new KeybindingImpl({
          commandId: 'test.foo',
          combo: { key: 'I', ctrl: true }
        })
      )
    ).not.toThrow()
  })

  it('should remove unset keybinding when adding back a default keybinding', () => {
    const store = useKeybindingStore()
    const defaultKeybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'I', ctrl: true }
    })

    // Add default keybinding
    store.addDefaultKeybinding(defaultKeybinding)
    expect(store.keybindings).toHaveLength(1)

    // Unset the default keybinding
    store.unsetKeybinding(defaultKeybinding)
    expect(store.keybindings).toHaveLength(0)

    // Add the same keybinding as a user keybinding
    store.addUserKeybinding(defaultKeybinding)

    // Check that the keybinding is back and not in the unset list
    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(defaultKeybinding.combo)).toEqual(
      defaultKeybinding
    )
  })

  it('Should accept same keybinding from default and user', () => {
    const store = useKeybindingStore()
    const keybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'J', ctrl: true }
    })
    // Add default keybinding.
    // This can happen when we change default keybindings.
    store.addDefaultKeybinding(keybinding)
    // Add user keybinding.
    store.addUserKeybinding(keybinding)

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(keybinding.combo)).toEqual(keybinding)
  })

  it('Should keep previously customized keybindings after default keybindings change', () => {
    // Initially command 'foo' was bound to 'K, Ctrl'. User unset it and bound the
    // command to 'A, Ctrl'.
    // Now we change the default keybindings of 'foo' to 'A, Ctrl'.
    // The user customized keybinding should be kept.
    const store = useKeybindingStore()

    const userUnsetKeybindings = [
      new KeybindingImpl({
        commandId: 'foo',
        combo: { key: 'K', ctrl: true }
      })
    ]

    const userNewKeybindings = [
      new KeybindingImpl({
        commandId: 'foo',
        combo: { key: 'A', ctrl: true }
      })
    ]

    const newCoreKeybindings = [
      new KeybindingImpl({
        commandId: 'foo',
        combo: { key: 'A', ctrl: true }
      })
    ]

    // Replay the load order: new defaults first, then persisted user state.
    for (const keybinding of newCoreKeybindings) {
      store.addDefaultKeybinding(keybinding)
    }

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(userNewKeybindings[0].combo)).toEqual(
      userNewKeybindings[0]
    )

    for (const keybinding of userUnsetKeybindings) {
      store.unsetKeybinding(keybinding)
    }

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(userNewKeybindings[0].combo)).toEqual(
      userNewKeybindings[0]
    )

    for (const keybinding of userNewKeybindings) {
      store.addUserKeybinding(keybinding)
    }

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(userNewKeybindings[0].combo)).toEqual(
      userNewKeybindings[0]
    )
  })

  it('should replace the previous keybinding with a new one for the same combo and unset the old command', () => {
    const store = useKeybindingStore()

    const oldKeybinding = new KeybindingImpl({
      commandId: 'command1',
      combo: { key: 'A', ctrl: true }
    })

    store.addUserKeybinding(oldKeybinding)

    const newKeybinding = new KeybindingImpl({
      commandId: 'command2',
      combo: { key: 'A', ctrl: true }
    })

    store.updateKeybindingOnCommand(newKeybinding)

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybinding(newKeybinding.combo)?.commandId).toBe('command2')
    expect(store.getKeybindingsByCommandId('command1')).toHaveLength(0)
  })

  it('should return false when no default or current keybinding exists during reset', () => {
    const store = useKeybindingStore()
    const result = store.resetKeybindingForCommand('nonexistent.command')
    expect(result).toBe(false)
  })

  // Reset is a no-op (returns false) when the command is already at its default.
  it('should return false when current keybinding equals default keybinding', () => {
    const store = useKeybindingStore()
    const defaultKeybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'L', ctrl: true }
    })

    store.addDefaultKeybinding(defaultKeybinding)
    const result = store.resetKeybindingForCommand('test.command')

    expect(result).toBe(false)
    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybindingByCommandId('test.command')).toEqual(
      defaultKeybinding
    )
  })

  // With no default, "reset" means removing the user binding entirely.
  it('should unset user keybinding when no default keybinding exists and return true', () => {
    const store = useKeybindingStore()
    const userKeybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'M', ctrl: true }
    })

    store.addUserKeybinding(userKeybinding)
    expect(store.keybindings).toHaveLength(1)

    const result = store.resetKeybindingForCommand('test.command')

    expect(result).toBe(true)
    expect(store.keybindings).toHaveLength(0)
  })

  it('should restore default keybinding when user has overridden it and return true', () => {
    const store = useKeybindingStore()

    const defaultKeybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'N', ctrl: true }
    })

    const userKeybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'O', alt: true }
    })

    store.addDefaultKeybinding(defaultKeybinding)
    store.updateKeybindingOnCommand(userKeybinding)

    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybindingByCommandId('test.command')).toEqual(
      userKeybinding
    )

    const result = store.resetKeybindingForCommand('test.command')

    expect(result).toBe(true)
    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybindingByCommandId('test.command')).toEqual(
      defaultKeybinding
    )
  })

  it('should remove unset record and restore default keybinding when user has unset it', () => {
    const store = useKeybindingStore()

    const defaultKeybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'P', ctrl: true }
    })

    store.addDefaultKeybinding(defaultKeybinding)

    store.unsetKeybinding(defaultKeybinding)
    expect(store.keybindings).toHaveLength(0)

    // The unset is tracked by serialized combo so it can be reverted later.
    const serializedCombo = defaultKeybinding.combo.serialize()
    const userUnsetKeybindings = store.getUserUnsetKeybindings()
    expect(userUnsetKeybindings[serializedCombo]).toBeTruthy()
    expect(
      userUnsetKeybindings[serializedCombo].equals(defaultKeybinding)
    ).toBe(true)

    const result = store.resetKeybindingForCommand('test.command')

    expect(result).toBe(true)
    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybindingByCommandId('test.command')).toEqual(
      defaultKeybinding
    )

    // Resetting must also clear the unset record.
    expect(store.getUserUnsetKeybindings()[serializedCombo]).toBeUndefined()
  })

  it('should handle complex scenario with both unset and user keybindings', () => {
    const store = useKeybindingStore()

    // Create default keybinding
    const defaultKeybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'Q', ctrl: true }
    })
    store.addDefaultKeybinding(defaultKeybinding)

    // Unset default keybinding
    store.unsetKeybinding(defaultKeybinding)
    expect(store.keybindings).toHaveLength(0)

    // Add user keybinding with different combo
    const userKeybinding = new KeybindingImpl({
      commandId: 'test.command',
      combo: { key: 'R', alt: true }
    })
    store.addUserKeybinding(userKeybinding)
    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybindingByCommandId('test.command')).toEqual(
      userKeybinding
    )

    // Reset keybinding to default
    const result = store.resetKeybindingForCommand('test.command')

    expect(result).toBe(true)
    expect(store.keybindings).toHaveLength(1)
    expect(store.getKeybindingByCommandId('test.command')).toEqual(
      defaultKeybinding
    )
  })
})
|
||||
181
src/stores/modelStore.test.ts
Normal file
181
src/stores/modelStore.test.ts
Normal file
@@ -0,0 +1,181 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { assetService } from '@/platform/assets/services/assetService'
|
||||
import { useSettingStore } from '@/platform/settings/settingStore'
|
||||
import { api } from '@/scripts/api'
|
||||
import { useModelStore } from '@/stores/modelStore'
|
||||
|
||||
// Mock the api
vi.mock('@/scripts/api', () => ({
  api: {
    getModels: vi.fn(),
    getModelFolders: vi.fn(),
    viewMetadata: vi.fn(),
    apiURL: vi.fn((path: string) => `http://localhost:8188${path}`),
    addEventListener: vi.fn(),
    removeEventListener: vi.fn()
  }
}))

// Mock the assetService
vi.mock('@/platform/assets/services/assetService', () => ({
  assetService: {
    getAssetModelFolders: vi.fn(),
    getAssetModels: vi.fn()
  }
}))

// Mock the settingStore
vi.mock('@/platform/settings/settingStore', () => ({
  useSettingStore: vi.fn()
}))

/**
 * Install consistent mock responses on all three mocked modules.
 * Called by each test (after the per-test vi.resetAllMocks()) so the test
 * can pick which backend is active.
 *
 * @param useAssetAPI value returned for the 'Comfy.Assets.UseAssetAPI'
 *   setting; selects the asset API (true) or the experimental REST API
 *   (false, the default). All other settings read as false.
 */
function enableMocks(useAssetAPI = false) {
  // Mock settingStore to return the useAssetAPI setting
  const mockSettingStore = {
    get: vi.fn().mockImplementation((key: string) => {
      if (key === 'Comfy.Assets.UseAssetAPI') {
        return useAssetAPI
      }
      return false
    })
  }
  vi.mocked(useSettingStore, { partial: true }).mockReturnValue(
    mockSettingStore
  )

  // Mock experimental API - returns objects with name and folders properties
  vi.mocked(api.getModels).mockResolvedValue([
    { name: 'sdxl.safetensors', pathIndex: 0 },
    { name: 'sdv15.safetensors', pathIndex: 0 },
    { name: 'noinfo.safetensors', pathIndex: 0 }
  ])
  vi.mocked(api.getModelFolders).mockResolvedValue([
    { name: 'checkpoints', folders: ['/path/to/checkpoints'] },
    { name: 'vae', folders: ['/path/to/vae'] }
  ])

  // Mock asset API - also returns objects with name and folders properties
  // (deliberately identical data so both code paths produce the same store state)
  vi.mocked(assetService.getAssetModelFolders).mockResolvedValue([
    { name: 'checkpoints', folders: ['/path/to/checkpoints'] },
    { name: 'vae', folders: ['/path/to/vae'] }
  ])
  vi.mocked(assetService.getAssetModels).mockResolvedValue([
    { name: 'sdxl.safetensors', pathIndex: 0 },
    { name: 'sdv15.safetensors', pathIndex: 0 },
    { name: 'noinfo.safetensors', pathIndex: 0 }
  ])

  // Metadata: 'noinfo.safetensors' has none; every other model gets a full
  // modelspec-style record derived from its name.
  vi.mocked(api.viewMetadata).mockImplementation((_, model) => {
    if (model === 'noinfo.safetensors') {
      return Promise.resolve({})
    }
    return Promise.resolve({
      'modelspec.title': `Title of ${model}`,
      display_name: 'Should not show',
      'modelspec.architecture': 'stable-diffusion-xl-base-v1',
      'modelspec.author': `Author of ${model}`,
      'modelspec.description': `Description of ${model}`,
      'modelspec.resolution': '1024x1024',
      trigger_phrase: `Trigger phrase of ${model}`,
      usage_hint: `Usage hint of ${model}`,
      tags: `tags,for,${model}`
    })
  })
}
|
||||
|
||||
describe('useModelStore', () => {
|
||||
let store: ReturnType<typeof useModelStore>
|
||||
|
||||
beforeEach(async () => {
|
||||
setActivePinia(createPinia())
|
||||
vi.resetAllMocks()
|
||||
})
|
||||
|
||||
it('should load models', async () => {
|
||||
enableMocks()
|
||||
store = useModelStore()
|
||||
await store.loadModelFolders()
|
||||
const folderStore = await store.getLoadedModelFolder('checkpoints')
|
||||
expect(folderStore).toBeDefined()
|
||||
expect(Object.keys(folderStore!.models)).toHaveLength(3)
|
||||
})
|
||||
|
||||
it('should load model metadata', async () => {
|
||||
enableMocks()
|
||||
store = useModelStore()
|
||||
await store.loadModelFolders()
|
||||
const folderStore = await store.getLoadedModelFolder('checkpoints')
|
||||
expect(folderStore).toBeDefined()
|
||||
const model = folderStore!.models['0/sdxl.safetensors']
|
||||
await model.load()
|
||||
expect(model.title).toBe('Title of sdxl.safetensors')
|
||||
expect(model.architecture_id).toBe('stable-diffusion-xl-base-v1')
|
||||
expect(model.author).toBe('Author of sdxl.safetensors')
|
||||
expect(model.description).toBe('Description of sdxl.safetensors')
|
||||
expect(model.resolution).toBe('1024x1024')
|
||||
expect(model.trigger_phrase).toBe('Trigger phrase of sdxl.safetensors')
|
||||
expect(model.usage_hint).toBe('Usage hint of sdxl.safetensors')
|
||||
expect(model.tags).toHaveLength(3)
|
||||
})
|
||||
|
||||
it('should handle no metadata', async () => {
|
||||
enableMocks()
|
||||
store = useModelStore()
|
||||
await store.loadModelFolders()
|
||||
const folderStore = await store.getLoadedModelFolder('checkpoints')
|
||||
expect(folderStore).toBeDefined()
|
||||
const model = folderStore!.models['0/noinfo.safetensors']
|
||||
await model.load()
|
||||
expect(model.file_name).toBe('noinfo.safetensors')
|
||||
expect(model.title).toBe('noinfo')
|
||||
expect(model.architecture_id).toBe('')
|
||||
expect(model.author).toBe('')
|
||||
expect(model.description).toBe('')
|
||||
expect(model.resolution).toBe('')
|
||||
})
|
||||
|
||||
it('should cache model information', async () => {
|
||||
enableMocks()
|
||||
store = useModelStore()
|
||||
await store.loadModelFolders()
|
||||
expect(api.getModels).toHaveBeenCalledTimes(0)
|
||||
await store.getLoadedModelFolder('checkpoints')
|
||||
expect(api.getModels).toHaveBeenCalledTimes(1)
|
||||
await store.getLoadedModelFolder('checkpoints')
|
||||
expect(api.getModels).toHaveBeenCalledTimes(1)
|
||||
})
|
||||
|
||||
describe('API switching functionality', () => {
|
||||
it('should use experimental API for complete workflow when UseAssetAPI setting is false', async () => {
|
||||
enableMocks(false) // useAssetAPI = false
|
||||
store = useModelStore()
|
||||
await store.loadModelFolders()
|
||||
const folderStore = await store.getLoadedModelFolder('checkpoints')
|
||||
|
||||
// Both APIs return objects with .name property, modelStore extracts folder.name in both cases
|
||||
expect(api.getModelFolders).toHaveBeenCalledTimes(1)
|
||||
expect(api.getModels).toHaveBeenCalledWith('checkpoints')
|
||||
expect(assetService.getAssetModelFolders).toHaveBeenCalledTimes(0)
|
||||
expect(assetService.getAssetModels).toHaveBeenCalledTimes(0)
|
||||
expect(folderStore).toBeDefined()
|
||||
expect(Object.keys(folderStore!.models)).toHaveLength(3)
|
||||
})
|
||||
|
||||
it('should use asset API for complete workflow when UseAssetAPI setting is true', async () => {
|
||||
enableMocks(true) // useAssetAPI = true
|
||||
store = useModelStore()
|
||||
await store.loadModelFolders()
|
||||
const folderStore = await store.getLoadedModelFolder('checkpoints')
|
||||
|
||||
// Both APIs return objects with .name property, modelStore extracts folder.name in both cases
|
||||
expect(assetService.getAssetModelFolders).toHaveBeenCalledTimes(1)
|
||||
expect(assetService.getAssetModels).toHaveBeenCalledWith('checkpoints')
|
||||
expect(api.getModelFolders).toHaveBeenCalledTimes(0)
|
||||
expect(api.getModels).toHaveBeenCalledTimes(0)
|
||||
expect(folderStore).toBeDefined()
|
||||
expect(Object.keys(folderStore!.models)).toHaveLength(3)
|
||||
})
|
||||
})
|
||||
})
|
||||
494
src/stores/modelToNodeStore.test.ts
Normal file
494
src/stores/modelToNodeStore.test.ts
Normal file
@@ -0,0 +1,494 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type { ComfyNodeDef as ComfyNodeDefV1 } from '@/schemas/nodeDefSchema'
|
||||
import {
|
||||
ModelNodeProvider,
|
||||
useModelToNodeStore
|
||||
} from '@/stores/modelToNodeStore'
|
||||
import { ComfyNodeDefImpl, useNodeDefStore } from '@/stores/nodeDefStore'
|
||||
|
||||
// Model folder types the store registers providers for out of the box —
// presumably consumed by assertions later in this suite (usage is below this
// chunk; verify against the describe block).
const EXPECTED_DEFAULT_TYPES = [
  'checkpoints',
  'loras',
  'vae',
  'controlnet',
  'diffusion_models',
  'upscale_models',
  'style_models',
  'gligen',
  'clip_vision',
  'text_encoders',
  'audio_encoders',
  'model_patches',
  'animatediff_models',
  'animatediff_motion_lora'
] as const

// Store-instance type, used to type the vi.mock factory's importOriginal call.
// NOTE(review): importOriginal conventionally receives the *module* shape, not
// the store-instance type — this compiles because the result is only spread;
// consider `typeof import('@/stores/nodeDefStore')` for precision.
type NodeDefStoreType = ReturnType<typeof useNodeDefStore>
|
||||
|
||||
// Create minimal but valid ComfyNodeDefImpl for testing
|
||||
function createMockNodeDef(name: string): ComfyNodeDefImpl {
|
||||
const def: ComfyNodeDefV1 = {
|
||||
name,
|
||||
display_name: name,
|
||||
category: 'test',
|
||||
python_module: 'nodes',
|
||||
description: '',
|
||||
input: { required: {}, optional: {} },
|
||||
output: [],
|
||||
output_name: [],
|
||||
output_is_list: [],
|
||||
output_node: false
|
||||
}
|
||||
return new ComfyNodeDefImpl(def)
|
||||
}
|
||||
|
||||
const MOCK_NODE_NAMES = [
|
||||
'CheckpointLoaderSimple',
|
||||
'ImageOnlyCheckpointLoader',
|
||||
'LoraLoader',
|
||||
'LoraLoaderModelOnly',
|
||||
'VAELoader',
|
||||
'ControlNetLoader',
|
||||
'UNETLoader',
|
||||
'UpscaleModelLoader',
|
||||
'StyleModelLoader',
|
||||
'GLIGENLoader',
|
||||
'CLIPVisionLoader',
|
||||
'CLIPLoader',
|
||||
'AudioEncoderLoader',
|
||||
'ModelPatchLoader',
|
||||
'ADE_LoadAnimateDiffModel',
|
||||
'ADE_AnimateDiffLoRALoader'
|
||||
] as const
|
||||
|
||||
const mockNodeDefsByName = Object.fromEntries(
|
||||
MOCK_NODE_NAMES.map((name) => [name, createMockNodeDef(name)])
|
||||
)
|
||||
|
||||
// Mock nodeDefStore dependency - modelToNodeStore relies on this for registration
|
||||
// Most tests expect this to be populated; tests that need empty state can override
|
||||
vi.mock('@/stores/nodeDefStore', async (importOriginal) => {
|
||||
const original = await importOriginal<NodeDefStoreType>()
|
||||
|
||||
return {
|
||||
...original,
|
||||
useNodeDefStore: vi.fn(() => ({
|
||||
nodeDefsByName: mockNodeDefsByName
|
||||
}))
|
||||
}
|
||||
})
|
||||
|
||||
describe('useModelToNodeStore', () => {
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
describe('modelToNodeMap', () => {
|
||||
it('should initialize as empty', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
expect(Object.keys(modelToNodeStore.modelToNodeMap)).toHaveLength(0)
|
||||
})
|
||||
|
||||
it('should populate after registration', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
expect(Object.keys(modelToNodeStore.modelToNodeMap)).toEqual(
|
||||
expect.arrayContaining(['checkpoints', 'diffusion_models'])
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getNodeProvider', () => {
|
||||
it('should return provider for registered model type', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
const provider = modelToNodeStore.getNodeProvider('checkpoints')
|
||||
expect(provider).toBeDefined()
|
||||
// After asserting provider is defined, we can safely access its properties
|
||||
expect(provider?.nodeDef?.name).toBe('CheckpointLoaderSimple')
|
||||
expect(provider?.key).toBe('ckpt_name')
|
||||
})
|
||||
|
||||
it('should return undefined for unregistered model type', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
expect(modelToNodeStore.getNodeProvider('nonexistent')).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should return first registered provider when multiple providers exist for same model type', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
const provider = modelToNodeStore.getNodeProvider('checkpoints')
|
||||
// Using optional chaining for safety since getNodeProvider() can return undefined
|
||||
expect(provider?.nodeDef?.name).toBe('CheckpointLoaderSimple')
|
||||
})
|
||||
|
||||
it('should trigger lazy registration when called before registerDefaults', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
|
||||
const provider = modelToNodeStore.getNodeProvider('checkpoints')
|
||||
expect(provider).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe('getAllNodeProviders', () => {
|
||||
it('should return all providers for model type with multiple nodes', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
const checkpointProviders =
|
||||
modelToNodeStore.getAllNodeProviders('checkpoints')
|
||||
expect(checkpointProviders).toHaveLength(2)
|
||||
expect(checkpointProviders).toEqual(
|
||||
expect.arrayContaining([
|
||||
expect.objectContaining({
|
||||
nodeDef: expect.objectContaining({ name: 'CheckpointLoaderSimple' })
|
||||
}),
|
||||
expect.objectContaining({
|
||||
nodeDef: expect.objectContaining({
|
||||
name: 'ImageOnlyCheckpointLoader'
|
||||
})
|
||||
})
|
||||
])
|
||||
)
|
||||
|
||||
const loraProviders = modelToNodeStore.getAllNodeProviders('loras')
|
||||
expect(loraProviders).toHaveLength(2)
|
||||
})
|
||||
|
||||
it('should return single provider for model type with one node', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
const diffusionModelProviders =
|
||||
modelToNodeStore.getAllNodeProviders('diffusion_models')
|
||||
expect(diffusionModelProviders).toHaveLength(1)
|
||||
expect(diffusionModelProviders[0].nodeDef.name).toBe('UNETLoader')
|
||||
})
|
||||
|
||||
it('should return empty array for unregistered model type', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
expect(modelToNodeStore.getAllNodeProviders('nonexistent')).toEqual([])
|
||||
})
|
||||
|
||||
it('should trigger lazy registration when called before registerDefaults', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
|
||||
const providers = modelToNodeStore.getAllNodeProviders('checkpoints')
|
||||
expect(providers.length).toBeGreaterThan(0)
|
||||
})
|
||||
})
|
||||
|
||||
describe('registerNodeProvider', () => {
|
||||
it('should not register provider when nodeDef is undefined', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
const providerWithoutNodeDef = new ModelNodeProvider(
|
||||
undefined as any,
|
||||
'custom_key'
|
||||
)
|
||||
|
||||
modelToNodeStore.registerNodeProvider(
|
||||
'custom_type',
|
||||
providerWithoutNodeDef
|
||||
)
|
||||
|
||||
const retrieved = modelToNodeStore.getNodeProvider('custom_type')
|
||||
expect(retrieved).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should register provider directly', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
const nodeDefStore = useNodeDefStore()
|
||||
const customProvider = new ModelNodeProvider(
|
||||
nodeDefStore.nodeDefsByName['UNETLoader'],
|
||||
'custom_key'
|
||||
)
|
||||
|
||||
modelToNodeStore.registerNodeProvider('custom_type', customProvider)
|
||||
|
||||
const retrieved = modelToNodeStore.getNodeProvider('custom_type')
|
||||
expect(retrieved).toStrictEqual(customProvider)
|
||||
// Optional chaining for consistency with getNodeProvider() return type
|
||||
expect(retrieved?.key).toBe('custom_key')
|
||||
})
|
||||
|
||||
it('should handle multiple providers for same model type and return first as primary', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
const nodeDefStore = useNodeDefStore()
|
||||
const provider1 = new ModelNodeProvider(
|
||||
nodeDefStore.nodeDefsByName['UNETLoader'],
|
||||
'key1'
|
||||
)
|
||||
const provider2 = new ModelNodeProvider(
|
||||
nodeDefStore.nodeDefsByName['VAELoader'],
|
||||
'key2'
|
||||
)
|
||||
|
||||
modelToNodeStore.registerNodeProvider('multi_type', provider1)
|
||||
modelToNodeStore.registerNodeProvider('multi_type', provider2)
|
||||
|
||||
const allProviders = modelToNodeStore.getAllNodeProviders('multi_type')
|
||||
expect(allProviders).toHaveLength(2)
|
||||
expect(modelToNodeStore.getNodeProvider('multi_type')).toStrictEqual(
|
||||
provider1
|
||||
)
|
||||
})
|
||||
|
||||
it('should initialize new model type when first provider is registered', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
const nodeDefStore = useNodeDefStore()
|
||||
expect(modelToNodeStore.modelToNodeMap['new_type']).toBeUndefined()
|
||||
|
||||
const provider = new ModelNodeProvider(
|
||||
nodeDefStore.nodeDefsByName['UNETLoader'],
|
||||
'test_key'
|
||||
)
|
||||
modelToNodeStore.registerNodeProvider('new_type', provider)
|
||||
|
||||
expect(modelToNodeStore.modelToNodeMap['new_type']).toBeDefined()
|
||||
expect(modelToNodeStore.modelToNodeMap['new_type']).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('quickRegister', () => {
|
||||
it('should connect node class to model type with parameter mapping', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.quickRegister('test_type', 'UNETLoader', 'test_param')
|
||||
|
||||
const provider = modelToNodeStore.getNodeProvider('test_type')
|
||||
expect(provider).toBeDefined()
|
||||
// After asserting provider is defined, we can safely access its properties
|
||||
expect(provider!.nodeDef.name).toBe('UNETLoader')
|
||||
expect(provider!.key).toBe('test_param')
|
||||
})
|
||||
|
||||
it('should handle registration of non-existent node classes gracefully', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
expect(() => {
|
||||
modelToNodeStore.quickRegister(
|
||||
'test_type',
|
||||
'NonExistentLoader',
|
||||
'test_param'
|
||||
)
|
||||
}).not.toThrow()
|
||||
|
||||
const provider = modelToNodeStore.getNodeProvider('test_type')
|
||||
expect(provider?.nodeDef).toBeUndefined()
|
||||
|
||||
expect(() => modelToNodeStore.getRegisteredNodeTypes()).not.toThrow()
|
||||
expect(() =>
|
||||
modelToNodeStore.getCategoryForNodeType('NonExistentLoader')
|
||||
).not.toThrow()
|
||||
|
||||
// Non-existent nodes are filtered out from registered types
|
||||
const types = modelToNodeStore.getRegisteredNodeTypes()
|
||||
expect(types['NonExistentLoader']).toBe(undefined)
|
||||
|
||||
expect(
|
||||
modelToNodeStore.getCategoryForNodeType('NonExistentLoader')
|
||||
).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should allow multiple node classes for same model type', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.quickRegister('multi_type', 'UNETLoader', 'param1')
|
||||
modelToNodeStore.quickRegister('multi_type', 'VAELoader', 'param2')
|
||||
|
||||
const providers = modelToNodeStore.getAllNodeProviders('multi_type')
|
||||
expect(providers).toHaveLength(2)
|
||||
})
|
||||
})
|
||||
|
||||
describe('registerDefaults integration', () => {
|
||||
it('should register all expected model types based on mock data', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
for (const modelType of EXPECTED_DEFAULT_TYPES) {
|
||||
expect.soft(modelToNodeStore.getNodeProvider(modelType)).toBeDefined()
|
||||
}
|
||||
})
|
||||
|
||||
it('should be idempotent', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
const firstCheckpointCount =
|
||||
modelToNodeStore.getAllNodeProviders('checkpoints').length
|
||||
|
||||
modelToNodeStore.registerDefaults() // Call again
|
||||
const secondCheckpointCount =
|
||||
modelToNodeStore.getAllNodeProviders('checkpoints').length
|
||||
|
||||
expect(secondCheckpointCount).toBe(firstCheckpointCount)
|
||||
})
|
||||
|
||||
it('should not register when nodeDefStore is empty', () => {
|
||||
// Create fresh Pinia for this test to avoid state persistence
|
||||
setActivePinia(createPinia())
|
||||
|
||||
vi.mocked(useNodeDefStore, { partial: true }).mockReturnValue({
|
||||
nodeDefsByName: {}
|
||||
})
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
expect(modelToNodeStore.getNodeProvider('checkpoints')).toBeUndefined()
|
||||
|
||||
// Restore original mock for subsequent tests
|
||||
vi.mocked(useNodeDefStore, { partial: true }).mockReturnValue({
|
||||
nodeDefsByName: mockNodeDefsByName
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('getRegisteredNodeTypes', () => {
|
||||
it('should return an object', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
const result = modelToNodeStore.getRegisteredNodeTypes()
|
||||
expect(result).toBeTypeOf('object')
|
||||
})
|
||||
|
||||
it('should return empty Record when nodeDefStore is empty', () => {
|
||||
// Create fresh Pinia for this test to avoid state persistence
|
||||
setActivePinia(createPinia())
|
||||
|
||||
vi.mocked(useNodeDefStore, { partial: true }).mockReturnValue({
|
||||
nodeDefsByName: {}
|
||||
})
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
|
||||
const result = modelToNodeStore.getRegisteredNodeTypes()
|
||||
expect(result).toStrictEqual({})
|
||||
|
||||
// Restore original mock for subsequent tests
|
||||
vi.mocked(useNodeDefStore, { partial: true }).mockReturnValue({
|
||||
nodeDefsByName: mockNodeDefsByName
|
||||
})
|
||||
})
|
||||
|
||||
it('should contain node types to resolve widget name', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
const result = modelToNodeStore.getRegisteredNodeTypes()
|
||||
|
||||
expect(result['CheckpointLoaderSimple']).toBe('ckpt_name')
|
||||
expect(result['LoraLoader']).toBe('lora_name')
|
||||
expect(result['NonExistentNode']).toBe(undefined)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getCategoryForNodeType', () => {
|
||||
it('should return category for known node type', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
expect(
|
||||
modelToNodeStore.getCategoryForNodeType('CheckpointLoaderSimple')
|
||||
).toBe('checkpoints')
|
||||
expect(modelToNodeStore.getCategoryForNodeType('LoraLoader')).toBe(
|
||||
'loras'
|
||||
)
|
||||
expect(modelToNodeStore.getCategoryForNodeType('VAELoader')).toBe('vae')
|
||||
})
|
||||
|
||||
it('should return undefined for unknown node type', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
expect(
|
||||
modelToNodeStore.getCategoryForNodeType('NonExistentNode')
|
||||
).toBeUndefined()
|
||||
expect(modelToNodeStore.getCategoryForNodeType('')).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should return first category when node type exists in multiple categories', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
|
||||
// Test with a node that exists in the defaults but add our own first
|
||||
// Since defaults register 'StyleModelLoader' in 'style_models',
|
||||
// we verify our custom registrations come after defaults in Object.entries iteration
|
||||
const result = modelToNodeStore.getCategoryForNodeType('StyleModelLoader')
|
||||
expect(result).toBe('style_models') // This proves the method works correctly
|
||||
|
||||
// Now test that custom registrations after defaults also work
|
||||
modelToNodeStore.quickRegister(
|
||||
'unicorn_styles',
|
||||
'StyleModelLoader',
|
||||
'param1'
|
||||
)
|
||||
const result2 =
|
||||
modelToNodeStore.getCategoryForNodeType('StyleModelLoader')
|
||||
// Should still be style_models since it was registered first by defaults
|
||||
expect(result2).toBe('style_models')
|
||||
})
|
||||
|
||||
it('should trigger lazy registration when called before registerDefaults', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
|
||||
const result = modelToNodeStore.getCategoryForNodeType(
|
||||
'CheckpointLoaderSimple'
|
||||
)
|
||||
expect(result).toBe('checkpoints')
|
||||
})
|
||||
|
||||
it('should be performant for repeated lookups', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
// Measure performance without assuming implementation
|
||||
const start = performance.now()
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
modelToNodeStore.getCategoryForNodeType('CheckpointLoaderSimple')
|
||||
}
|
||||
const end = performance.now()
|
||||
|
||||
// Should be fast enough for UI responsiveness
|
||||
expect(end - start).toBeLessThan(10)
|
||||
})
|
||||
|
||||
it('should handle invalid input types gracefully', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
// These should not throw but return undefined
|
||||
expect(
|
||||
modelToNodeStore.getCategoryForNodeType(null as any)
|
||||
).toBeUndefined()
|
||||
expect(
|
||||
modelToNodeStore.getCategoryForNodeType(undefined as any)
|
||||
).toBeUndefined()
|
||||
expect(
|
||||
modelToNodeStore.getCategoryForNodeType(123 as any)
|
||||
).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should be case-sensitive for node type matching', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
modelToNodeStore.registerDefaults()
|
||||
|
||||
expect(
|
||||
modelToNodeStore.getCategoryForNodeType('checkpointloadersimple')
|
||||
).toBeUndefined()
|
||||
expect(
|
||||
modelToNodeStore.getCategoryForNodeType('CHECKPOINTLOADERSIMPLE')
|
||||
).toBeUndefined()
|
||||
expect(
|
||||
modelToNodeStore.getCategoryForNodeType('CheckpointLoaderSimple')
|
||||
).toBe('checkpoints')
|
||||
})
|
||||
})
|
||||
|
||||
describe('edge cases', () => {
|
||||
it('should handle empty string model type', () => {
|
||||
const modelToNodeStore = useModelToNodeStore()
|
||||
expect(modelToNodeStore.getNodeProvider('')).toBeUndefined()
|
||||
expect(modelToNodeStore.getAllNodeProviders('')).toEqual([])
|
||||
})
|
||||
})
|
||||
})
|
||||
309
src/stores/nodeDefStore.test.ts
Normal file
309
src/stores/nodeDefStore.test.ts
Normal file
@@ -0,0 +1,309 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it } from 'vitest'
|
||||
|
||||
import type { ComfyNodeDef } from '@/schemas/nodeDefSchema'
|
||||
import { useNodeDefStore } from '@/stores/nodeDefStore'
|
||||
import type { NodeDefFilter } from '@/stores/nodeDefStore'
|
||||
|
||||
describe('useNodeDefStore', () => {
|
||||
let store: ReturnType<typeof useNodeDefStore>
|
||||
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
store = useNodeDefStore()
|
||||
})
|
||||
|
||||
const createMockNodeDef = (
|
||||
overrides: Partial<ComfyNodeDef> = {}
|
||||
): ComfyNodeDef => ({
|
||||
name: 'TestNode',
|
||||
display_name: 'Test Node',
|
||||
category: 'test',
|
||||
python_module: 'test_module',
|
||||
description: 'Test node',
|
||||
input: {},
|
||||
output: [],
|
||||
output_is_list: [],
|
||||
output_name: [],
|
||||
output_node: false,
|
||||
deprecated: false,
|
||||
experimental: false,
|
||||
...overrides
|
||||
})
|
||||
|
||||
describe('filter registry', () => {
|
||||
it('should register a new filter', () => {
|
||||
const filter: NodeDefFilter = {
|
||||
id: 'test.filter',
|
||||
name: 'Test Filter',
|
||||
predicate: () => true
|
||||
}
|
||||
|
||||
store.registerNodeDefFilter(filter)
|
||||
expect(store.nodeDefFilters).toContainEqual(filter)
|
||||
})
|
||||
|
||||
it('should unregister a filter by id', () => {
|
||||
const filter: NodeDefFilter = {
|
||||
id: 'test.filter',
|
||||
name: 'Test Filter',
|
||||
predicate: () => true
|
||||
}
|
||||
|
||||
store.registerNodeDefFilter(filter)
|
||||
store.unregisterNodeDefFilter('test.filter')
|
||||
expect(store.nodeDefFilters).not.toContainEqual(filter)
|
||||
})
|
||||
|
||||
it('should register core filters on initialization', () => {
|
||||
const deprecatedFilter = store.nodeDefFilters.find(
|
||||
(f) => f.id === 'core.deprecated'
|
||||
)
|
||||
const experimentalFilter = store.nodeDefFilters.find(
|
||||
(f) => f.id === 'core.experimental'
|
||||
)
|
||||
|
||||
expect(deprecatedFilter).toBeDefined()
|
||||
expect(experimentalFilter).toBeDefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe('filter application', () => {
|
||||
beforeEach(() => {
|
||||
// Clear existing filters for isolated tests
|
||||
store.nodeDefFilters.splice(0)
|
||||
})
|
||||
|
||||
it('should apply single filter to visible nodes', () => {
|
||||
const normalNode = createMockNodeDef({
|
||||
name: 'normal',
|
||||
deprecated: false
|
||||
})
|
||||
const deprecatedNode = createMockNodeDef({
|
||||
name: 'deprecated',
|
||||
deprecated: true
|
||||
})
|
||||
|
||||
store.updateNodeDefs([normalNode, deprecatedNode])
|
||||
|
||||
// Register filter that hides deprecated nodes
|
||||
store.registerNodeDefFilter({
|
||||
id: 'test.no-deprecated',
|
||||
name: 'Hide Deprecated',
|
||||
predicate: (node) => !node.deprecated
|
||||
})
|
||||
|
||||
expect(store.visibleNodeDefs).toHaveLength(1)
|
||||
expect(store.visibleNodeDefs[0].name).toBe('normal')
|
||||
})
|
||||
|
||||
it('should apply multiple filters with AND logic', () => {
|
||||
const node1 = createMockNodeDef({
|
||||
name: 'node1',
|
||||
deprecated: false,
|
||||
experimental: false
|
||||
})
|
||||
const node2 = createMockNodeDef({
|
||||
name: 'node2',
|
||||
deprecated: true,
|
||||
experimental: false
|
||||
})
|
||||
const node3 = createMockNodeDef({
|
||||
name: 'node3',
|
||||
deprecated: false,
|
||||
experimental: true
|
||||
})
|
||||
const node4 = createMockNodeDef({
|
||||
name: 'node4',
|
||||
deprecated: true,
|
||||
experimental: true
|
||||
})
|
||||
|
||||
store.updateNodeDefs([node1, node2, node3, node4])
|
||||
|
||||
// Register filters
|
||||
store.registerNodeDefFilter({
|
||||
id: 'test.no-deprecated',
|
||||
name: 'Hide Deprecated',
|
||||
predicate: (node) => !node.deprecated
|
||||
})
|
||||
|
||||
store.registerNodeDefFilter({
|
||||
id: 'test.no-experimental',
|
||||
name: 'Hide Experimental',
|
||||
predicate: (node) => !node.experimental
|
||||
})
|
||||
|
||||
// Only node1 should be visible (not deprecated AND not experimental)
|
||||
expect(store.visibleNodeDefs).toHaveLength(1)
|
||||
expect(store.visibleNodeDefs[0].name).toBe('node1')
|
||||
})
|
||||
|
||||
it('should show all nodes when no filters are registered', () => {
|
||||
const nodes = [
|
||||
createMockNodeDef({ name: 'node1' }),
|
||||
createMockNodeDef({ name: 'node2' }),
|
||||
createMockNodeDef({ name: 'node3' })
|
||||
]
|
||||
|
||||
store.updateNodeDefs(nodes)
|
||||
expect(store.visibleNodeDefs).toHaveLength(3)
|
||||
})
|
||||
|
||||
it('should update visibility when filter is removed', () => {
|
||||
const deprecatedNode = createMockNodeDef({
|
||||
name: 'deprecated',
|
||||
deprecated: true
|
||||
})
|
||||
store.updateNodeDefs([deprecatedNode])
|
||||
|
||||
const filter: NodeDefFilter = {
|
||||
id: 'test.no-deprecated',
|
||||
name: 'Hide Deprecated',
|
||||
predicate: (node) => !node.deprecated
|
||||
}
|
||||
|
||||
// Add filter - node should be hidden
|
||||
store.registerNodeDefFilter(filter)
|
||||
expect(store.visibleNodeDefs).toHaveLength(0)
|
||||
|
||||
// Remove filter - node should be visible
|
||||
store.unregisterNodeDefFilter('test.no-deprecated')
|
||||
expect(store.visibleNodeDefs).toHaveLength(1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('core filters behavior', () => {
|
||||
it('should hide deprecated nodes by default', () => {
|
||||
const normalNode = createMockNodeDef({
|
||||
name: 'normal',
|
||||
deprecated: false
|
||||
})
|
||||
const deprecatedNode = createMockNodeDef({
|
||||
name: 'deprecated',
|
||||
deprecated: true
|
||||
})
|
||||
|
||||
store.updateNodeDefs([normalNode, deprecatedNode])
|
||||
|
||||
expect(store.visibleNodeDefs).toHaveLength(1)
|
||||
expect(store.visibleNodeDefs[0].name).toBe('normal')
|
||||
})
|
||||
|
||||
it('should show deprecated nodes when showDeprecated is true', () => {
|
||||
const normalNode = createMockNodeDef({
|
||||
name: 'normal',
|
||||
deprecated: false
|
||||
})
|
||||
const deprecatedNode = createMockNodeDef({
|
||||
name: 'deprecated',
|
||||
deprecated: true
|
||||
})
|
||||
|
||||
store.updateNodeDefs([normalNode, deprecatedNode])
|
||||
store.showDeprecated = true
|
||||
|
||||
expect(store.visibleNodeDefs).toHaveLength(2)
|
||||
})
|
||||
|
||||
it('should hide experimental nodes by default', () => {
|
||||
const normalNode = createMockNodeDef({
|
||||
name: 'normal',
|
||||
experimental: false
|
||||
})
|
||||
const experimentalNode = createMockNodeDef({
|
||||
name: 'experimental',
|
||||
experimental: true
|
||||
})
|
||||
|
||||
store.updateNodeDefs([normalNode, experimentalNode])
|
||||
|
||||
expect(store.visibleNodeDefs).toHaveLength(1)
|
||||
expect(store.visibleNodeDefs[0].name).toBe('normal')
|
||||
})
|
||||
|
||||
it('should show experimental nodes when showExperimental is true', () => {
|
||||
const normalNode = createMockNodeDef({
|
||||
name: 'normal',
|
||||
experimental: false
|
||||
})
|
||||
const experimentalNode = createMockNodeDef({
|
||||
name: 'experimental',
|
||||
experimental: true
|
||||
})
|
||||
|
||||
store.updateNodeDefs([normalNode, experimentalNode])
|
||||
store.showExperimental = true
|
||||
|
||||
expect(store.visibleNodeDefs).toHaveLength(2)
|
||||
})
|
||||
|
||||
it('should hide subgraph nodes by default', () => {
|
||||
const normalNode = createMockNodeDef({
|
||||
name: 'normal',
|
||||
category: 'conditioning',
|
||||
python_module: 'nodes'
|
||||
})
|
||||
const subgraphNode = createMockNodeDef({
|
||||
name: 'MySubgraph',
|
||||
category: 'subgraph',
|
||||
python_module: 'nodes'
|
||||
})
|
||||
|
||||
store.updateNodeDefs([normalNode, subgraphNode])
|
||||
|
||||
expect(store.visibleNodeDefs).toHaveLength(1)
|
||||
expect(store.visibleNodeDefs[0].name).toBe('normal')
|
||||
})
|
||||
|
||||
it('should show non-subgraph nodes with subgraph category', () => {
|
||||
const normalNode = createMockNodeDef({
|
||||
name: 'normal',
|
||||
category: 'conditioning',
|
||||
python_module: 'custom_extension'
|
||||
})
|
||||
const fakeSubgraphNode = createMockNodeDef({
|
||||
name: 'FakeSubgraph',
|
||||
category: 'subgraph',
|
||||
python_module: 'custom_extension' // Different python_module
|
||||
})
|
||||
|
||||
store.updateNodeDefs([normalNode, fakeSubgraphNode])
|
||||
|
||||
expect(store.visibleNodeDefs).toHaveLength(2)
|
||||
expect(store.visibleNodeDefs.map((n) => n.name)).toEqual([
|
||||
'normal',
|
||||
'FakeSubgraph'
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe('performance', () => {
|
||||
it('should perform single traversal for multiple filters', () => {
|
||||
let filterCallCount = 0
|
||||
|
||||
// Register multiple filters that count their calls
|
||||
for (let i = 0; i < 5; i++) {
|
||||
store.registerNodeDefFilter({
|
||||
id: `test.counter-${i}`,
|
||||
name: `Counter ${i}`,
|
||||
predicate: () => {
|
||||
filterCallCount++
|
||||
return true
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
const nodes = Array.from({ length: 10 }, (_, i) =>
|
||||
createMockNodeDef({ name: `node${i}` })
|
||||
)
|
||||
store.updateNodeDefs(nodes)
|
||||
|
||||
// Force recomputation by accessing visibleNodeDefs
|
||||
expect(store.visibleNodeDefs).toBeDefined()
|
||||
|
||||
// Each node (10) should be checked by each filter (5 test + 2 core = 7 total)
|
||||
expect(filterCallCount).toBe(10 * 5)
|
||||
})
|
||||
})
|
||||
})
|
||||
175
src/stores/queueStore.loadWorkflow.test.ts
Normal file
175
src/stores/queueStore.loadWorkflow.test.ts
Normal file
@@ -0,0 +1,175 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type { ComfyApp } from '@/scripts/app'
|
||||
import type { ComfyWorkflowJSON } from '@/platform/workflow/validation/schemas/workflowSchema'
|
||||
import { TaskItemImpl } from '@/stores/queueStore'
|
||||
import * as getWorkflowModule from '@/platform/workflow/cloud'
|
||||
|
||||
vi.mock('@/platform/distribution/types', () => ({
|
||||
isCloud: true
|
||||
}))
|
||||
|
||||
vi.mock('@/services/extensionService', () => ({
|
||||
useExtensionService: vi.fn(() => ({
|
||||
invokeExtensions: vi.fn()
|
||||
}))
|
||||
}))
|
||||
|
||||
const mockWorkflow: ComfyWorkflowJSON = {
|
||||
id: 'test-workflow-id',
|
||||
revision: 0,
|
||||
last_node_id: 5,
|
||||
last_link_id: 3,
|
||||
nodes: [],
|
||||
links: [],
|
||||
groups: [],
|
||||
config: {},
|
||||
extra: {},
|
||||
version: 0.4
|
||||
}
|
||||
|
||||
const createHistoryTaskWithWorkflow = (): TaskItemImpl => {
|
||||
return new TaskItemImpl(
|
||||
'History',
|
||||
[
|
||||
0, // queueIndex
|
||||
'test-prompt-id', // promptId
|
||||
{}, // promptInputs
|
||||
{
|
||||
client_id: 'test-client',
|
||||
extra_pnginfo: {
|
||||
workflow: mockWorkflow
|
||||
}
|
||||
},
|
||||
[] // outputsToExecute
|
||||
],
|
||||
{
|
||||
status_str: 'success',
|
||||
completed: true,
|
||||
messages: []
|
||||
},
|
||||
{} // outputs
|
||||
)
|
||||
}
|
||||
|
||||
const createHistoryTaskWithoutWorkflow = (): TaskItemImpl => {
|
||||
return new TaskItemImpl(
|
||||
'History',
|
||||
[
|
||||
0,
|
||||
'test-prompt-id',
|
||||
{},
|
||||
{
|
||||
client_id: 'test-client'
|
||||
// No extra_pnginfo.workflow
|
||||
},
|
||||
[]
|
||||
],
|
||||
{
|
||||
status_str: 'success',
|
||||
completed: true,
|
||||
messages: []
|
||||
},
|
||||
{}
|
||||
)
|
||||
}
|
||||
|
||||
describe('TaskItemImpl.loadWorkflow - cloud history workflow fetching', () => {
|
||||
let mockApp: ComfyApp
|
||||
let mockFetchApi: ReturnType<typeof vi.fn>
|
||||
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
vi.clearAllMocks()
|
||||
|
||||
mockFetchApi = vi.fn()
|
||||
mockApp = {
|
||||
loadGraphData: vi.fn(),
|
||||
nodeOutputs: {},
|
||||
api: {
|
||||
fetchApi: mockFetchApi
|
||||
}
|
||||
} as unknown as ComfyApp
|
||||
|
||||
vi.spyOn(getWorkflowModule, 'getWorkflowFromHistory')
|
||||
})
|
||||
|
||||
it('should load workflow directly when workflow is in extra_pnginfo', async () => {
|
||||
const task = createHistoryTaskWithWorkflow()
|
||||
|
||||
await task.loadWorkflow(mockApp)
|
||||
|
||||
expect(mockApp.loadGraphData).toHaveBeenCalledWith(mockWorkflow)
|
||||
expect(mockFetchApi).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should fetch workflow from cloud when workflow is missing from history task', async () => {
|
||||
const task = createHistoryTaskWithoutWorkflow()
|
||||
|
||||
// Mock getWorkflowFromHistory to return workflow
|
||||
vi.spyOn(getWorkflowModule, 'getWorkflowFromHistory').mockResolvedValue(
|
||||
mockWorkflow
|
||||
)
|
||||
|
||||
await task.loadWorkflow(mockApp)
|
||||
|
||||
expect(getWorkflowModule.getWorkflowFromHistory).toHaveBeenCalledWith(
|
||||
expect.any(Function),
|
||||
'test-prompt-id'
|
||||
)
|
||||
expect(mockApp.loadGraphData).toHaveBeenCalledWith(mockWorkflow)
|
||||
})
|
||||
|
||||
it('should not load workflow when fetch returns undefined', async () => {
|
||||
const task = createHistoryTaskWithoutWorkflow()
|
||||
|
||||
vi.spyOn(getWorkflowModule, 'getWorkflowFromHistory').mockResolvedValue(
|
||||
undefined
|
||||
)
|
||||
|
||||
await task.loadWorkflow(mockApp)
|
||||
|
||||
expect(getWorkflowModule.getWorkflowFromHistory).toHaveBeenCalled()
|
||||
expect(mockApp.loadGraphData).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should only fetch for history tasks, not running tasks', async () => {
|
||||
const runningTask = new TaskItemImpl(
|
||||
'Running',
|
||||
[
|
||||
0,
|
||||
'test-prompt-id',
|
||||
{},
|
||||
{
|
||||
client_id: 'test-client'
|
||||
},
|
||||
[]
|
||||
],
|
||||
undefined,
|
||||
{}
|
||||
)
|
||||
|
||||
vi.spyOn(getWorkflowModule, 'getWorkflowFromHistory').mockResolvedValue(
|
||||
mockWorkflow
|
||||
)
|
||||
|
||||
await runningTask.loadWorkflow(mockApp)
|
||||
|
||||
expect(getWorkflowModule.getWorkflowFromHistory).not.toHaveBeenCalled()
|
||||
expect(mockApp.loadGraphData).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle fetch errors gracefully by returning undefined', async () => {
|
||||
const task = createHistoryTaskWithoutWorkflow()
|
||||
|
||||
vi.spyOn(getWorkflowModule, 'getWorkflowFromHistory').mockResolvedValue(
|
||||
undefined
|
||||
)
|
||||
|
||||
await task.loadWorkflow(mockApp)
|
||||
|
||||
expect(getWorkflowModule.getWorkflowFromHistory).toHaveBeenCalled()
|
||||
expect(mockApp.loadGraphData).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
773
src/stores/queueStore.test.ts
Normal file
773
src/stores/queueStore.test.ts
Normal file
@@ -0,0 +1,773 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type {
|
||||
HistoryTaskItem,
|
||||
PendingTaskItem,
|
||||
RunningTaskItem,
|
||||
TaskOutput,
|
||||
TaskPrompt,
|
||||
TaskStatus
|
||||
} from '@/schemas/apiSchema'
|
||||
import { api } from '@/scripts/api'
|
||||
import { TaskItemImpl, useQueueStore } from '@/stores/queueStore'
|
||||
|
||||
// Fixture factories
|
||||
const createTaskPrompt = (
|
||||
queueIndex: number,
|
||||
promptId: string,
|
||||
inputs: Record<string, any> = {},
|
||||
extraData: Record<string, any> = {},
|
||||
outputsToExecute: any[] = []
|
||||
): TaskPrompt => [queueIndex, promptId, inputs, extraData, outputsToExecute]
|
||||
|
||||
const createTaskStatus = (
|
||||
statusStr: 'success' | 'error' = 'success',
|
||||
messages: any[] = []
|
||||
): TaskStatus => ({
|
||||
status_str: statusStr,
|
||||
completed: true,
|
||||
messages
|
||||
})
|
||||
|
||||
const createTaskOutput = (
|
||||
nodeId: string = 'node-1',
|
||||
images: any[] = []
|
||||
): TaskOutput => ({
|
||||
[nodeId]: {
|
||||
images
|
||||
}
|
||||
})
|
||||
|
||||
const createRunningTask = (
|
||||
queueIndex: number,
|
||||
promptId: string
|
||||
): RunningTaskItem => ({
|
||||
taskType: 'Running',
|
||||
prompt: createTaskPrompt(queueIndex, promptId),
|
||||
remove: { name: 'Cancel', cb: () => {} }
|
||||
})
|
||||
|
||||
const createPendingTask = (
|
||||
queueIndex: number,
|
||||
promptId: string
|
||||
): PendingTaskItem => ({
|
||||
taskType: 'Pending',
|
||||
prompt: createTaskPrompt(queueIndex, promptId)
|
||||
})
|
||||
|
||||
const createHistoryTask = (
|
||||
queueIndex: number,
|
||||
promptId: string,
|
||||
outputs: TaskOutput = createTaskOutput(),
|
||||
status: TaskStatus = createTaskStatus()
|
||||
): HistoryTaskItem => ({
|
||||
taskType: 'History',
|
||||
prompt: createTaskPrompt(queueIndex, promptId),
|
||||
status,
|
||||
outputs
|
||||
})
|
||||
|
||||
// Mock API: replace the network-backed api singleton so tests below can
// script queue/history responses and spy on mutating calls.
// NOTE: vi.mock is hoisted by vitest, so this literal must stay top-level.
vi.mock('@/scripts/api', () => ({
  api: {
    getQueue: vi.fn(),
    getHistory: vi.fn(),
    clearItems: vi.fn(),
    deleteItem: vi.fn(),
    apiURL: vi.fn((path) => `/api${path}`),
    addEventListener: vi.fn(),
    removeEventListener: vi.fn()
  }
}))
|
||||
|
||||
// Unit tests for TaskItemImpl's output normalization and media-type detection.
describe('TaskItemImpl', () => {
  it('should remove animated property from outputs during construction', () => {
    const taskItem = new TaskItemImpl(
      'History',
      [0, 'prompt-id', {}, { client_id: 'client-id' }, []],
      { status_str: 'success', messages: [], completed: true },
      {
        'node-1': {
          images: [{ filename: 'test.png', type: 'output', subfolder: '' }],
          animated: [false]
        }
      }
    )

    // Check that animated property was removed
    expect('animated' in taskItem.outputs['node-1']).toBe(false)

    expect(taskItem.outputs['node-1'].images).toBeDefined()
    expect(taskItem.outputs['node-1'].images?.[0]?.filename).toBe('test.png')
  })

  it('should handle outputs without animated property', () => {
    const taskItem = new TaskItemImpl(
      'History',
      [0, 'prompt-id', {}, { client_id: 'client-id' }, []],
      { status_str: 'success', messages: [], completed: true },
      {
        'node-1': {
          images: [{ filename: 'test.png', type: 'output', subfolder: '' }]
        }
      }
    )

    expect(taskItem.outputs['node-1'].images).toBeDefined()
    expect(taskItem.outputs['node-1'].images?.[0]?.filename).toBe('test.png')
  })

  it('should recognize webm video from core', () => {
    // Core nodes report video files under the `video` output key.
    const taskItem = new TaskItemImpl(
      'History',
      [0, 'prompt-id', {}, { client_id: 'client-id' }, []],
      { status_str: 'success', messages: [], completed: true },
      {
        'node-1': {
          video: [{ filename: 'test.webm', type: 'output', subfolder: '' }]
        }
      }
    )

    const output = taskItem.flatOutputs[0]

    expect(output.htmlVideoType).toBe('video/webm')
    expect(output.isVideo).toBe(true)
    expect(output.isWebm).toBe(true)
    expect(output.isVhsFormat).toBe(false)
    expect(output.isImage).toBe(false)
  })

  // VHS reports videos under `gifs` with an explicit `format`/`frame_rate`:
  // https://github.com/Kosinkadink/ComfyUI-VideoHelperSuite/blob/0a75c7958fe320efcb052f1d9f8451fd20c730a8/videohelpersuite/nodes.py#L578-L590
  it('should recognize webm video from VHS', () => {
    const taskItem = new TaskItemImpl(
      'History',
      [0, 'prompt-id', {}, { client_id: 'client-id' }, []],
      { status_str: 'success', messages: [], completed: true },
      {
        'node-1': {
          gifs: [
            {
              filename: 'test.webm',
              type: 'output',
              subfolder: '',
              format: 'video/webm',
              frame_rate: 30
            }
          ]
        }
      }
    )

    const output = taskItem.flatOutputs[0]

    expect(output.htmlVideoType).toBe('video/webm')
    expect(output.isVideo).toBe(true)
    expect(output.isWebm).toBe(true)
    expect(output.isVhsFormat).toBe(true)
    expect(output.isImage).toBe(false)
  })

  it('should recognize mp4 video from core', () => {
    // Core marks animated image outputs with the `animated` flag array.
    const taskItem = new TaskItemImpl(
      'History',
      [0, 'prompt-id', {}, { client_id: 'client-id' }, []],
      { status_str: 'success', messages: [], completed: true },
      {
        'node-1': {
          images: [
            {
              filename: 'test.mp4',
              type: 'output',
              subfolder: ''
            }
          ],
          animated: [true]
        }
      }
    )

    const output = taskItem.flatOutputs[0]

    expect(output.htmlVideoType).toBe('video/mp4')
    expect(output.isVideo).toBe(true)
    expect(output.isImage).toBe(false)
  })

  describe('audio format detection', () => {
    // Extension -> expected HTML audio MIME type pairs.
    const audioFormats = [
      { extension: 'mp3', mimeType: 'audio/mpeg' },
      { extension: 'wav', mimeType: 'audio/wav' },
      { extension: 'ogg', mimeType: 'audio/ogg' },
      { extension: 'flac', mimeType: 'audio/flac' }
    ]

    audioFormats.forEach(({ extension, mimeType }) => {
      it(`should recognize ${extension} audio`, () => {
        const taskItem = new TaskItemImpl(
          'History',
          [0, 'prompt-id', {}, { client_id: 'client-id' }, []],
          { status_str: 'success', messages: [], completed: true },
          {
            'node-1': {
              audio: [
                {
                  filename: `test.${extension}`,
                  type: 'output',
                  subfolder: ''
                }
              ]
            }
          }
        )

        const output = taskItem.flatOutputs[0]

        expect(output.htmlAudioType).toBe(mimeType)
        expect(output.isAudio).toBe(true)
        expect(output.isVideo).toBe(false)
        expect(output.isImage).toBe(false)
        expect(output.supportsPreview).toBe(true)
      })
    })
  })
})
|
||||
|
||||
describe('useQueueStore', () => {
|
||||
  let store: ReturnType<typeof useQueueStore>

  // Fresh Pinia instance, fresh store, and clean mocks before every test.
  beforeEach(() => {
    setActivePinia(createPinia())
    store = useQueueStore()
    vi.clearAllMocks()
  })

  // Typed handles to the mocked api surface (see the vi.mock factory above).
  const mockGetQueue = vi.mocked(api.getQueue)
  const mockGetHistory = vi.mocked(api.getHistory)
  const mockClearItems = vi.mocked(api.clearItems)
  const mockDeleteItem = vi.mocked(api.deleteItem)
|
||||
|
||||
  // Baseline: a freshly created store exposes empty collections and defaults.
  describe('initial state', () => {
    it('should have empty state on initialization', () => {
      expect(store.runningTasks).toEqual([])
      expect(store.pendingTasks).toEqual([])
      expect(store.historyTasks).toEqual([])
      expect(store.isLoading).toBe(false)
      expect(store.maxHistoryItems).toBe(64)
    })

    it('should have empty computed tasks', () => {
      expect(store.tasks).toEqual([])
      expect(store.flatTasks).toEqual([])
      expect(store.hasPendingTasks).toBe(false)
      // Sentinel when no history exists yet.
      expect(store.lastHistoryQueueIndex).toBe(-1)
    })
  })
|
||||
|
||||
  describe('update() - basic functionality', () => {
    it('should load running and pending tasks from API', async () => {
      const runningTask = createRunningTask(1, 'run-1')
      const pendingTask1 = createPendingTask(2, 'pend-1')
      const pendingTask2 = createPendingTask(3, 'pend-2')

      mockGetQueue.mockResolvedValue({
        Running: [runningTask],
        Pending: [pendingTask1, pendingTask2]
      })
      mockGetHistory.mockResolvedValue({ History: [] })

      await store.update()

      expect(store.runningTasks).toHaveLength(1)
      expect(store.pendingTasks).toHaveLength(2)
      expect(store.runningTasks[0].promptId).toBe('run-1')
      // Pending tasks come back sorted by queueIndex descending.
      expect(store.pendingTasks[0].promptId).toBe('pend-2')
      expect(store.pendingTasks[1].promptId).toBe('pend-1')
    })

    it('should load history tasks from API', async () => {
      const historyTask1 = createHistoryTask(5, 'hist-1')
      const historyTask2 = createHistoryTask(4, 'hist-2')

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({
        History: [historyTask1, historyTask2]
      })

      await store.update()

      expect(store.historyTasks).toHaveLength(2)
      expect(store.historyTasks[0].promptId).toBe('hist-1')
      expect(store.historyTasks[1].promptId).toBe('hist-2')
    })

    it('should set loading state correctly', async () => {
      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [] })

      expect(store.isLoading).toBe(false)

      // isLoading flips synchronously while the update is in flight.
      const updatePromise = store.update()
      expect(store.isLoading).toBe(true)

      await updatePromise
      expect(store.isLoading).toBe(false)
    })

    it('should clear loading state even if API fails', async () => {
      mockGetQueue.mockRejectedValue(new Error('API error'))
      mockGetHistory.mockResolvedValue({ History: [] })

      await expect(store.update()).rejects.toThrow('API error')
      // isLoading must be reset in a finally-style path.
      expect(store.isLoading).toBe(false)
    })
  })
|
||||
|
||||
  // Both history and pending collections are ordered newest-first.
  describe('update() - sorting', () => {
    it('should sort tasks by queueIndex descending', async () => {
      const task1 = createHistoryTask(1, 'hist-1')
      const task2 = createHistoryTask(5, 'hist-2')
      const task3 = createHistoryTask(3, 'hist-3')

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({
        History: [task1, task2, task3]
      })

      await store.update()

      expect(store.historyTasks[0].queueIndex).toBe(5)
      expect(store.historyTasks[1].queueIndex).toBe(3)
      expect(store.historyTasks[2].queueIndex).toBe(1)
    })

    it('should sort pending tasks by queueIndex descending', async () => {
      const pend1 = createPendingTask(10, 'pend-1')
      const pend2 = createPendingTask(15, 'pend-2')
      const pend3 = createPendingTask(12, 'pend-3')

      mockGetQueue.mockResolvedValue({
        Running: [],
        Pending: [pend1, pend2, pend3]
      })
      mockGetHistory.mockResolvedValue({ History: [] })

      await store.update()

      expect(store.pendingTasks[0].queueIndex).toBe(15)
      expect(store.pendingTasks[1].queueIndex).toBe(12)
      expect(store.pendingTasks[2].queueIndex).toBe(10)
    })
  })
|
||||
|
||||
  // Regression tests: reconciliation must key on promptId, not queueIndex,
  // because the server can reuse queue indices across prompts.
  describe('update() - queue index collision (THE BUG FIX)', () => {
    it('should NOT confuse different prompts with same queueIndex', async () => {
      const hist1 = createHistoryTask(50, 'prompt-uuid-aaa')

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [hist1] })

      await store.update()
      expect(store.historyTasks).toHaveLength(1)
      expect(store.historyTasks[0].promptId).toBe('prompt-uuid-aaa')

      // Server history now contains a different prompt only.
      const hist2 = createHistoryTask(51, 'prompt-uuid-bbb')
      mockGetHistory.mockResolvedValue({
        History: [hist2]
      })

      await store.update()

      expect(store.historyTasks).toHaveLength(1)
      expect(store.historyTasks[0].promptId).toBe('prompt-uuid-bbb')
      expect(store.historyTasks[0].queueIndex).toBe(51)
    })

    it('should correctly reconcile when queueIndex is reused', async () => {
      const hist1 = createHistoryTask(100, 'first-prompt-at-100')
      const hist2 = createHistoryTask(99, 'prompt-at-99')

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [hist1, hist2] })

      await store.update()
      expect(store.historyTasks).toHaveLength(2)

      // hist1 dropped server-side; a new prompt appears at a fresh index.
      const hist3 = createHistoryTask(101, 'second-prompt-at-101')
      mockGetHistory.mockResolvedValue({
        History: [hist3, hist2]
      })

      await store.update()

      expect(store.historyTasks).toHaveLength(2)
      const promptIds = store.historyTasks.map((t) => t.promptId)
      expect(promptIds).toContain('second-prompt-at-101')
      expect(promptIds).toContain('prompt-at-99')
      expect(promptIds).not.toContain('first-prompt-at-100')
    })

    it('should handle multiple queueIndex collisions simultaneously', async () => {
      const hist1 = createHistoryTask(10, 'old-at-10')
      const hist2 = createHistoryTask(20, 'old-at-20')
      const hist3 = createHistoryTask(30, 'keep-at-30')

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({
        History: [hist3, hist2, hist1]
      })

      await store.update()
      expect(store.historyTasks).toHaveLength(3)

      // Two old entries replaced at once; one survivor kept.
      const newHist1 = createHistoryTask(31, 'new-at-31')
      const newHist2 = createHistoryTask(32, 'new-at-32')
      mockGetHistory.mockResolvedValue({
        History: [newHist2, newHist1, hist3]
      })

      await store.update()

      expect(store.historyTasks).toHaveLength(3)
      const promptIds = store.historyTasks.map((t) => t.promptId)
      expect(promptIds).toEqual(['new-at-32', 'new-at-31', 'keep-at-30'])
    })
  })
|
||||
|
||||
  describe('update() - history reconciliation', () => {
    it('should keep existing items still on server (by promptId)', async () => {
      const hist1 = createHistoryTask(10, 'existing-1')
      const hist2 = createHistoryTask(9, 'existing-2')

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [hist1, hist2] })

      await store.update()
      expect(store.historyTasks).toHaveLength(2)

      const hist3 = createHistoryTask(11, 'new-1')
      mockGetHistory.mockResolvedValue({
        History: [hist3, hist1, hist2]
      })

      await store.update()

      expect(store.historyTasks).toHaveLength(3)
      expect(store.historyTasks.map((t) => t.promptId)).toContain('existing-1')
      expect(store.historyTasks.map((t) => t.promptId)).toContain('existing-2')
      expect(store.historyTasks.map((t) => t.promptId)).toContain('new-1')
    })

    it('should remove items no longer on server', async () => {
      const hist1 = createHistoryTask(10, 'remove-me')
      const hist2 = createHistoryTask(9, 'keep-me')

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [hist1, hist2] })

      await store.update()
      expect(store.historyTasks).toHaveLength(2)

      mockGetHistory.mockResolvedValue({ History: [hist2] })

      await store.update()

      expect(store.historyTasks).toHaveLength(1)
      expect(store.historyTasks[0].promptId).toBe('keep-me')
    })

    it('should add new items from server', async () => {
      const hist1 = createHistoryTask(5, 'old-1')

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [hist1] })

      await store.update()

      const hist2 = createHistoryTask(6, 'new-1')
      const hist3 = createHistoryTask(7, 'new-2')
      mockGetHistory.mockResolvedValue({
        History: [hist3, hist2, hist1]
      })

      await store.update()

      expect(store.historyTasks).toHaveLength(3)
      expect(store.historyTasks.map((t) => t.promptId)).toContain('new-1')
      expect(store.historyTasks.map((t) => t.promptId)).toContain('new-2')
    })
  })
|
||||
|
||||
  // The store caps retained history at maxHistoryItems, keeping the newest
  // (highest queueIndex) entries.
  describe('update() - maxHistoryItems limit', () => {
    it('should enforce maxHistoryItems limit', async () => {
      store.maxHistoryItems = 3

      const tasks = Array.from({ length: 5 }, (_, i) =>
        createHistoryTask(10 - i, `hist-${i}`)
      )

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: tasks })

      await store.update()

      expect(store.historyTasks).toHaveLength(3)
      expect(store.historyTasks[0].queueIndex).toBe(10)
      expect(store.historyTasks[1].queueIndex).toBe(9)
      expect(store.historyTasks[2].queueIndex).toBe(8)
    })

    it('should respect maxHistoryItems when combining new and existing', async () => {
      store.maxHistoryItems = 5

      const initial = Array.from({ length: 3 }, (_, i) =>
        createHistoryTask(10 + i, `existing-${i}`)
      )

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: initial })

      await store.update()
      expect(store.historyTasks).toHaveLength(3)

      // 3 existing + 4 new = 7 candidates, capped at 5 newest.
      const newTasks = Array.from({ length: 4 }, (_, i) =>
        createHistoryTask(20 + i, `new-${i}`)
      )
      mockGetHistory.mockResolvedValue({
        History: [...newTasks, ...initial]
      })

      await store.update()

      expect(store.historyTasks).toHaveLength(5)
      expect(store.historyTasks[0].queueIndex).toBe(23)
    })

    it('should handle maxHistoryItems = 0', async () => {
      store.maxHistoryItems = 0

      const tasks = [createHistoryTask(10, 'hist-1')]

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: tasks })

      await store.update()

      expect(store.historyTasks).toHaveLength(0)
    })

    it('should handle maxHistoryItems = 1', async () => {
      store.maxHistoryItems = 1

      const tasks = [
        createHistoryTask(10, 'hist-1'),
        createHistoryTask(9, 'hist-2')
      ]

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: tasks })

      await store.update()

      expect(store.historyTasks).toHaveLength(1)
      expect(store.historyTasks[0].queueIndex).toBe(10)
    })

    it('should dynamically adjust when maxHistoryItems changes', async () => {
      store.maxHistoryItems = 10

      const tasks = Array.from({ length: 15 }, (_, i) =>
        createHistoryTask(20 - i, `hist-${i}`)
      )

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: tasks })

      await store.update()
      expect(store.historyTasks).toHaveLength(10)

      // Lowering the cap takes effect on the next update.
      store.maxHistoryItems = 5
      mockGetHistory.mockResolvedValue({ History: tasks })

      await store.update()
      expect(store.historyTasks).toHaveLength(5)
    })
  })
|
||||
|
||||
  describe('computed properties', () => {
    it('tasks should combine pending, running, and history in correct order', async () => {
      const running = createRunningTask(5, 'run-1')
      const pending1 = createPendingTask(6, 'pend-1')
      const pending2 = createPendingTask(7, 'pend-2')
      const hist1 = createHistoryTask(3, 'hist-1')
      const hist2 = createHistoryTask(4, 'hist-2')

      mockGetQueue.mockResolvedValue({
        Running: [running],
        Pending: [pending1, pending2]
      })
      mockGetHistory.mockResolvedValue({
        History: [hist2, hist1]
      })

      await store.update()

      // Combined order: pending first, then running, then history.
      expect(store.tasks).toHaveLength(5)
      expect(store.tasks[0].taskType).toBe('Pending')
      expect(store.tasks[1].taskType).toBe('Pending')
      expect(store.tasks[2].taskType).toBe('Running')
      expect(store.tasks[3].taskType).toBe('History')
      expect(store.tasks[4].taskType).toBe('History')
    })

    it('hasPendingTasks should be true when pending tasks exist', async () => {
      mockGetQueue.mockResolvedValue({
        Running: [],
        Pending: [createPendingTask(1, 'pend-1')]
      })
      mockGetHistory.mockResolvedValue({ History: [] })

      await store.update()
      expect(store.hasPendingTasks).toBe(true)
    })

    it('hasPendingTasks should be false when no pending tasks', async () => {
      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [] })

      await store.update()
      expect(store.hasPendingTasks).toBe(false)
    })

    it('lastHistoryQueueIndex should return highest queue index', async () => {
      const hist1 = createHistoryTask(10, 'hist-1')
      const hist2 = createHistoryTask(25, 'hist-2')
      const hist3 = createHistoryTask(15, 'hist-3')

      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({
        History: [hist1, hist2, hist3]
      })

      await store.update()
      expect(store.lastHistoryQueueIndex).toBe(25)
    })

    it('lastHistoryQueueIndex should be -1 when no history', async () => {
      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [] })

      await store.update()
      expect(store.lastHistoryQueueIndex).toBe(-1)
    })
  })
|
||||
|
||||
  describe('clear()', () => {
    // Seed the store with one task of each kind before each clear test.
    beforeEach(async () => {
      mockGetQueue.mockResolvedValue({
        Running: [createRunningTask(1, 'run-1')],
        Pending: [createPendingTask(2, 'pend-1')]
      })
      mockGetHistory.mockResolvedValue({
        History: [createHistoryTask(3, 'hist-1')]
      })
      await store.update()
    })

    it('should clear both queue and history by default', async () => {
      mockClearItems.mockResolvedValue(undefined)
      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [] })

      await store.clear()

      expect(mockClearItems).toHaveBeenCalledTimes(2)
      expect(mockClearItems).toHaveBeenCalledWith('queue')
      expect(mockClearItems).toHaveBeenCalledWith('history')
      expect(store.runningTasks).toHaveLength(0)
      expect(store.pendingTasks).toHaveLength(0)
      expect(store.historyTasks).toHaveLength(0)
    })

    it('should clear only queue when specified', async () => {
      mockClearItems.mockResolvedValue(undefined)
      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({
        History: [createHistoryTask(3, 'hist-1')]
      })

      await store.clear(['queue'])

      expect(mockClearItems).toHaveBeenCalledTimes(1)
      expect(mockClearItems).toHaveBeenCalledWith('queue')
      // History untouched.
      expect(store.historyTasks).toHaveLength(1)
    })

    it('should clear only history when specified', async () => {
      mockClearItems.mockResolvedValue(undefined)
      mockGetQueue.mockResolvedValue({
        Running: [createRunningTask(1, 'run-1')],
        Pending: [createPendingTask(2, 'pend-1')]
      })
      mockGetHistory.mockResolvedValue({ History: [] })

      await store.clear(['history'])

      expect(mockClearItems).toHaveBeenCalledTimes(1)
      expect(mockClearItems).toHaveBeenCalledWith('history')
      // Queue untouched.
      expect(store.runningTasks).toHaveLength(1)
      expect(store.pendingTasks).toHaveLength(1)
    })

    it('should do nothing when empty array passed', async () => {
      await store.clear([])

      expect(mockClearItems).not.toHaveBeenCalled()
    })
  })
|
||||
|
||||
  describe('delete()', () => {
    it('should delete task from queue', async () => {
      const task = new TaskItemImpl('Pending', createTaskPrompt(1, 'pend-1'))

      mockDeleteItem.mockResolvedValue(undefined)
      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [] })

      await store.delete(task)

      // Pending tasks are removed from the 'queue' endpoint.
      expect(mockDeleteItem).toHaveBeenCalledWith('queue', 'pend-1')
    })

    it('should delete task from history', async () => {
      const task = new TaskItemImpl(
        'History',
        createTaskPrompt(1, 'hist-1'),
        createTaskStatus(),
        createTaskOutput()
      )

      mockDeleteItem.mockResolvedValue(undefined)
      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [] })

      await store.delete(task)

      // History tasks are removed from the 'history' endpoint.
      expect(mockDeleteItem).toHaveBeenCalledWith('history', 'hist-1')
    })

    it('should refresh store after deletion', async () => {
      const task = new TaskItemImpl('Pending', createTaskPrompt(1, 'pend-1'))

      mockDeleteItem.mockResolvedValue(undefined)
      mockGetQueue.mockResolvedValue({ Running: [], Pending: [] })
      mockGetHistory.mockResolvedValue({ History: [] })

      await store.delete(task)

      // delete() triggers a full re-fetch of queue and history.
      expect(mockGetQueue).toHaveBeenCalled()
      expect(mockGetHistory).toHaveBeenCalled()
    })
  })
|
||||
})
|
||||
287
src/stores/serverConfigStore.test.ts
Normal file
287
src/stores/serverConfigStore.test.ts
Normal file
@@ -0,0 +1,287 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it } from 'vitest'
|
||||
|
||||
import type { ServerConfig } from '@/constants/serverConfig'
|
||||
import type { FormItem } from '@/platform/settings/types'
|
||||
import { useServerConfigStore } from '@/stores/serverConfigStore'
|
||||
|
||||
// Minimal FormItem satisfying the required fields; spread into the
// ServerConfig fixtures in the tests below.
const dummyFormItem: FormItem = {
  name: '',
  type: 'text'
}
|
||||
|
||||
describe('useServerConfigStore', () => {
|
||||
let store: ReturnType<typeof useServerConfigStore>
|
||||
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
store = useServerConfigStore()
|
||||
})
|
||||
|
||||
it('should initialize with empty configs', () => {
|
||||
expect(store.serverConfigs).toHaveLength(0)
|
||||
expect(Object.keys(store.serverConfigById)).toHaveLength(0)
|
||||
expect(Object.keys(store.serverConfigsByCategory)).toHaveLength(0)
|
||||
expect(Object.keys(store.serverConfigValues)).toHaveLength(0)
|
||||
expect(Object.keys(store.launchArgs)).toHaveLength(0)
|
||||
})
|
||||
|
||||
it('should load server configs with default values', () => {
|
||||
const configs: ServerConfig<any>[] = [
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config1',
|
||||
defaultValue: 'default1',
|
||||
category: ['Test']
|
||||
},
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config2',
|
||||
defaultValue: 'default2'
|
||||
}
|
||||
]
|
||||
|
||||
store.loadServerConfig(configs, {})
|
||||
|
||||
expect(store.serverConfigs).toHaveLength(2)
|
||||
expect(store.serverConfigById['test.config1'].value).toBe('default1')
|
||||
expect(store.serverConfigById['test.config2'].value).toBe('default2')
|
||||
})
|
||||
|
||||
it('should load server configs with provided values', () => {
|
||||
const configs: ServerConfig<any>[] = [
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config1',
|
||||
defaultValue: 'default1',
|
||||
category: ['Test']
|
||||
}
|
||||
]
|
||||
|
||||
store.loadServerConfig(configs, {
|
||||
'test.config1': 'custom1'
|
||||
})
|
||||
|
||||
expect(store.serverConfigs).toHaveLength(1)
|
||||
expect(store.serverConfigById['test.config1'].value).toBe('custom1')
|
||||
})
|
||||
|
||||
it('should organize configs by category', () => {
|
||||
const configs: ServerConfig<any>[] = [
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config1',
|
||||
defaultValue: 'default1',
|
||||
category: ['Test']
|
||||
},
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config2',
|
||||
defaultValue: 'default2',
|
||||
category: ['Other']
|
||||
},
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config3',
|
||||
defaultValue: 'default3'
|
||||
}
|
||||
]
|
||||
|
||||
store.loadServerConfig(configs, {})
|
||||
|
||||
expect(Object.keys(store.serverConfigsByCategory)).toHaveLength(3)
|
||||
expect(store.serverConfigsByCategory['Test']).toHaveLength(1)
|
||||
expect(store.serverConfigsByCategory['Other']).toHaveLength(1)
|
||||
expect(store.serverConfigsByCategory['General']).toHaveLength(1)
|
||||
})
|
||||
|
||||
it('should generate server config values excluding defaults', () => {
|
||||
const configs: ServerConfig<any>[] = [
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config1',
|
||||
defaultValue: 'default1'
|
||||
},
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config2',
|
||||
defaultValue: 'default2'
|
||||
}
|
||||
]
|
||||
|
||||
store.loadServerConfig(configs, {
|
||||
'test.config1': 'custom1',
|
||||
'test.config2': 'default2'
|
||||
})
|
||||
|
||||
expect(Object.keys(store.serverConfigValues)).toHaveLength(2)
|
||||
expect(store.serverConfigValues['test.config1']).toBe('custom1')
|
||||
expect(store.serverConfigValues['test.config2']).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should generate launch arguments with custom getValue function', () => {
|
||||
const configs: ServerConfig<any>[] = [
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config1',
|
||||
defaultValue: 'default1',
|
||||
getValue: (value: string) => ({ customArg: value })
|
||||
},
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config2',
|
||||
defaultValue: 'default2'
|
||||
}
|
||||
]
|
||||
|
||||
store.loadServerConfig(configs, {
|
||||
'test.config1': 'custom1',
|
||||
'test.config2': 'custom2'
|
||||
})
|
||||
|
||||
expect(Object.keys(store.launchArgs)).toHaveLength(2)
|
||||
expect(store.launchArgs['customArg']).toBe('custom1')
|
||||
expect(store.launchArgs['test.config2']).toBe('custom2')
|
||||
})
|
||||
|
||||
it('should not include default values in launch arguments', () => {
|
||||
const configs: ServerConfig<any>[] = [
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config1',
|
||||
defaultValue: 'default1'
|
||||
},
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config2',
|
||||
defaultValue: 'default2'
|
||||
}
|
||||
]
|
||||
|
||||
store.loadServerConfig(configs, {
|
||||
'test.config1': 'custom1',
|
||||
'test.config2': 'default2'
|
||||
})
|
||||
|
||||
expect(Object.keys(store.launchArgs)).toHaveLength(1)
|
||||
expect(store.launchArgs['test.config1']).toBe('custom1')
|
||||
expect(store.launchArgs['test.config2']).toBeUndefined()
|
||||
})
|
||||
|
||||
it('should not include nullish values in launch arguments', () => {
|
||||
const configs: ServerConfig<any>[] = [
|
||||
{ ...dummyFormItem, id: 'test.config1', defaultValue: 'default1' },
|
||||
{ ...dummyFormItem, id: 'test.config2', defaultValue: 'default2' },
|
||||
{ ...dummyFormItem, id: 'test.config3', defaultValue: 'default3' },
|
||||
{ ...dummyFormItem, id: 'test.config4', defaultValue: null }
|
||||
]
|
||||
|
||||
store.loadServerConfig(configs, {
|
||||
'test.config1': undefined,
|
||||
'test.config2': null,
|
||||
'test.config3': '',
|
||||
'test.config4': 0
|
||||
})
|
||||
|
||||
expect(Object.keys(store.launchArgs)).toEqual([
|
||||
'test.config3',
|
||||
'test.config4'
|
||||
])
|
||||
expect(Object.values(store.launchArgs)).toEqual(['', '0'])
|
||||
expect(store.serverConfigById['test.config3'].value).toBe('')
|
||||
expect(store.serverConfigById['test.config4'].value).toBe(0)
|
||||
expect(Object.values(store.serverConfigValues)).toEqual([
|
||||
undefined,
|
||||
undefined,
|
||||
'',
|
||||
0
|
||||
])
|
||||
})
|
||||
|
||||
it('should convert true to empty string in launch arguments', () => {
|
||||
store.loadServerConfig(
|
||||
[
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config1',
|
||||
defaultValue: 0
|
||||
}
|
||||
],
|
||||
{
|
||||
'test.config1': true
|
||||
}
|
||||
)
|
||||
expect(store.launchArgs['test.config1']).toBe('')
|
||||
expect(store.commandLineArgs).toBe('--test.config1')
|
||||
})
|
||||
|
||||
it('should convert number to string in launch arguments', () => {
|
||||
store.loadServerConfig(
|
||||
[
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config1',
|
||||
defaultValue: 1
|
||||
}
|
||||
],
|
||||
{
|
||||
'test.config1': 123
|
||||
}
|
||||
)
|
||||
expect(store.launchArgs['test.config1']).toBe('123')
|
||||
expect(store.commandLineArgs).toBe('--test.config1 123')
|
||||
})
|
||||
|
||||
it('should drop nullish values in launch arguments', () => {
|
||||
store.loadServerConfig(
|
||||
[
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config1',
|
||||
defaultValue: 1
|
||||
}
|
||||
],
|
||||
{
|
||||
'test.config1': null
|
||||
}
|
||||
)
|
||||
expect(Object.keys(store.launchArgs)).toHaveLength(0)
|
||||
})
|
||||
|
||||
it('should track modified configs', () => {
|
||||
const configs = [
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config1',
|
||||
defaultValue: 'default1'
|
||||
},
|
||||
{
|
||||
...dummyFormItem,
|
||||
id: 'test.config2',
|
||||
defaultValue: 'default2'
|
||||
}
|
||||
]
|
||||
|
||||
store.loadServerConfig(configs, {
|
||||
'test.config1': 'initial1'
|
||||
})
|
||||
|
||||
// Initially no modified configs
|
||||
expect(store.modifiedConfigs).toHaveLength(0)
|
||||
|
||||
// Modify config1's value after loading
|
||||
store.serverConfigById['test.config1'].value = 'custom1'
|
||||
|
||||
// Now config1 should be in modified configs
|
||||
expect(store.modifiedConfigs).toHaveLength(1)
|
||||
expect(store.modifiedConfigs[0].id).toBe('test.config1')
|
||||
expect(store.modifiedConfigs[0].value).toBe('custom1')
|
||||
expect(store.modifiedConfigs[0].initialValue).toBe('initial1')
|
||||
|
||||
// Change config1 back to default
|
||||
store.serverConfigById['test.config1'].value = 'initial1'
|
||||
|
||||
// Should go back to no modified configs
|
||||
expect(store.modifiedConfigs).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
184
src/stores/subgraphNavigationStore.test.ts
Normal file
184
src/stores/subgraphNavigationStore.test.ts
Normal file
@@ -0,0 +1,184 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { nextTick } from 'vue'
|
||||
|
||||
import { useWorkflowStore } from '@/platform/workflow/management/stores/workflowStore'
|
||||
import type { ComfyWorkflow } from '@/platform/workflow/management/stores/workflowStore'
|
||||
import { app } from '@/scripts/app'
|
||||
import { useSubgraphNavigationStore } from '@/stores/subgraphNavigationStore'
|
||||
|
||||
vi.mock('@/scripts/app', () => {
|
||||
const mockCanvas = {
|
||||
subgraph: null,
|
||||
ds: {
|
||||
scale: 1,
|
||||
offset: [0, 0],
|
||||
state: {
|
||||
scale: 1,
|
||||
offset: [0, 0]
|
||||
}
|
||||
},
|
||||
setDirty: vi.fn()
|
||||
}
|
||||
|
||||
return {
|
||||
app: {
|
||||
graph: {
|
||||
_nodes: [],
|
||||
nodes: [],
|
||||
subgraphs: new Map(),
|
||||
getNodeById: vi.fn()
|
||||
},
|
||||
canvas: mockCanvas
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
vi.mock('@/renderer/core/canvas/canvasStore', () => ({
|
||||
useCanvasStore: () => ({
|
||||
getCanvas: () => (app as any).canvas
|
||||
})
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/graphTraversalUtil', () => ({
|
||||
findSubgraphPathById: vi.fn()
|
||||
}))
|
||||
|
||||
describe('useSubgraphNavigationStore', () => {
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
})
|
||||
|
||||
it('should not clear navigation stack when workflow internal state changes', async () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
const workflowStore = useWorkflowStore()
|
||||
|
||||
// Mock a workflow
|
||||
const mockWorkflow = {
|
||||
path: 'test-workflow.json',
|
||||
filename: 'test-workflow.json',
|
||||
changeTracker: null
|
||||
} as ComfyWorkflow
|
||||
|
||||
// Set the active workflow (cast to bypass TypeScript check in test)
|
||||
workflowStore.activeWorkflow = mockWorkflow as any
|
||||
|
||||
// Simulate being in a subgraph by restoring state
|
||||
navigationStore.restoreState(['subgraph-1', 'subgraph-2'])
|
||||
|
||||
expect(navigationStore.exportState()).toHaveLength(2)
|
||||
|
||||
// Simulate a change to the workflow's internal state
|
||||
// (e.g., changeTracker.activeState being reassigned)
|
||||
mockWorkflow.changeTracker = { activeState: {} } as any
|
||||
|
||||
// The navigation stack should NOT be cleared because the path hasn't changed
|
||||
expect(navigationStore.exportState()).toHaveLength(2)
|
||||
expect(navigationStore.exportState()).toEqual(['subgraph-1', 'subgraph-2'])
|
||||
})
|
||||
|
||||
it('should preserve navigation stack per workflow', async () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
const workflowStore = useWorkflowStore()
|
||||
|
||||
// Mock first workflow
|
||||
const workflow1 = {
|
||||
path: 'workflow1.json',
|
||||
filename: 'workflow1.json',
|
||||
changeTracker: {
|
||||
restore: vi.fn(),
|
||||
store: vi.fn()
|
||||
}
|
||||
} as unknown as ComfyWorkflow
|
||||
|
||||
// Set the active workflow
|
||||
workflowStore.activeWorkflow = workflow1 as any
|
||||
|
||||
// Simulate the restore process that happens when loading a workflow
|
||||
// Since subgraphState is private, we'll simulate the effect by directly restoring navigation
|
||||
navigationStore.restoreState(['subgraph-1', 'subgraph-2'])
|
||||
|
||||
// Verify navigation was set
|
||||
expect(navigationStore.exportState()).toHaveLength(2)
|
||||
expect(navigationStore.exportState()).toEqual(['subgraph-1', 'subgraph-2'])
|
||||
|
||||
// Switch to a different workflow with no subgraph state (root level)
|
||||
const workflow2 = {
|
||||
path: 'workflow2.json',
|
||||
filename: 'workflow2.json',
|
||||
changeTracker: {
|
||||
restore: vi.fn(),
|
||||
store: vi.fn()
|
||||
}
|
||||
} as unknown as ComfyWorkflow
|
||||
|
||||
workflowStore.activeWorkflow = workflow2 as any
|
||||
|
||||
// Simulate the restore process for workflow2
|
||||
// Since subgraphState is private, we'll simulate the effect by directly restoring navigation
|
||||
navigationStore.restoreState([])
|
||||
|
||||
// The navigation stack should be empty for workflow2 (at root level)
|
||||
expect(navigationStore.exportState()).toHaveLength(0)
|
||||
|
||||
// Switch back to workflow1
|
||||
workflowStore.activeWorkflow = workflow1 as any
|
||||
|
||||
// Simulate the restore process for workflow1 again
|
||||
// Since subgraphState is private, we'll simulate the effect by directly restoring navigation
|
||||
navigationStore.restoreState(['subgraph-1', 'subgraph-2'])
|
||||
|
||||
// The navigation stack should be restored for workflow1
|
||||
expect(navigationStore.exportState()).toHaveLength(2)
|
||||
expect(navigationStore.exportState()).toEqual(['subgraph-1', 'subgraph-2'])
|
||||
})
|
||||
|
||||
it('should clear navigation when activeSubgraph becomes undefined', async () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
const workflowStore = useWorkflowStore()
|
||||
const { findSubgraphPathById } = await import('@/utils/graphTraversalUtil')
|
||||
|
||||
// Create mock subgraph and graph structure
|
||||
const mockSubgraph = {
|
||||
id: 'subgraph-1',
|
||||
rootGraph: (app as any).graph,
|
||||
_nodes: [],
|
||||
nodes: []
|
||||
}
|
||||
|
||||
// Add the subgraph to the graph's subgraphs map
|
||||
;(app as any).graph.subgraphs.set('subgraph-1', mockSubgraph)
|
||||
|
||||
// First set an active workflow
|
||||
const mockWorkflow = {
|
||||
path: 'test-workflow.json',
|
||||
filename: 'test-workflow.json'
|
||||
} as ComfyWorkflow
|
||||
|
||||
workflowStore.activeWorkflow = mockWorkflow as any
|
||||
|
||||
// Mock findSubgraphPathById to return the correct path
|
||||
vi.mocked(findSubgraphPathById).mockReturnValue(['subgraph-1'])
|
||||
|
||||
// Set canvas.subgraph and trigger update to set activeSubgraph
|
||||
;(app as any).canvas.subgraph = mockSubgraph
|
||||
workflowStore.updateActiveGraph()
|
||||
|
||||
// Wait for Vue's reactivity to process the change
|
||||
await nextTick()
|
||||
|
||||
// Verify navigation was set by the watcher
|
||||
expect(navigationStore.exportState()).toHaveLength(1)
|
||||
expect(navigationStore.exportState()).toEqual(['subgraph-1'])
|
||||
|
||||
// Clear canvas.subgraph and trigger update (simulating navigating back to root)
|
||||
;(app as any).canvas.subgraph = null
|
||||
workflowStore.updateActiveGraph()
|
||||
|
||||
// Wait for Vue's reactivity to process the change
|
||||
await nextTick()
|
||||
|
||||
// Stack should be cleared when activeSubgraph becomes undefined
|
||||
expect(navigationStore.exportState()).toHaveLength(0)
|
||||
})
|
||||
})
|
||||
254
src/stores/subgraphNavigationStore.viewport.test.ts
Normal file
254
src/stores/subgraphNavigationStore.viewport.test.ts
Normal file
@@ -0,0 +1,254 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { nextTick } from 'vue'
|
||||
|
||||
import { useWorkflowStore } from '@/platform/workflow/management/stores/workflowStore'
|
||||
import type { ComfyWorkflow } from '@/platform/workflow/management/stores/workflowStore'
|
||||
import { app } from '@/scripts/app'
|
||||
import { useSubgraphNavigationStore } from '@/stores/subgraphNavigationStore'
|
||||
|
||||
vi.mock('@/scripts/app', () => {
|
||||
const mockCanvas = {
|
||||
subgraph: null,
|
||||
ds: {
|
||||
scale: 1,
|
||||
offset: [0, 0],
|
||||
state: {
|
||||
scale: 1,
|
||||
offset: [0, 0]
|
||||
}
|
||||
},
|
||||
setDirty: vi.fn()
|
||||
}
|
||||
|
||||
return {
|
||||
app: {
|
||||
graph: {
|
||||
_nodes: [],
|
||||
nodes: [],
|
||||
subgraphs: new Map(),
|
||||
getNodeById: vi.fn()
|
||||
},
|
||||
canvas: mockCanvas
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
// Mock canvasStore
|
||||
vi.mock('@/renderer/core/canvas/canvasStore', () => ({
|
||||
useCanvasStore: () => ({
|
||||
getCanvas: () => (app as any).canvas
|
||||
})
|
||||
}))
|
||||
|
||||
// Get reference to mock canvas
|
||||
const mockCanvas = app.canvas as any
|
||||
|
||||
describe('useSubgraphNavigationStore - Viewport Persistence', () => {
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
// Reset canvas state
|
||||
mockCanvas.ds.scale = 1
|
||||
mockCanvas.ds.offset = [0, 0]
|
||||
mockCanvas.ds.state.scale = 1
|
||||
mockCanvas.ds.state.offset = [0, 0]
|
||||
mockCanvas.setDirty.mockClear()
|
||||
})
|
||||
|
||||
describe('saveViewport', () => {
|
||||
it('should save viewport state for root graph', () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
|
||||
// Set viewport state
|
||||
mockCanvas.ds.state.scale = 2
|
||||
mockCanvas.ds.state.offset = [100, 200]
|
||||
|
||||
// Save viewport for root
|
||||
navigationStore.saveViewport('root')
|
||||
|
||||
// Check it was saved
|
||||
const saved = navigationStore.viewportCache.get('root')
|
||||
expect(saved).toEqual({
|
||||
scale: 2,
|
||||
offset: [100, 200]
|
||||
})
|
||||
})
|
||||
|
||||
it('should save viewport state for subgraph', () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
|
||||
// Set viewport state
|
||||
mockCanvas.ds.state.scale = 1.5
|
||||
mockCanvas.ds.state.offset = [50, 75]
|
||||
|
||||
// Save viewport for subgraph
|
||||
navigationStore.saveViewport('subgraph-123')
|
||||
|
||||
// Check it was saved
|
||||
const saved = navigationStore.viewportCache.get('subgraph-123')
|
||||
expect(saved).toEqual({
|
||||
scale: 1.5,
|
||||
offset: [50, 75]
|
||||
})
|
||||
})
|
||||
|
||||
it('should save viewport for current context when no ID provided', () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
const workflowStore = useWorkflowStore()
|
||||
|
||||
// Mock being in a subgraph
|
||||
const mockSubgraph = { id: 'sub-456' }
|
||||
workflowStore.activeSubgraph = mockSubgraph as any
|
||||
|
||||
// Set viewport state
|
||||
mockCanvas.ds.state.scale = 3
|
||||
mockCanvas.ds.state.offset = [10, 20]
|
||||
|
||||
// Save viewport without ID (should default to root since activeSubgraph is not tracked by navigation store)
|
||||
navigationStore.saveViewport('sub-456')
|
||||
|
||||
// Should save for the specified subgraph
|
||||
const saved = navigationStore.viewportCache.get('sub-456')
|
||||
expect(saved).toEqual({
|
||||
scale: 3,
|
||||
offset: [10, 20]
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('restoreViewport', () => {
|
||||
it('should restore viewport state for root graph', () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
|
||||
// Save a viewport state
|
||||
navigationStore.viewportCache.set('root', {
|
||||
scale: 2.5,
|
||||
offset: [150, 250]
|
||||
})
|
||||
|
||||
// Restore it
|
||||
navigationStore.restoreViewport('root')
|
||||
|
||||
// Check canvas was updated
|
||||
expect(mockCanvas.ds.scale).toBe(2.5)
|
||||
expect(mockCanvas.ds.offset).toEqual([150, 250])
|
||||
expect(mockCanvas.setDirty).toHaveBeenCalledWith(true, true)
|
||||
})
|
||||
|
||||
it('should restore viewport state for subgraph', () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
|
||||
// Save a viewport state
|
||||
navigationStore.viewportCache.set('sub-789', {
|
||||
scale: 0.75,
|
||||
offset: [-50, -100]
|
||||
})
|
||||
|
||||
// Restore it
|
||||
navigationStore.restoreViewport('sub-789')
|
||||
|
||||
// Check canvas was updated
|
||||
expect(mockCanvas.ds.scale).toBe(0.75)
|
||||
expect(mockCanvas.ds.offset).toEqual([-50, -100])
|
||||
})
|
||||
|
||||
it('should do nothing if no saved viewport exists', () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
|
||||
// Reset canvas
|
||||
mockCanvas.ds.scale = 1
|
||||
mockCanvas.ds.offset = [0, 0]
|
||||
mockCanvas.setDirty.mockClear()
|
||||
|
||||
// Try to restore non-existent viewport
|
||||
navigationStore.restoreViewport('non-existent')
|
||||
|
||||
// Canvas should not change
|
||||
expect(mockCanvas.ds.scale).toBe(1)
|
||||
expect(mockCanvas.ds.offset).toEqual([0, 0])
|
||||
expect(mockCanvas.setDirty).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('navigation integration', () => {
|
||||
it('should save and restore viewport when navigating between subgraphs', async () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
const workflowStore = useWorkflowStore()
|
||||
|
||||
// Create mock subgraph with both _nodes and nodes properties
|
||||
const mockRootGraph = {
|
||||
_nodes: [],
|
||||
nodes: [],
|
||||
subgraphs: new Map(),
|
||||
getNodeById: vi.fn()
|
||||
}
|
||||
const subgraph1 = {
|
||||
id: 'sub1',
|
||||
rootGraph: mockRootGraph,
|
||||
_nodes: [],
|
||||
nodes: []
|
||||
}
|
||||
|
||||
// Start at root with custom viewport
|
||||
mockCanvas.ds.state.scale = 2
|
||||
mockCanvas.ds.state.offset = [100, 100]
|
||||
|
||||
// Navigate to subgraph
|
||||
workflowStore.activeSubgraph = subgraph1 as any
|
||||
await nextTick()
|
||||
|
||||
// Root viewport should have been saved automatically
|
||||
const rootViewport = navigationStore.viewportCache.get('root')
|
||||
expect(rootViewport).toBeDefined()
|
||||
expect(rootViewport?.scale).toBe(2)
|
||||
expect(rootViewport?.offset).toEqual([100, 100])
|
||||
|
||||
// Change viewport in subgraph
|
||||
mockCanvas.ds.state.scale = 0.5
|
||||
mockCanvas.ds.state.offset = [-50, -50]
|
||||
|
||||
// Navigate back to root
|
||||
workflowStore.activeSubgraph = undefined
|
||||
await nextTick()
|
||||
|
||||
// Subgraph viewport should have been saved automatically
|
||||
const sub1Viewport = navigationStore.viewportCache.get('sub1')
|
||||
expect(sub1Viewport).toBeDefined()
|
||||
expect(sub1Viewport?.scale).toBe(0.5)
|
||||
expect(sub1Viewport?.offset).toEqual([-50, -50])
|
||||
|
||||
// Root viewport should be restored automatically
|
||||
expect(mockCanvas.ds.scale).toBe(2)
|
||||
expect(mockCanvas.ds.offset).toEqual([100, 100])
|
||||
})
|
||||
|
||||
it('should preserve viewport cache when switching workflows', async () => {
|
||||
const navigationStore = useSubgraphNavigationStore()
|
||||
const workflowStore = useWorkflowStore()
|
||||
|
||||
// Add some viewport states
|
||||
navigationStore.viewportCache.set('root', { scale: 2, offset: [0, 0] })
|
||||
navigationStore.viewportCache.set('sub1', {
|
||||
scale: 1.5,
|
||||
offset: [10, 10]
|
||||
})
|
||||
|
||||
expect(navigationStore.viewportCache.size).toBe(2)
|
||||
|
||||
// Switch workflows
|
||||
const workflow1 = { path: 'workflow1.json' } as ComfyWorkflow
|
||||
const workflow2 = { path: 'workflow2.json' } as ComfyWorkflow
|
||||
|
||||
workflowStore.activeWorkflow = workflow1 as any
|
||||
await nextTick()
|
||||
|
||||
workflowStore.activeWorkflow = workflow2 as any
|
||||
await nextTick()
|
||||
|
||||
// Cache should be preserved (LRU will manage memory)
|
||||
expect(navigationStore.viewportCache.size).toBe(2)
|
||||
expect(navigationStore.viewportCache.has('root')).toBe(true)
|
||||
expect(navigationStore.viewportCache.has('sub1')).toBe(true)
|
||||
})
|
||||
})
|
||||
})
|
||||
134
src/stores/subgraphStore.test.ts
Normal file
134
src/stores/subgraphStore.test.ts
Normal file
@@ -0,0 +1,134 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type { ComfyNodeDef as ComfyNodeDefV1 } from '@/schemas/nodeDefSchema'
|
||||
import { api } from '@/scripts/api'
|
||||
import { app as comfyApp } from '@/scripts/app'
|
||||
import { useLitegraphService } from '@/services/litegraphService'
|
||||
import { useNodeDefStore } from '@/stores/nodeDefStore'
|
||||
import { useSubgraphStore } from '@/stores/subgraphStore'
|
||||
|
||||
import {
|
||||
createTestSubgraph,
|
||||
createTestSubgraphNode
|
||||
} from '@/lib/litegraph/src/subgraph/__fixtures__/subgraphHelpers'
|
||||
|
||||
// Mock telemetry to break circular dependency (telemetry → workflowStore → app → telemetry)
|
||||
vi.mock('@/platform/telemetry', () => ({
|
||||
useTelemetry: () => null
|
||||
}))
|
||||
|
||||
// Add mock for api at the top of the file
|
||||
vi.mock('@/scripts/api', () => ({
|
||||
api: {
|
||||
getUserData: vi.fn(),
|
||||
storeUserData: vi.fn(),
|
||||
listUserDataFullInfo: vi.fn(),
|
||||
apiURL: vi.fn(),
|
||||
addEventListener: vi.fn()
|
||||
}
|
||||
}))
|
||||
vi.mock('@/services/dialogService', () => ({
|
||||
useDialogService: vi.fn(() => ({
|
||||
prompt: () => 'testname',
|
||||
confirm: () => true
|
||||
}))
|
||||
}))
|
||||
vi.mock('@/renderer/core/canvas/canvasStore', () => ({
|
||||
useCanvasStore: vi.fn(() => ({
|
||||
getCanvas: () => comfyApp.canvas
|
||||
}))
|
||||
}))
|
||||
|
||||
// Mock comfyApp globally for the store setup
|
||||
vi.mock('@/scripts/app', () => ({
|
||||
app: {
|
||||
canvas: {
|
||||
_deserializeItems: vi.fn((i) => i),
|
||||
ds: { visible_area: [0, 0, 0, 0] },
|
||||
selected_nodes: null
|
||||
},
|
||||
loadGraphData: vi.fn()
|
||||
}
|
||||
}))
|
||||
|
||||
const mockGraph = {
|
||||
nodes: [{ type: '123' }],
|
||||
definitions: { subgraphs: [{ id: '123' }] }
|
||||
}
|
||||
|
||||
describe('useSubgraphStore', () => {
|
||||
let store: ReturnType<typeof useSubgraphStore>
|
||||
const mockFetch = async (filenames: Record<string, unknown>) => {
|
||||
vi.mocked(api.listUserDataFullInfo).mockResolvedValue(
|
||||
Object.keys(filenames).map((filename) => ({
|
||||
path: filename,
|
||||
modified: new Date().getTime(),
|
||||
size: 1 // size !== -1 for remote workflows
|
||||
}))
|
||||
)
|
||||
vi.mocked(api).getUserData = vi.fn(
|
||||
(f) =>
|
||||
({
|
||||
status: 200,
|
||||
text: () => JSON.stringify(filenames[f.slice(10)])
|
||||
}) as any
|
||||
)
|
||||
return await store.fetchSubgraphs()
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
store = useSubgraphStore()
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('should allow publishing of a subgraph', async () => {
|
||||
//mock canvas to provide a minimal subgraphNode
|
||||
const subgraph = createTestSubgraph()
|
||||
const subgraphNode = createTestSubgraphNode(subgraph)
|
||||
const graph = subgraphNode.graph
|
||||
graph.add(subgraphNode)
|
||||
vi.mocked(comfyApp.canvas).selectedItems = new Set([subgraphNode])
|
||||
vi.mocked(comfyApp.canvas)._serializeItems = vi.fn(() => ({
|
||||
nodes: [subgraphNode.serialize()],
|
||||
subgraphs: [subgraph.serialize() as any]
|
||||
}))
|
||||
//mock saving of file
|
||||
vi.mocked(api.storeUserData).mockResolvedValue({
|
||||
status: 200,
|
||||
json: () =>
|
||||
Promise.resolve({
|
||||
path: 'subgraphs/testname.json',
|
||||
modified: Date.now(),
|
||||
size: 2
|
||||
})
|
||||
} as Response)
|
||||
await mockFetch({ 'testname.json': mockGraph })
|
||||
//Dialogue service already mocked
|
||||
await store.publishSubgraph()
|
||||
expect(api.storeUserData).toHaveBeenCalled()
|
||||
})
|
||||
it('should display published nodes in the node library', async () => {
|
||||
await mockFetch({ 'test.json': mockGraph })
|
||||
expect(
|
||||
useNodeDefStore().nodeDefs.filter(
|
||||
(d) => d.category == 'Subgraph Blueprints'
|
||||
)
|
||||
).toHaveLength(1)
|
||||
})
|
||||
it('should allow subgraphs to be edited', async () => {
|
||||
await mockFetch({ 'test.json': mockGraph })
|
||||
await store.editBlueprint(store.typePrefix + 'test')
|
||||
//check active graph
|
||||
expect(comfyApp.loadGraphData).toHaveBeenCalled()
|
||||
})
|
||||
it('should allow subgraphs to be added to graph', async () => {
|
||||
//mock
|
||||
await mockFetch({ 'test.json': mockGraph })
|
||||
const res = useLitegraphService().addNodeOnGraph({
|
||||
name: 'SubgraphBlueprint.test'
|
||||
} as ComfyNodeDefV1)
|
||||
expect(res).toBeTruthy()
|
||||
})
|
||||
})
|
||||
360
src/stores/systemStatsStore.test.ts
Normal file
360
src/stores/systemStatsStore.test.ts
Normal file
@@ -0,0 +1,360 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { api } from '@/scripts/api'
|
||||
import { useSystemStatsStore } from '@/stores/systemStatsStore'
|
||||
import { isElectron } from '@/utils/envUtil'
|
||||
|
||||
// Mock the API
|
||||
vi.mock('@/scripts/api', () => ({
|
||||
api: {
|
||||
getSystemStats: vi.fn()
|
||||
}
|
||||
}))
|
||||
|
||||
// Mock the envUtil
|
||||
vi.mock('@/utils/envUtil', () => ({
|
||||
isElectron: vi.fn()
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/distribution/types', () => ({ isCloud: false }))
|
||||
|
||||
describe('useSystemStatsStore', () => {
|
||||
let store: ReturnType<typeof useSystemStatsStore>
|
||||
|
||||
beforeEach(() => {
|
||||
// Mock API to prevent automatic fetch on store creation
|
||||
vi.mocked(api.getSystemStats).mockResolvedValue(null as any)
|
||||
setActivePinia(createPinia())
|
||||
store = useSystemStatsStore()
|
||||
vi.clearAllMocks()
|
||||
})
|
||||
|
||||
it('should initialize and start fetching immediately', async () => {
|
||||
// useAsyncState with immediate: true starts loading right away
|
||||
// In test environment, the mock resolves immediately so loading might be false already
|
||||
expect(store.systemStats).toBeNull() // Initial value is null
|
||||
expect(store.error).toBeUndefined()
|
||||
|
||||
// Wait for initial fetch to complete
|
||||
await new Promise((resolve) => setTimeout(resolve, 0))
|
||||
expect(store.isInitialized).toBe(true) // Should be initialized after fetch
|
||||
})
|
||||
|
||||
describe('refetchSystemStats', () => {
|
||||
it('should fetch system stats successfully', async () => {
|
||||
const mockStats = {
|
||||
system: {
|
||||
os: 'Windows',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
required_frontend_version: '1.24.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
vi.mocked(api.getSystemStats).mockResolvedValue(mockStats)
|
||||
|
||||
await store.refetchSystemStats()
|
||||
|
||||
expect(store.systemStats).toEqual(mockStats)
|
||||
expect(store.isLoading).toBe(false)
|
||||
expect(store.error).toBeUndefined() // useAsyncState uses undefined for no error
|
||||
expect(store.isInitialized).toBe(true)
|
||||
expect(api.getSystemStats).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should handle API errors', async () => {
|
||||
const error = new Error('API Error')
|
||||
vi.mocked(api.getSystemStats).mockRejectedValue(error)
|
||||
|
||||
await store.refetchSystemStats()
|
||||
|
||||
expect(store.systemStats).toBeNull() // Initial value stays null on error
|
||||
expect(store.isLoading).toBe(false)
|
||||
expect(store.error).toEqual(error) // useAsyncState stores the actual error object
|
||||
})
|
||||
|
||||
it('should handle non-Error objects', async () => {
|
||||
vi.mocked(api.getSystemStats).mockRejectedValue('String error')
|
||||
|
||||
await store.refetchSystemStats()
|
||||
|
||||
expect(store.error).toBe('String error') // useAsyncState stores the actual error
|
||||
})
|
||||
|
||||
it('should set loading state correctly', async () => {
|
||||
let resolvePromise: (value: any) => void = () => {}
|
||||
const promise = new Promise<any>((resolve) => {
|
||||
resolvePromise = resolve
|
||||
})
|
||||
vi.mocked(api.getSystemStats).mockReturnValue(promise)
|
||||
|
||||
const fetchPromise = store.refetchSystemStats()
|
||||
expect(store.isLoading).toBe(true)
|
||||
|
||||
resolvePromise({})
|
||||
await fetchPromise
|
||||
|
||||
expect(store.isLoading).toBe(false)
|
||||
})
|
||||
|
||||
it('should handle system stats updates', async () => {
|
||||
const updatedStats = {
|
||||
system: {
|
||||
os: 'Windows',
|
||||
python_version: '3.11.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.1.0',
|
||||
pytorch_version: '2.1.0',
|
||||
required_frontend_version: '1.25.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 7000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
vi.mocked(api.getSystemStats).mockResolvedValue(updatedStats)
|
||||
|
||||
await store.refetchSystemStats()
|
||||
|
||||
expect(store.systemStats).toEqual(updatedStats)
|
||||
expect(store.isLoading).toBe(false)
|
||||
expect(store.error).toBeUndefined()
|
||||
expect(store.isInitialized).toBe(true)
|
||||
expect(api.getSystemStats).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('getFormFactor', () => {
|
||||
beforeEach(() => {
|
||||
// Reset systemStats for each test
|
||||
store.systemStats = null
|
||||
})
|
||||
|
||||
it('should return "other" when systemStats is null', () => {
|
||||
expect(store.getFormFactor()).toBe('other')
|
||||
})
|
||||
|
||||
it('should return "other" when os is not available', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
} as any,
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('other')
|
||||
})
|
||||
|
||||
describe('desktop environment (Electron)', () => {
|
||||
beforeEach(() => {
|
||||
vi.mocked(isElectron).mockReturnValue(true)
|
||||
})
|
||||
|
||||
it('should return "desktop-windows" for Windows desktop', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
os: 'Windows 11',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('desktop-windows')
|
||||
})
|
||||
|
||||
it('should return "desktop-mac" for macOS desktop', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
os: 'Darwin 22.0.0',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('desktop-mac')
|
||||
})
|
||||
|
||||
it('should return "desktop-mac" for Mac desktop', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
os: 'Mac OS X 13.0',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('desktop-mac')
|
||||
})
|
||||
|
||||
it('should return "other" for unknown desktop OS', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
os: 'Linux',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('other')
|
||||
})
|
||||
})
|
||||
|
||||
describe('git environment (non-Electron)', () => {
|
||||
beforeEach(() => {
|
||||
vi.mocked(isElectron).mockReturnValue(false)
|
||||
})
|
||||
|
||||
it('should return "git-windows" for Windows git', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
os: 'Windows 11',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('git-windows')
|
||||
})
|
||||
|
||||
it('should return "git-mac" for macOS git', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
os: 'Darwin 22.0.0',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('git-mac')
|
||||
})
|
||||
|
||||
it('should return "git-linux" for Linux git', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
os: 'linux Ubuntu 22.04',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('git-linux')
|
||||
})
|
||||
|
||||
it('should return "other" for unknown git OS', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
os: 'FreeBSD',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('other')
|
||||
})
|
||||
})
|
||||
|
||||
describe('case insensitive OS detection', () => {
|
||||
beforeEach(() => {
|
||||
vi.mocked(isElectron).mockReturnValue(false)
|
||||
})
|
||||
|
||||
it('should handle uppercase OS names', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
os: 'WINDOWS',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('git-windows')
|
||||
})
|
||||
|
||||
it('should handle mixed case OS names', () => {
|
||||
store.systemStats = {
|
||||
system: {
|
||||
os: 'LiNuX',
|
||||
python_version: '3.10.0',
|
||||
embedded_python: false,
|
||||
comfyui_version: '1.0.0',
|
||||
pytorch_version: '2.0.0',
|
||||
argv: [],
|
||||
ram_total: 16000000000,
|
||||
ram_free: 8000000000
|
||||
},
|
||||
devices: []
|
||||
}
|
||||
|
||||
expect(store.getFormFactor()).toBe('git-linux')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
211
src/stores/userFileStore.test.ts
Normal file
211
src/stores/userFileStore.test.ts
Normal file
@@ -0,0 +1,211 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { api } from '@/scripts/api'
|
||||
import { UserFile, useUserFileStore } from '@/stores/userFileStore'
|
||||
|
||||
// Mock the api
|
||||
vi.mock('@/scripts/api', () => ({
|
||||
api: {
|
||||
listUserDataFullInfo: vi.fn(),
|
||||
getUserData: vi.fn(),
|
||||
storeUserData: vi.fn(),
|
||||
deleteUserData: vi.fn(),
|
||||
moveUserData: vi.fn()
|
||||
}
|
||||
}))
|
||||
|
||||
describe('useUserFileStore', () => {
|
||||
let store: ReturnType<typeof useUserFileStore>
|
||||
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
store = useUserFileStore()
|
||||
vi.resetAllMocks()
|
||||
})
|
||||
|
||||
it('should initialize with empty files', () => {
|
||||
expect(store.userFiles).toHaveLength(0)
|
||||
expect(store.modifiedFiles).toHaveLength(0)
|
||||
expect(store.loadedFiles).toHaveLength(0)
|
||||
})
|
||||
|
||||
describe('syncFiles', () => {
|
||||
it('should add new files', async () => {
|
||||
const mockFiles = [
|
||||
{ path: 'file1.txt', modified: 123, size: 100 },
|
||||
{ path: 'file2.txt', modified: 456, size: 200 }
|
||||
]
|
||||
vi.mocked(api.listUserDataFullInfo).mockResolvedValue(mockFiles)
|
||||
|
||||
await store.syncFiles('dir')
|
||||
|
||||
expect(store.userFiles).toHaveLength(2)
|
||||
expect(store.userFiles[0].path).toBe('dir/file1.txt')
|
||||
expect(store.userFiles[1].path).toBe('dir/file2.txt')
|
||||
})
|
||||
|
||||
it('should update existing files', async () => {
|
||||
const initialFile = { path: 'file1.txt', modified: 123, size: 100 }
|
||||
vi.mocked(api.listUserDataFullInfo).mockResolvedValue([initialFile])
|
||||
await store.syncFiles('dir')
|
||||
|
||||
const updatedFile = { path: 'file1.txt', modified: 456, size: 200 }
|
||||
vi.mocked(api.listUserDataFullInfo).mockResolvedValue([updatedFile])
|
||||
await store.syncFiles('dir')
|
||||
|
||||
expect(store.userFiles).toHaveLength(1)
|
||||
expect(store.userFiles[0].lastModified).toBe(456)
|
||||
expect(store.userFiles[0].size).toBe(200)
|
||||
})
|
||||
|
||||
it('should remove non-existent files', async () => {
|
||||
const initialFiles = [
|
||||
{ path: 'file1.txt', modified: 123, size: 100 },
|
||||
{ path: 'file2.txt', modified: 456, size: 200 }
|
||||
]
|
||||
vi.mocked(api.listUserDataFullInfo).mockResolvedValue(initialFiles)
|
||||
await store.syncFiles('dir')
|
||||
|
||||
const updatedFiles = [{ path: 'file1.txt', modified: 123, size: 100 }]
|
||||
vi.mocked(api.listUserDataFullInfo).mockResolvedValue(updatedFiles)
|
||||
await store.syncFiles('dir')
|
||||
|
||||
expect(store.userFiles).toHaveLength(1)
|
||||
expect(store.userFiles[0].path).toBe('dir/file1.txt')
|
||||
})
|
||||
|
||||
it('should sync root directory when no directory is specified', async () => {
|
||||
const mockFiles = [{ path: 'file1.txt', modified: 123, size: 100 }]
|
||||
vi.mocked(api.listUserDataFullInfo).mockResolvedValue(mockFiles)
|
||||
|
||||
await store.syncFiles()
|
||||
|
||||
expect(api.listUserDataFullInfo).toHaveBeenCalledWith('')
|
||||
expect(store.userFiles).toHaveLength(1)
|
||||
expect(store.userFiles[0].path).toBe('file1.txt')
|
||||
})
|
||||
})
|
||||
|
||||
describe('UserFile', () => {
|
||||
describe('load', () => {
|
||||
it('should load file content', async () => {
|
||||
const file = new UserFile('file1.txt', 123, 100)
|
||||
vi.mocked(api.getUserData).mockResolvedValue({
|
||||
status: 200,
|
||||
text: () => Promise.resolve('file content')
|
||||
} as Response)
|
||||
|
||||
await file.load()
|
||||
|
||||
expect(file.content).toBe('file content')
|
||||
expect(file.originalContent).toBe('file content')
|
||||
expect(file.isLoading).toBe(false)
|
||||
expect(file.isLoaded).toBe(true)
|
||||
})
|
||||
|
||||
it('should throw error on failed load', async () => {
|
||||
const file = new UserFile('file1.txt', 123, 100)
|
||||
vi.mocked(api.getUserData).mockResolvedValue({
|
||||
status: 404,
|
||||
statusText: 'Not Found'
|
||||
} as Response)
|
||||
|
||||
await expect(file.load()).rejects.toThrow(
|
||||
"Failed to load file 'file1.txt': 404 Not Found"
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('save', () => {
|
||||
it('should save modified file', async () => {
|
||||
const file = new UserFile('file1.txt', 123, 100)
|
||||
file.content = 'modified content'
|
||||
file.originalContent = 'original content'
|
||||
vi.mocked(api.storeUserData).mockResolvedValue({
|
||||
status: 200,
|
||||
json: () => Promise.resolve({ modified: 456, size: 200 })
|
||||
} as Response)
|
||||
|
||||
await file.save()
|
||||
|
||||
expect(api.storeUserData).toHaveBeenCalledWith(
|
||||
'file1.txt',
|
||||
'modified content',
|
||||
{ throwOnError: true, full_info: true, overwrite: true }
|
||||
)
|
||||
expect(file.lastModified).toBe(456)
|
||||
expect(file.size).toBe(200)
|
||||
})
|
||||
|
||||
it('should not save unmodified file', async () => {
|
||||
const file = new UserFile('file1.txt', 123, 100)
|
||||
file.content = 'content'
|
||||
file.originalContent = 'content'
|
||||
|
||||
await file.save()
|
||||
|
||||
expect(api.storeUserData).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('delete', () => {
|
||||
it('should delete file', async () => {
|
||||
const file = new UserFile('file1.txt', 123, 100)
|
||||
vi.mocked(api.deleteUserData).mockResolvedValue({
|
||||
status: 204
|
||||
} as Response)
|
||||
|
||||
await file.delete()
|
||||
|
||||
expect(api.deleteUserData).toHaveBeenCalledWith('file1.txt')
|
||||
})
|
||||
})
|
||||
|
||||
describe('rename', () => {
|
||||
it('should rename file', async () => {
|
||||
const file = new UserFile('file1.txt', 123, 100)
|
||||
vi.mocked(api.moveUserData).mockResolvedValue({
|
||||
status: 200,
|
||||
json: () => Promise.resolve({ modified: 456, size: 200 })
|
||||
} as Response)
|
||||
|
||||
await file.rename('newfile.txt')
|
||||
|
||||
expect(api.moveUserData).toHaveBeenCalledWith(
|
||||
'file1.txt',
|
||||
'newfile.txt'
|
||||
)
|
||||
expect(file.path).toBe('newfile.txt')
|
||||
expect(file.lastModified).toBe(456)
|
||||
expect(file.size).toBe(200)
|
||||
})
|
||||
})
|
||||
|
||||
describe('saveAs', () => {
|
||||
it('should save file with new path', async () => {
|
||||
const file = new UserFile('file1.txt', 123, 100)
|
||||
file.content = 'file content'
|
||||
vi.mocked(api.storeUserData).mockResolvedValue({
|
||||
status: 200,
|
||||
json: () => Promise.resolve({ modified: 456, size: 200 })
|
||||
} as Response)
|
||||
|
||||
const newFile = await file.saveAs('newfile.txt')
|
||||
|
||||
expect(api.storeUserData).toHaveBeenCalledWith(
|
||||
'newfile.txt',
|
||||
'file content',
|
||||
// SaveAs should create a new temporary file, which will mean
|
||||
// overwrite is false
|
||||
{ throwOnError: true, full_info: true, overwrite: false }
|
||||
)
|
||||
expect(newFile).toBeInstanceOf(UserFile)
|
||||
expect(newFile.path).toBe('newfile.txt')
|
||||
expect(newFile.lastModified).toBe(456)
|
||||
expect(newFile.size).toBe(200)
|
||||
expect(newFile.content).toBe('file content')
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
166
src/stores/workspace/bottomPanelStore.test.ts
Normal file
166
src/stores/workspace/bottomPanelStore.test.ts
Normal file
@@ -0,0 +1,166 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { useBottomPanelStore } from '@/stores/workspace/bottomPanelStore'
|
||||
import type { BottomPanelExtension } from '@/types/extensionTypes'
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('@/composables/bottomPanelTabs/useShortcutsTab', () => ({
|
||||
useShortcutsTab: () => [
|
||||
{
|
||||
id: 'shortcuts-essentials',
|
||||
title: 'Essentials',
|
||||
component: {},
|
||||
type: 'vue',
|
||||
targetPanel: 'shortcuts'
|
||||
},
|
||||
{
|
||||
id: 'shortcuts-view-controls',
|
||||
title: 'View Controls',
|
||||
component: {},
|
||||
type: 'vue',
|
||||
targetPanel: 'shortcuts'
|
||||
}
|
||||
]
|
||||
}))
|
||||
|
||||
vi.mock('@/composables/bottomPanelTabs/useTerminalTabs', () => ({
|
||||
useLogsTerminalTab: () => ({
|
||||
id: 'logs',
|
||||
title: 'Logs',
|
||||
component: {},
|
||||
type: 'vue',
|
||||
targetPanel: 'terminal'
|
||||
}),
|
||||
useCommandTerminalTab: () => ({
|
||||
id: 'command',
|
||||
title: 'Command',
|
||||
component: {},
|
||||
type: 'vue',
|
||||
targetPanel: 'terminal'
|
||||
})
|
||||
}))
|
||||
|
||||
vi.mock('@/stores/commandStore', () => ({
|
||||
useCommandStore: () => ({
|
||||
registerCommand: vi.fn()
|
||||
})
|
||||
}))
|
||||
|
||||
vi.mock('@/utils/envUtil', () => ({
|
||||
isElectron: () => false
|
||||
}))
|
||||
|
||||
describe('useBottomPanelStore', () => {
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
})
|
||||
|
||||
it('should initialize with empty panels', () => {
|
||||
const store = useBottomPanelStore()
|
||||
|
||||
expect(store.activePanel).toBeNull()
|
||||
expect(store.bottomPanelVisible).toBe(false)
|
||||
expect(store.bottomPanelTabs).toEqual([])
|
||||
expect(store.activeBottomPanelTab).toBeNull()
|
||||
})
|
||||
|
||||
it('should register bottom panel tabs', () => {
|
||||
const store = useBottomPanelStore()
|
||||
const tab: BottomPanelExtension = {
|
||||
id: 'test-tab',
|
||||
title: 'Test Tab',
|
||||
component: {},
|
||||
type: 'vue',
|
||||
targetPanel: 'terminal'
|
||||
}
|
||||
|
||||
store.registerBottomPanelTab(tab)
|
||||
|
||||
expect(store.panels.terminal.tabs.find((t) => t.id === 'test-tab')).toEqual(
|
||||
tab
|
||||
)
|
||||
expect(store.panels.terminal.activeTabId).toBe('test-tab')
|
||||
})
|
||||
|
||||
it('should toggle panel visibility', () => {
|
||||
const store = useBottomPanelStore()
|
||||
const tab: BottomPanelExtension = {
|
||||
id: 'test-tab',
|
||||
title: 'Test Tab',
|
||||
component: {},
|
||||
type: 'vue',
|
||||
targetPanel: 'shortcuts'
|
||||
}
|
||||
|
||||
store.registerBottomPanelTab(tab)
|
||||
|
||||
// Panel should be hidden initially
|
||||
expect(store.activePanel).toBeNull()
|
||||
|
||||
// Toggle should show panel
|
||||
store.togglePanel('shortcuts')
|
||||
expect(store.activePanel).toBe('shortcuts')
|
||||
expect(store.bottomPanelVisible).toBe(true)
|
||||
|
||||
// Toggle again should hide panel
|
||||
store.togglePanel('shortcuts')
|
||||
expect(store.activePanel).toBeNull()
|
||||
expect(store.bottomPanelVisible).toBe(false)
|
||||
})
|
||||
|
||||
it('should switch between panel types', () => {
|
||||
const store = useBottomPanelStore()
|
||||
|
||||
const terminalTab: BottomPanelExtension = {
|
||||
id: 'terminal-tab',
|
||||
title: 'Terminal',
|
||||
component: {},
|
||||
type: 'vue',
|
||||
targetPanel: 'terminal'
|
||||
}
|
||||
|
||||
const shortcutsTab: BottomPanelExtension = {
|
||||
id: 'shortcuts-tab',
|
||||
title: 'Shortcuts',
|
||||
component: {},
|
||||
type: 'vue',
|
||||
targetPanel: 'shortcuts'
|
||||
}
|
||||
|
||||
store.registerBottomPanelTab(terminalTab)
|
||||
store.registerBottomPanelTab(shortcutsTab)
|
||||
|
||||
// Show terminal panel
|
||||
store.togglePanel('terminal')
|
||||
expect(store.activePanel).toBe('terminal')
|
||||
expect(store.activeBottomPanelTab?.id).toBe('terminal-tab')
|
||||
|
||||
// Switch to shortcuts panel
|
||||
store.togglePanel('shortcuts')
|
||||
expect(store.activePanel).toBe('shortcuts')
|
||||
expect(store.activeBottomPanelTab?.id).toBe('shortcuts-tab')
|
||||
})
|
||||
|
||||
it('should toggle specific tabs', () => {
|
||||
const store = useBottomPanelStore()
|
||||
const tab: BottomPanelExtension = {
|
||||
id: 'specific-tab',
|
||||
title: 'Specific Tab',
|
||||
component: {},
|
||||
type: 'vue',
|
||||
targetPanel: 'shortcuts'
|
||||
}
|
||||
|
||||
store.registerBottomPanelTab(tab)
|
||||
|
||||
// Toggle specific tab should show it
|
||||
store.toggleBottomPanelTab('specific-tab')
|
||||
expect(store.activePanel).toBe('shortcuts')
|
||||
expect(store.panels.shortcuts.activeTabId).toBe('specific-tab')
|
||||
|
||||
// Toggle same tab again should hide panel
|
||||
store.toggleBottomPanelTab('specific-tab')
|
||||
expect(store.activePanel).toBeNull()
|
||||
})
|
||||
})
|
||||
399
src/stores/workspace/nodeHelpStore.test.ts
Normal file
399
src/stores/workspace/nodeHelpStore.test.ts
Normal file
@@ -0,0 +1,399 @@
|
||||
import { flushPromises } from '@vue/test-utils'
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { nextTick } from 'vue'
|
||||
|
||||
import { useNodeHelpStore } from '@/stores/workspace/nodeHelpStore'
|
||||
|
||||
vi.mock('@/scripts/api', () => ({
|
||||
api: {
|
||||
fileURL: vi.fn((url) => url)
|
||||
}
|
||||
}))
|
||||
|
||||
vi.mock('@/i18n', () => ({
|
||||
i18n: {
|
||||
global: {
|
||||
locale: {
|
||||
value: 'en'
|
||||
}
|
||||
}
|
||||
}
|
||||
}))
|
||||
|
||||
vi.mock('@/types/nodeSource', () => ({
|
||||
NodeSourceType: {
|
||||
Core: 'core',
|
||||
CustomNodes: 'custom_nodes'
|
||||
},
|
||||
getNodeSource: vi.fn((pythonModule) => {
|
||||
if (pythonModule?.startsWith('custom_nodes.')) {
|
||||
return { type: 'custom_nodes' }
|
||||
}
|
||||
return { type: 'core' }
|
||||
})
|
||||
}))
|
||||
|
||||
vi.mock('dompurify', () => ({
|
||||
default: {
|
||||
sanitize: vi.fn((html) => html)
|
||||
}
|
||||
}))
|
||||
|
||||
vi.mock('marked', () => ({
|
||||
marked: {
|
||||
parse: vi.fn((markdown, options) => {
|
||||
if (options?.renderer) {
|
||||
if (markdown.includes('![')) {
|
||||
const matches = markdown.match(/!\[(.*?)\]\((.*?)\)/)
|
||||
if (matches) {
|
||||
const [, text, href] = matches
|
||||
return options.renderer.image({ href, text, title: '' })
|
||||
}
|
||||
}
|
||||
}
|
||||
return `<p>${markdown}</p>`
|
||||
})
|
||||
},
|
||||
Renderer: class Renderer {
|
||||
image = vi.fn(
|
||||
({ href, title, text }) =>
|
||||
`<img src="${href}" alt="${text}"${title ? ` title="${title}"` : ''} />`
|
||||
)
|
||||
link = vi.fn(
|
||||
({ href, title, text }) =>
|
||||
`<a href="${href}"${title ? ` title="${title}"` : ''}>${text}</a>`
|
||||
)
|
||||
}
|
||||
}))
|
||||
|
||||
describe('nodeHelpStore', () => {
|
||||
// Define a mock node for testing
|
||||
const mockCoreNode = {
|
||||
name: 'TestNode',
|
||||
display_name: 'Test Node',
|
||||
description: 'A test node',
|
||||
inputs: {},
|
||||
outputs: [],
|
||||
python_module: 'comfy.test_node'
|
||||
}
|
||||
|
||||
const mockCustomNode = {
|
||||
name: 'CustomNode',
|
||||
display_name: 'Custom Node',
|
||||
description: 'A custom node',
|
||||
inputs: {},
|
||||
outputs: [],
|
||||
python_module: 'custom_nodes.test_module.custom@1.0.0'
|
||||
}
|
||||
|
||||
// Mock fetch responses
|
||||
const mockFetch = vi.fn()
|
||||
global.fetch = mockFetch
|
||||
|
||||
beforeEach(() => {
|
||||
// Setup Pinia
|
||||
setActivePinia(createPinia())
|
||||
mockFetch.mockReset()
|
||||
})
|
||||
|
||||
it('should initialize with empty state', () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
expect(nodeHelpStore.currentHelpNode).toBeNull()
|
||||
expect(nodeHelpStore.isHelpOpen).toBe(false)
|
||||
})
|
||||
|
||||
it('should open help for a node', () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
nodeHelpStore.openHelp(mockCoreNode as any)
|
||||
|
||||
expect(nodeHelpStore.currentHelpNode).toStrictEqual(mockCoreNode)
|
||||
expect(nodeHelpStore.isHelpOpen).toBe(true)
|
||||
})
|
||||
|
||||
it('should close help', () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
nodeHelpStore.openHelp(mockCoreNode as any)
|
||||
expect(nodeHelpStore.isHelpOpen).toBe(true)
|
||||
|
||||
nodeHelpStore.closeHelp()
|
||||
expect(nodeHelpStore.currentHelpNode).toBeNull()
|
||||
expect(nodeHelpStore.isHelpOpen).toBe(false)
|
||||
})
|
||||
|
||||
it('should generate correct baseUrl for core nodes', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
nodeHelpStore.openHelp(mockCoreNode as any)
|
||||
await nextTick()
|
||||
|
||||
expect(nodeHelpStore.baseUrl).toBe(`/docs/${mockCoreNode.name}/`)
|
||||
})
|
||||
|
||||
it('should generate correct baseUrl for custom nodes', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
nodeHelpStore.openHelp(mockCustomNode as any)
|
||||
await nextTick()
|
||||
|
||||
expect(nodeHelpStore.baseUrl).toBe('/extensions/test_module/docs/')
|
||||
})
|
||||
|
||||
it('should render markdown content correctly', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () => '# Test Help\nThis is test help content'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCoreNode as any)
|
||||
await flushPromises()
|
||||
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
'This is test help content'
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle fetch errors and fall back to description', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: false,
|
||||
statusText: 'Not Found'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCoreNode as any)
|
||||
await flushPromises()
|
||||
|
||||
expect(nodeHelpStore.error).toBe('Not Found')
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(mockCoreNode.description)
|
||||
})
|
||||
|
||||
it('should include alt attribute for images', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () => ''
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCustomNode as any)
|
||||
await flushPromises()
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain('alt="image"')
|
||||
})
|
||||
|
||||
it('should prefix relative video src in custom nodes', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () => '<video src="video.mp4"></video>'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCustomNode as any)
|
||||
await flushPromises()
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
'src="/extensions/test_module/docs/video.mp4"'
|
||||
)
|
||||
})
|
||||
|
||||
it('should prefix relative video src for core nodes with node-specific base URL', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () => '<video src="video.mp4"></video>'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCoreNode as any)
|
||||
await flushPromises()
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
`src="/docs/${mockCoreNode.name}/video.mp4"`
|
||||
)
|
||||
})
|
||||
|
||||
it('should prefix relative source src in custom nodes', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () =>
|
||||
'<video><source src="video.mp4" type="video/mp4" /></video>'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCustomNode as any)
|
||||
await flushPromises()
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
'src="/extensions/test_module/docs/video.mp4"'
|
||||
)
|
||||
})
|
||||
|
||||
it('should prefix relative source src for core nodes with node-specific base URL', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () =>
|
||||
'<video><source src="video.webm" type="video/webm" /></video>'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCoreNode as any)
|
||||
await flushPromises()
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
`src="/docs/${mockCoreNode.name}/video.webm"`
|
||||
)
|
||||
})
|
||||
|
||||
it('should handle loading state', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockImplementationOnce(() => new Promise(() => {})) // Never resolves
|
||||
|
||||
nodeHelpStore.openHelp(mockCoreNode as any)
|
||||
await nextTick()
|
||||
|
||||
expect(nodeHelpStore.isLoading).toBe(true)
|
||||
})
|
||||
|
||||
it('should try fallback URL for custom nodes', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch
|
||||
.mockResolvedValueOnce({
|
||||
ok: false,
|
||||
statusText: 'Not Found'
|
||||
})
|
||||
.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () => '# Fallback content'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCustomNode as any)
|
||||
await flushPromises()
|
||||
|
||||
expect(mockFetch).toHaveBeenCalledTimes(2)
|
||||
expect(mockFetch).toHaveBeenCalledWith(
|
||||
'/extensions/test_module/docs/CustomNode/en.md'
|
||||
)
|
||||
expect(mockFetch).toHaveBeenCalledWith(
|
||||
'/extensions/test_module/docs/CustomNode.md'
|
||||
)
|
||||
})
|
||||
|
||||
it('should prefix relative img src in raw HTML for custom nodes', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () => '# Test\n<img src="image.png" alt="Test image">'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCustomNode as any)
|
||||
await flushPromises()
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
'src="/extensions/test_module/docs/image.png"'
|
||||
)
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain('alt="Test image"')
|
||||
})
|
||||
|
||||
it('should prefix relative img src in raw HTML for core nodes', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () => '# Test\n<img src="image.png" alt="Test image">'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCoreNode as any)
|
||||
await flushPromises()
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
`src="/docs/${mockCoreNode.name}/image.png"`
|
||||
)
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain('alt="Test image"')
|
||||
})
|
||||
|
||||
it('should not prefix absolute img src in raw HTML', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () => '<img src="/absolute/image.png" alt="Absolute">'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCustomNode as any)
|
||||
await flushPromises()
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
'src="/absolute/image.png"'
|
||||
)
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain('alt="Absolute"')
|
||||
})
|
||||
|
||||
it('should not prefix external img src in raw HTML', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () =>
|
||||
'<img src="https://example.com/image.png" alt="External">'
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCustomNode as any)
|
||||
await flushPromises()
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
'src="https://example.com/image.png"'
|
||||
)
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain('alt="External"')
|
||||
})
|
||||
|
||||
it('should handle various quote styles in media src attributes', async () => {
|
||||
const nodeHelpStore = useNodeHelpStore()
|
||||
|
||||
mockFetch.mockResolvedValueOnce({
|
||||
ok: true,
|
||||
text: async () => `# Media Test
|
||||
|
||||
Testing quote styles in properly formed HTML:
|
||||
|
||||
<video src="video1.mp4" controls></video>
|
||||
<video src='video2.mp4' controls></video>
|
||||
<img src="image1.png" alt="Double quotes">
|
||||
<img src='image2.png' alt='Single quotes'>
|
||||
|
||||
<video controls>
|
||||
<source src="video3.mp4" type="video/mp4">
|
||||
<source src='video3.webm' type='video/webm'>
|
||||
</video>
|
||||
|
||||
The MEDIA_SRC_REGEX handles both single and double quotes in img, video and source tags.`
|
||||
})
|
||||
|
||||
nodeHelpStore.openHelp(mockCoreNode as any)
|
||||
await flushPromises()
|
||||
|
||||
// Check that all media elements with different quote styles are prefixed correctly
|
||||
// Double quotes remain as double quotes
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
`src="/docs/${mockCoreNode.name}/video1.mp4"`
|
||||
)
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
`src="/docs/${mockCoreNode.name}/image1.png"`
|
||||
)
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
`src="/docs/${mockCoreNode.name}/video3.mp4"`
|
||||
)
|
||||
|
||||
// Single quotes remain as single quotes in the output
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
`src='/docs/${mockCoreNode.name}/video2.mp4'`
|
||||
)
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
`src='/docs/${mockCoreNode.name}/image2.png'`
|
||||
)
|
||||
expect(nodeHelpStore.renderedHelpHtml).toContain(
|
||||
`src='/docs/${mockCoreNode.name}/video3.webm'`
|
||||
)
|
||||
})
|
||||
})
|
||||
137
src/stores/workspace/searchBoxStore.test.ts
Normal file
137
src/stores/workspace/searchBoxStore.test.ts
Normal file
@@ -0,0 +1,137 @@
|
||||
import { createPinia, setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type NodeSearchBoxPopover from '@/components/searchbox/NodeSearchBoxPopover.vue'
|
||||
import type { useSettingStore } from '@/platform/settings/settingStore'
|
||||
import { useSearchBoxStore } from '@/stores/workspace/searchBoxStore'
|
||||
|
||||
// Mock dependencies
|
||||
vi.mock('@vueuse/core', () => ({
|
||||
useMouse: vi.fn(() => ({
|
||||
x: { value: 100 },
|
||||
y: { value: 200 }
|
||||
}))
|
||||
}))
|
||||
|
||||
const mockSettingStore = createMockSettingStore()
|
||||
vi.mock('@/platform/settings/settingStore', () => ({
|
||||
useSettingStore: vi.fn(() => mockSettingStore)
|
||||
}))
|
||||
|
||||
function createMockPopover(): InstanceType<typeof NodeSearchBoxPopover> {
|
||||
return { showSearchBox: vi.fn() } satisfies Partial<
|
||||
InstanceType<typeof NodeSearchBoxPopover>
|
||||
> as unknown as InstanceType<typeof NodeSearchBoxPopover>
|
||||
}
|
||||
|
||||
function createMockSettingStore(): ReturnType<typeof useSettingStore> {
|
||||
return {
|
||||
get: vi.fn()
|
||||
} satisfies Partial<
|
||||
ReturnType<typeof useSettingStore>
|
||||
> as unknown as ReturnType<typeof useSettingStore>
|
||||
}
|
||||
|
||||
describe('useSearchBoxStore', () => {
|
||||
beforeEach(() => {
|
||||
setActivePinia(createPinia())
|
||||
|
||||
vi.restoreAllMocks()
|
||||
})
|
||||
|
||||
describe('when user has new search box enabled', () => {
|
||||
beforeEach(() => {
|
||||
vi.mocked(mockSettingStore.get).mockReturnValue('default')
|
||||
})
|
||||
|
||||
it('should show new search box is enabled', () => {
|
||||
const store = useSearchBoxStore()
|
||||
expect(store.newSearchBoxEnabled).toBe(true)
|
||||
})
|
||||
|
||||
it('should toggle search box visibility when user presses shortcut', () => {
|
||||
const store = useSearchBoxStore()
|
||||
|
||||
expect(store.visible).toBe(false)
|
||||
|
||||
store.toggleVisible()
|
||||
expect(store.visible).toBe(true)
|
||||
|
||||
store.toggleVisible()
|
||||
expect(store.visible).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when user has legacy search box enabled', () => {
|
||||
beforeEach(() => {
|
||||
vi.mocked(mockSettingStore.get).mockReturnValue('legacy')
|
||||
})
|
||||
|
||||
it('should show new search box is disabled', () => {
|
||||
const store = useSearchBoxStore()
|
||||
expect(store.newSearchBoxEnabled).toBe(false)
|
||||
})
|
||||
|
||||
it('should open legacy search box at mouse position when user presses shortcut', () => {
|
||||
const store = useSearchBoxStore()
|
||||
const mockPopover = createMockPopover()
|
||||
store.setPopoverRef(mockPopover)
|
||||
|
||||
expect(vi.mocked(store.visible)).toBe(false)
|
||||
|
||||
store.toggleVisible()
|
||||
|
||||
expect(vi.mocked(store.visible)).toBe(false) // Doesn't become visible in legacy mode.
|
||||
|
||||
expect(vi.mocked(mockPopover.showSearchBox)).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
clientX: 100,
|
||||
clientY: 200
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('should do nothing when user presses shortcut but popover is not ready', () => {
|
||||
const store = useSearchBoxStore()
|
||||
store.setPopoverRef(null)
|
||||
|
||||
store.toggleVisible()
|
||||
|
||||
expect(store.visible).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('when user configures popover reference', () => {
|
||||
beforeEach(() => {
|
||||
vi.mocked(mockSettingStore.get).mockReturnValue('legacy')
|
||||
})
|
||||
|
||||
it('should enable legacy search when popover is set', () => {
|
||||
const store = useSearchBoxStore()
|
||||
const mockPopover = createMockPopover()
|
||||
store.setPopoverRef(mockPopover)
|
||||
|
||||
store.toggleVisible()
|
||||
|
||||
expect(vi.mocked(mockPopover.showSearchBox)).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should disable legacy search when popover is cleared', () => {
|
||||
const store = useSearchBoxStore()
|
||||
const mockPopover = createMockPopover()
|
||||
store.setPopoverRef(mockPopover)
|
||||
store.setPopoverRef(null)
|
||||
|
||||
store.toggleVisible()
|
||||
|
||||
expect(vi.mocked(mockPopover.showSearchBox)).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('when user first loads the application', () => {
|
||||
it('should have search box hidden by default', () => {
|
||||
const store = useSearchBoxStore()
|
||||
expect(store.visible).toBe(false)
|
||||
})
|
||||
})
|
||||
})
|
||||
Reference in New Issue
Block a user