Compare commits

..

8 Commits

Author SHA1 Message Date
Benjamin Lu
699e6995a9 test: remove fixture unit test scaffolding 2026-04-15 21:04:30 -07:00
Benjamin Lu
ea35401536 test: align in-memory jobs limit handling 2026-04-15 20:58:29 -07:00
Benjamin Lu
f257b7136e test: add in-memory jobs backend fixture 2026-04-15 15:13:55 -07:00
Benjamin Lu
0e62ef0cbc test: extract asset api browser fixture 2026-04-15 15:09:40 -07:00
pythongosssss
a8e1fa8bef test: add regression test for WEBP RIFF padding (#8527) (#11267)
## Summary

Add a regression test for #8527 (handle RIFF padding for odd-sized WEBP
chunks). The fix added + (chunk_length % 2) to the chunk-stride
calculation in getWebpMetadata so EXIF chunks following an odd-sized
chunk are still located correctly. There was no existing unit test
covering getWebpMetadata, so without a regression test the fix could
silently break in a future refactor.

## Changes

- **What**: 
- New unit test file src/scripts/pnginfo.test.ts covering
getWebpMetadata's RIFF chunk traversal.
- Helpers build a minimal in-memory WEBP with one VP8 chunk of
configurable length followed by an EXIF chunk encoding workflow:<json>.
- Odd-length case (regression for #8527): without the % 2 padding
adjustment, the parser walks one byte short and returns {}.
- Even-length case: guards against an over-correction that always adds
1.
- Verified RED→GREEN locally.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-11267-test-add-regression-test-for-WEBP-RIFF-padding-8527-3436d73d36508117a66edf3cb108ded0)
by [Unito](https://www.unito.io)
2026-04-15 18:14:49 +00:00
pythongosssss
83ceef8cb3 test: add regression test for non-string serverLogs (#8460) (#11268)
## Summary

Add a regression test for #8460 (handle non-string `serverLogs` in error
report). The fix added `typeof error.serverLogs === 'string' ? ... :
JSON.stringify(...)` in `errorReportUtil.ts` so object-shaped logs no
longer render as `[object Object]`. There was no existing unit test for
`generateErrorReport`, so this regression could silently return.

## Changes

- **What**: New unit test file `src/utils/errorReportUtil.test.ts`
covering `generateErrorReport`'s `serverLogs` rendering.
- String case: verifies plain-string logs still appear verbatim and
`[object Object]` is absent.
- Object case (regression for #8460): verifies object logs are
JSON-stringified instead of coerced to `[object Object]`.
- Verified RED→GREEN locally.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-11268-test-add-regression-test-for-non-string-serverLogs-8460-3436d73d36508195a32fc559ab7ce5bb)
by [Unito](https://www.unito.io)
2026-04-15 18:14:17 +00:00
Christian Byrne
4885ef856c [chore] Update Comfy Registry API types from comfy-api@113318d (#11261)
## Automated API Type Update

This PR updates the Comfy Registry API types from the latest comfy-api
OpenAPI specification.

- API commit: 113318d
- Generated on: 2026-04-15T04:26:33Z

These types are automatically generated using openapi-typescript.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-11261-chore-Update-Comfy-Registry-API-types-from-comfy-api-113318d-3436d73d3650816784d4efd98d6a665a)
by [Unito](https://www.unito.io)

Co-authored-by: bigcat88 <13381981+bigcat88@users.noreply.github.com>
2026-04-15 11:16:10 -07:00
Christian Byrne
873a75d607 test: add unit tests for usePainter composable (#11137)
## Summary

Add 25 behavioral unit tests for `usePainter` composable, bringing
coverage from 0% to ~35% lines / ~57% functions.

## Changes

- **What**: New test file `src/composables/painter/usePainter.test.ts`
covering widget sync, settings persistence, canvas sizing, brush display
scaling, serialization, restore, pointer event guards, and cursor
visibility.

## Review Focus

- Mock patterns: singleton factory mocks for stores, wrapper component
for lifecycle hooks
- Test coverage prioritization: focused on mount-time sync, reactive
watchers, and computed behavior rather than canvas pixel output

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-11137-test-add-unit-tests-for-usePainter-composable-33e6d73d36508147bde7e9c349c743ca)
by [Unito](https://www.unito.io)
2026-04-15 11:13:31 -07:00
19 changed files with 2088 additions and 288 deletions

View File

@@ -30,8 +30,6 @@ import {
} from '@e2e/fixtures/components/SidebarTab'
import { Topbar } from '@e2e/fixtures/components/Topbar'
import { AppModeHelper } from '@e2e/fixtures/helpers/AppModeHelper'
import type { AssetHelper } from '@e2e/fixtures/helpers/AssetHelper'
import { createAssetHelper } from '@e2e/fixtures/helpers/AssetHelper'
import { AssetsHelper } from '@e2e/fixtures/helpers/AssetsHelper'
import { CanvasHelper } from '@e2e/fixtures/helpers/CanvasHelper'
import { ClipboardHelper } from '@e2e/fixtures/helpers/ClipboardHelper'
@@ -179,7 +177,6 @@ export class ComfyPage {
public readonly queuePanel: QueuePanel
public readonly perf: PerformanceHelper
public readonly assets: AssetsHelper
public readonly assetApi: AssetHelper
public readonly modelLibrary: ModelLibraryHelper
public readonly cloudAuth: CloudAuthHelper
public readonly visibleToasts: Locator
@@ -233,7 +230,6 @@ export class ComfyPage {
this.queuePanel = new QueuePanel(page)
this.perf = new PerformanceHelper(page)
this.assets = new AssetsHelper(page)
this.assetApi = createAssetHelper(page)
this.modelLibrary = new ModelLibraryHelper(page)
this.cloudAuth = new CloudAuthHelper(page)
}
@@ -499,7 +495,6 @@ export const comfyPageFixture = base.extend<{
await use(comfyPage)
await comfyPage.assetApi.clearMocks()
if (needsPerf) await comfyPage.perf.dispose()
},
comfyMouse: async ({ comfyPage }, use) => {

View File

@@ -0,0 +1,16 @@
import { test as base } from '@playwright/test'
import type { AssetHelper } from '@e2e/fixtures/helpers/AssetHelper'
import { createAssetHelper } from '@e2e/fixtures/helpers/AssetHelper'
/**
 * Standalone Playwright fixture that hands each test a fresh AssetHelper
 * bound to the test's page. Any mock routes registered during the test are
 * torn down after the test body (`use`) resolves.
 */
export const assetApiFixture = base.extend<{
  assetApi: AssetHelper
}>({
  assetApi: async ({ page }, use) => {
    const helper = createAssetHelper(page)
    await use(helper)
    await helper.clearMocks()
  }
})

View File

@@ -0,0 +1,239 @@
import type { Page, Route } from '@playwright/test'
import type {
JobDetailResponse,
JobEntry,
JobsListResponse
} from '@comfyorg/ingest-types'
// Route patterns matched against the full request URL; each optionally
// tolerates a trailing query string. The detail pattern requires one extra
// non-empty path segment after /api/jobs/.
const jobsListRoutePattern = /\/api\/jobs(?:\?.*)?$/
const jobDetailRoutePattern = /\/api\/jobs\/[^/?#]+(?:\?.*)?$/
const historyRoutePattern = /\/api\/history(?:\?.*)?$/
/** A seeded job: the list entry plus the detail payload served for its id. */
export type SeededJob = {
  listItem: JobEntry
  detail: JobDetailResponse
}
// Same shape as JobsListResponse except pagination.limit may be null,
// which is what the fixture returns when no limit was requested.
type JobsListFixtureResponse = Omit<JobsListResponse, 'pagination'> & {
  pagination: Omit<JobsListResponse['pagination'], 'limit'> & {
    limit: number | null
  }
}
/**
 * Reads and validates the `limit` query parameter.
 *
 * @returns `{}` when no limit is present, `{ limit }` for a valid positive
 *   integer, or `{ error }` describing why the value was rejected.
 */
function parseLimit(url: URL): { error?: string; limit?: number } {
  const rawLimit = url.searchParams.get('limit')
  if (rawLimit === null) {
    return {}
  }
  const parsed = Number(rawLimit)
  if (!Number.isInteger(parsed)) {
    return { error: 'limit must be an integer' }
  }
  return parsed > 0
    ? { limit: parsed }
    : { error: 'limit must be a positive integer' }
}
/**
 * Reads the `offset` query parameter.
 *
 * @returns the offset as a non-negative integer; missing, non-integer, or
 *   negative values all fall back to 0.
 */
function parseOffset(url: URL): number {
  const parsed = Number(url.searchParams.get('offset'))
  return Number.isInteger(parsed) && parsed >= 0 ? parsed : 0
}
/**
 * Execution duration of a job in the same unit as its timestamps; a missing
 * start or end timestamp is treated as 0.
 */
function getExecutionDuration(job: JobEntry): number {
  return (job.execution_end_time ?? 0) - (job.execution_start_time ?? 0)
}
/**
 * Extracts the job id from the final path segment of the intercepted
 * request's URL, decoding percent-escapes. Returns null when the last
 * segment is empty (e.g. a trailing slash).
 */
function getJobIdFromRequest(route: Route): string | null {
  const { pathname } = new URL(route.request().url())
  const lastSegment = pathname.split('/').at(-1)
  return lastSegment ? decodeURIComponent(lastSegment) : null
}
/**
 * In-memory mock of the jobs backend for Playwright tests.
 *
 * Intercepts the jobs list, job detail, and history endpoints on the given
 * page and answers them from a seeded Map of jobs, emulating filtering,
 * sorting, pagination, and history clear/delete semantics without a server.
 */
export class InMemoryJobsBackend {
  // Handlers are retained so clear() can unroute exactly what was routed.
  private listRouteHandler: ((route: Route) => Promise<void>) | null = null
  private detailRouteHandler: ((route: Route) => Promise<void>) | null = null
  private historyRouteHandler: ((route: Route) => Promise<void>) | null = null
  // Seeded jobs keyed by listItem.id.
  private seededJobs = new Map<string, SeededJob>()
  constructor(private readonly page: Page) {}
  /**
   * Replaces the entire seeded job set and registers the mock routes.
   * Registration is idempotent: calling seed() again only swaps the data.
   */
  async seed(jobs: SeededJob[]): Promise<void> {
    this.seededJobs = new Map(
      jobs.map((job) => [job.listItem.id, job] satisfies [string, SeededJob])
    )
    await this.ensureRoutesRegistered()
  }
  /**
   * Drops all seeded jobs and unregisters every route this backend added.
   * Safe to call even when no routes were registered.
   */
  async clear(): Promise<void> {
    this.seededJobs.clear()
    if (this.listRouteHandler) {
      await this.page.unroute(jobsListRoutePattern, this.listRouteHandler)
      this.listRouteHandler = null
    }
    if (this.detailRouteHandler) {
      await this.page.unroute(jobDetailRoutePattern, this.detailRouteHandler)
      this.detailRouteHandler = null
    }
    if (this.historyRouteHandler) {
      await this.page.unroute(historyRoutePattern, this.historyRouteHandler)
      this.historyRouteHandler = null
    }
  }
  // Registers each of the three route handlers at most once.
  private async ensureRoutesRegistered(): Promise<void> {
    if (!this.listRouteHandler) {
      this.listRouteHandler = async (route: Route) => {
        const url = new URL(route.request().url())
        // Optional comma-separated status filter, e.g. ?status=completed,failed
        const statuses = url.searchParams
          .get('status')
          ?.split(',')
          .map((status) => status.trim())
          .filter(Boolean)
        const workflowId = url.searchParams.get('workflow_id')
        const sortBy = url.searchParams.get('sort_by')
        // Ascending only when explicitly requested; default is descending.
        const sortOrder = url.searchParams.get('sort_order') === 'asc' ? 1 : -1
        let filteredJobs = Array.from(
          this.seededJobs.values(),
          ({ listItem }) => listItem
        )
        if (statuses?.length) {
          filteredJobs = filteredJobs.filter((job) =>
            statuses.includes(job.status)
          )
        }
        if (workflowId) {
          filteredJobs = filteredJobs.filter(
            (job) => job.workflow_id === workflowId
          )
        }
        // Sort by execution duration when requested; any other sort_by
        // (or none) falls back to create_time.
        filteredJobs.sort((left, right) => {
          const leftValue =
            sortBy === 'execution_duration'
              ? getExecutionDuration(left)
              : left.create_time
          const rightValue =
            sortBy === 'execution_duration'
              ? getExecutionDuration(right)
              : right.create_time
          return (leftValue - rightValue) * sortOrder
        })
        const offset = parseOffset(url)
        const { error: limitError, limit } = parseLimit(url)
        // An invalid limit yields a 400, mirroring server-side validation.
        if (limitError) {
          await route.fulfill({
            status: 400,
            contentType: 'application/json',
            body: JSON.stringify({ error: limitError })
          })
          return
        }
        const total = filteredJobs.length
        // No limit means "everything from offset onward".
        const visibleJobs =
          limit === undefined
            ? filteredJobs.slice(offset)
            : filteredJobs.slice(offset, offset + limit)
        const response = {
          jobs: visibleJobs,
          pagination: {
            offset,
            limit: limit ?? null,
            total,
            has_more: offset + visibleJobs.length < total
          }
        } satisfies JobsListFixtureResponse
        await route.fulfill({
          status: 200,
          contentType: 'application/json',
          body: JSON.stringify(response)
        })
      }
      await this.page.route(jobsListRoutePattern, this.listRouteHandler)
    }
    if (!this.detailRouteHandler) {
      this.detailRouteHandler = async (route: Route) => {
        const jobId = getJobIdFromRequest(route)
        const job = jobId ? this.seededJobs.get(jobId) : undefined
        if (!job) {
          await route.fulfill({
            status: 404,
            contentType: 'application/json',
            body: JSON.stringify({ error: 'Job not found' })
          })
          return
        }
        await route.fulfill({
          status: 200,
          contentType: 'application/json',
          body: JSON.stringify(job.detail)
        })
      }
      await this.page.route(jobDetailRoutePattern, this.detailRouteHandler)
    }
    if (!this.historyRouteHandler) {
      this.historyRouteHandler = async (route: Route) => {
        const request = route.request()
        // Only POST mutates history; let other methods fall through.
        if (request.method() !== 'POST') {
          await route.continue()
          return
        }
        const requestBody = request.postDataJSON() as
          | { delete?: string[]; clear?: boolean }
          | undefined
        if (requestBody?.clear) {
          // "clear" drops finished jobs but keeps queued/running ones.
          this.seededJobs = new Map(
            Array.from(this.seededJobs).filter(([, job]) => {
              const status = job.listItem.status
              return status === 'pending' || status === 'in_progress'
            })
          )
        }
        if (requestBody?.delete?.length) {
          for (const jobId of requestBody.delete) {
            this.seededJobs.delete(jobId)
          }
        }
        await route.fulfill({
          status: 200,
          contentType: 'application/json',
          body: JSON.stringify({})
        })
      }
      await this.page.route(historyRoutePattern, this.historyRouteHandler)
    }
  }
}

View File

@@ -0,0 +1,50 @@
import type { JobDetailResponse, JobEntry } from '@comfyorg/ingest-types'
import type { SeededJob } from '@e2e/fixtures/helpers/InMemoryJobsBackend'
/**
 * Builds a JobEntry with sensible defaults (completed, 5s execution window,
 * one PNG preview output named after the job id). Any field may be replaced
 * via `overrides`; `id` is required.
 */
export function createMockJob(
  overrides: Partial<JobEntry> & { id: string }
): JobEntry {
  const timestamp = Date.now()
  const defaults = {
    status: 'completed',
    create_time: timestamp,
    execution_start_time: timestamp,
    execution_end_time: timestamp + 5_000,
    preview_output: {
      filename: `output_${overrides.id}.png`,
      subfolder: '',
      type: 'output',
      nodeId: '1',
      mediaType: 'images'
    },
    outputs_count: 1
  }
  return { ...defaults, ...overrides }
}
/** True for statuses that can no longer change (the job has finished). */
function isTerminalStatus(status: JobEntry['status']) {
  switch (status) {
    case 'completed':
    case 'failed':
    case 'cancelled':
      return true
    default:
      return false
  }
}
/**
 * Derives the detail payload for a seeded list entry. `update_time` falls
 * back through end -> start -> create timestamps; terminal jobs additionally
 * get an empty `outputs` map.
 */
function createSeededJob(listItem: JobEntry): SeededJob {
  const updateTime =
    listItem.execution_end_time ??
    listItem.execution_start_time ??
    listItem.create_time
  const withUpdateTime: JobDetailResponse = {
    ...listItem,
    update_time: updateTime
  }
  const detail: JobDetailResponse = isTerminalStatus(listItem.status)
    ? { ...withUpdateTime, outputs: {} }
    : withUpdateTime
  return { listItem, detail }
}
/** Maps raw list entries to seeded jobs (list item + derived detail payload). */
export function createSeededJobs(listItems: readonly JobEntry[]): SeededJob[] {
  const seeded: SeededJob[] = []
  for (const listItem of listItems) {
    seeded.push(createSeededJob(listItem))
  }
  return seeded
}

View File

@@ -0,0 +1,15 @@
import { test as base } from '@playwright/test'
import { InMemoryJobsBackend } from '@e2e/fixtures/helpers/InMemoryJobsBackend'
/**
 * Playwright fixture providing an isolated in-memory jobs backend per test.
 * Seeded jobs and registered mock routes are cleared during teardown.
 */
export const jobsBackendFixture = base.extend<{
  jobsBackend: InMemoryJobsBackend
}>({
  jobsBackend: async ({ page }, use) => {
    const backend = new InMemoryJobsBackend(page)
    await use(backend)
    await backend.clear()
  }
})

View File

@@ -1,6 +1,7 @@
import { expect } from '@playwright/test'
import { expect, mergeTests } from '@playwright/test'
import { comfyPageFixture as test } from '@e2e/fixtures/ComfyPage'
import { assetApiFixture } from '@e2e/fixtures/assetApiFixture'
import { comfyPageFixture } from '@e2e/fixtures/ComfyPage'
import {
createAssetHelper,
withModels,
@@ -17,6 +18,8 @@ import {
STABLE_OUTPUT
} from '@e2e/fixtures/data/assetFixtures'
const test = mergeTests(comfyPageFixture, assetApiFixture)
test.describe('AssetHelper', () => {
test.describe('operators and configuration', () => {
test('creates helper with models via withModels operator', async ({
@@ -66,8 +69,7 @@ test.describe('AssetHelper', () => {
})
test.describe('mock API routes', () => {
test('GET /assets returns all assets', async ({ comfyPage }) => {
const { assetApi } = comfyPage
test('GET /assets returns all assets', async ({ comfyPage, assetApi }) => {
assetApi.configure(
withAsset(STABLE_CHECKPOINT),
withAsset(STABLE_INPUT_IMAGE)
@@ -87,12 +89,12 @@ test.describe('AssetHelper', () => {
expect(data.assets).toHaveLength(2)
expect(data.total).toBe(2)
expect(data.has_more).toBe(false)
await assetApi.clearMocks()
})
test('GET /assets respects pagination params', async ({ comfyPage }) => {
const { assetApi } = comfyPage
test('GET /assets respects pagination params', async ({
comfyPage,
assetApi
}) => {
assetApi.configure(
withModels(5),
withPagination({ total: 10, hasMore: true })
@@ -110,12 +112,12 @@ test.describe('AssetHelper', () => {
expect(data.assets).toHaveLength(2)
expect(data.total).toBe(10)
expect(data.has_more).toBe(true)
await assetApi.clearMocks()
})
test('GET /assets filters by include_tags', async ({ comfyPage }) => {
const { assetApi } = comfyPage
test('GET /assets filters by include_tags', async ({
comfyPage,
assetApi
}) => {
assetApi.configure(
withAsset(STABLE_CHECKPOINT),
withAsset(STABLE_LORA),
@@ -129,14 +131,12 @@ test.describe('AssetHelper', () => {
const data = body as { assets: Array<{ id: string }> }
expect(data.assets).toHaveLength(1)
expect(data.assets[0].id).toBe(STABLE_CHECKPOINT.id)
await assetApi.clearMocks()
})
test('GET /assets/:id returns single asset or 404', async ({
comfyPage
comfyPage,
assetApi
}) => {
const { assetApi } = comfyPage
assetApi.configure(withAsset(STABLE_CHECKPOINT))
await assetApi.mock()
@@ -151,12 +151,12 @@ test.describe('AssetHelper', () => {
`${comfyPage.url}/api/assets/nonexistent-id`
)
expect(notFound.status).toBe(404)
await assetApi.clearMocks()
})
test('PUT /assets/:id updates asset in store', async ({ comfyPage }) => {
const { assetApi } = comfyPage
test('PUT /assets/:id updates asset in store', async ({
comfyPage,
assetApi
}) => {
assetApi.configure(withAsset(STABLE_CHECKPOINT))
await assetApi.mock()
@@ -175,14 +175,12 @@ test.describe('AssetHelper', () => {
expect(assetApi.getAsset(STABLE_CHECKPOINT.id)?.name).toBe(
'renamed.safetensors'
)
await assetApi.clearMocks()
})
test('DELETE /assets/:id removes asset from store', async ({
comfyPage
comfyPage,
assetApi
}) => {
const { assetApi } = comfyPage
assetApi.configure(withAsset(STABLE_CHECKPOINT), withAsset(STABLE_LORA))
await assetApi.mock()
@@ -193,11 +191,12 @@ test.describe('AssetHelper', () => {
expect(status).toBe(204)
expect(assetApi.assetCount).toBe(1)
expect(assetApi.getAsset(STABLE_CHECKPOINT.id)).toBeUndefined()
await assetApi.clearMocks()
})
test('POST /assets returns upload response', async ({ comfyPage }) => {
test('POST /assets returns upload response', async ({
comfyPage,
assetApi
}) => {
const customUpload = {
id: 'custom-upload-001',
name: 'custom.safetensors',
@@ -205,7 +204,6 @@ test.describe('AssetHelper', () => {
created_at: '2025-01-01T00:00:00Z',
created_new: true
}
const { assetApi } = comfyPage
assetApi.configure(withUploadResponse(customUpload))
await assetApi.mock()
@@ -217,14 +215,12 @@ test.describe('AssetHelper', () => {
const data = body as { id: string; name: string }
expect(data.id).toBe('custom-upload-001')
expect(data.name).toBe('custom.safetensors')
await assetApi.clearMocks()
})
test('POST /assets/download returns async download response', async ({
comfyPage
comfyPage,
assetApi
}) => {
const { assetApi } = comfyPage
await assetApi.mock()
const { status, body } = await assetApi.fetch(
@@ -235,14 +231,14 @@ test.describe('AssetHelper', () => {
const data = body as { task_id: string; status: string }
expect(data.task_id).toBe('download-task-001')
expect(data.status).toBe('created')
await assetApi.clearMocks()
})
})
test.describe('mutation tracking', () => {
test('tracks POST, PUT, DELETE mutations', async ({ comfyPage }) => {
const { assetApi } = comfyPage
test('tracks POST, PUT, DELETE mutations', async ({
comfyPage,
assetApi
}) => {
assetApi.configure(withAsset(STABLE_CHECKPOINT))
await assetApi.mock()
@@ -265,12 +261,12 @@ test.describe('AssetHelper', () => {
expect(mutations[0].method).toBe('POST')
expect(mutations[1].method).toBe('PUT')
expect(mutations[2].method).toBe('DELETE')
await assetApi.clearMocks()
})
test('GET requests are not tracked as mutations', async ({ comfyPage }) => {
const { assetApi } = comfyPage
test('GET requests are not tracked as mutations', async ({
comfyPage,
assetApi
}) => {
assetApi.configure(withAsset(STABLE_CHECKPOINT))
await assetApi.mock()
@@ -280,14 +276,14 @@ test.describe('AssetHelper', () => {
)
expect(assetApi.getMutations()).toHaveLength(0)
await assetApi.clearMocks()
})
})
test.describe('mockError', () => {
test('returns error status for all asset routes', async ({ comfyPage }) => {
const { assetApi } = comfyPage
test('returns error status for all asset routes', async ({
comfyPage,
assetApi
}) => {
await assetApi.mockError(503, 'Service Unavailable')
const { status, body } = await assetApi.fetch(
@@ -296,16 +292,14 @@ test.describe('AssetHelper', () => {
expect(status).toBe(503)
const data = body as { error: string }
expect(data.error).toBe('Service Unavailable')
await assetApi.clearMocks()
})
})
test.describe('clearMocks', () => {
test('resets store, mutations, and unroutes handlers', async ({
comfyPage
comfyPage,
assetApi
}) => {
const { assetApi } = comfyPage
assetApi.configure(withAsset(STABLE_CHECKPOINT))
await assetApi.mock()

View File

@@ -1,13 +1,19 @@
import { expect } from '@playwright/test'
import { expect, mergeTests } from '@playwright/test'
import type { JobEntry } from '@comfyorg/ingest-types'
import { comfyPageFixture as test } from '@e2e/fixtures/ComfyPage'
import { createMockJob } from '@e2e/fixtures/helpers/AssetsHelper'
import { comfyPageFixture } from '@e2e/fixtures/ComfyPage'
import { jobsBackendFixture } from '@e2e/fixtures/jobsBackendFixture'
import {
createMockJob,
createSeededJobs
} from '@e2e/fixtures/helpers/jobFixtures'
import { TestIds } from '@e2e/fixtures/selectors'
import type { RawJobListItem } from '@/platform/remote/comfyui/jobs/jobTypes'
const test = mergeTests(comfyPageFixture, jobsBackendFixture)
const now = Date.now()
const MOCK_JOBS: RawJobListItem[] = [
const MOCK_JOBS: JobEntry[] = [
createMockJob({
id: 'job-completed-1',
status: 'completed',
@@ -35,16 +41,14 @@ const MOCK_JOBS: RawJobListItem[] = [
]
test.describe('Queue overlay', () => {
test.beforeEach(async ({ comfyPage }) => {
await comfyPage.assets.mockOutputHistory(MOCK_JOBS)
await comfyPage.settings.setSetting('Comfy.Queue.QPOV2', false)
test.beforeEach(async ({ comfyPage, jobsBackend }) => {
await jobsBackend.seed(createSeededJobs(MOCK_JOBS))
await comfyPage.setupSettings({
'Comfy.Queue.QPOV2': false
})
await comfyPage.setup()
})
test.afterEach(async ({ comfyPage }) => {
await comfyPage.assets.clearMocks()
})
test('Toggle button opens expanded queue overlay', async ({ comfyPage }) => {
const toggle = comfyPage.page.getByTestId(TestIds.queue.overlayToggle)
await toggle.click()

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,446 @@
import { createTestingPinia } from '@pinia/testing'
import { render } from '@testing-library/vue'
import { setActivePinia } from 'pinia'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { defineComponent, nextTick, ref } from 'vue'
import type { LGraphNode } from '@/lib/litegraph/src/LGraphNode'
import type { IBaseWidget } from '@/lib/litegraph/src/types/widgets'
import { api } from '@/scripts/api'
import { usePainter } from './usePainter'
// --- Module mocks -----------------------------------------------------------
// vi.mock calls are hoisted by vitest to the top of the module, so factories
// must be self-contained (or reference only `mock`-prefixed variables, which
// vitest exempts from the hoisting check — see the app mock below).
// i18n: echo the key (plus JSON-serialized params) so assertions can match
// on translation keys instead of translated strings.
vi.mock('vue-i18n', () => ({
  useI18n: vi.fn(() => ({
    t: (key: string, params?: Record<string, unknown>) =>
      params ? `${key}:${JSON.stringify(params)}` : key
  }))
}))
// Fixed 512x512 element size; several size/ratio tests below depend on it.
vi.mock('@vueuse/core', () => ({
  useElementSize: vi.fn(() => ({
    width: ref(512),
    height: ref(512)
  }))
}))
// Stroke processing is stubbed out: no points are ever produced.
vi.mock('@/composables/maskeditor/StrokeProcessor', () => ({
  StrokeProcessor: vi.fn(() => ({
    addPoint: vi.fn(() => []),
    endStroke: vi.fn(() => [])
  }))
}))
vi.mock('@/platform/distribution/types', () => ({
  isCloud: false
}))
// Singleton store mocks: the factory closes over one instance so every
// `useXxxStore()` call in the composable sees the same spies.
vi.mock('@/platform/updates/common/toastStore', () => {
  const store = { addAlert: vi.fn() }
  return { useToastStore: () => store }
})
vi.mock('@/stores/nodeOutputStore', () => {
  const store = {
    getNodeImageUrls: vi.fn(() => undefined),
    nodeOutputs: {},
    nodePreviewImages: {}
  }
  return { useNodeOutputStore: () => store }
})
vi.mock('@/scripts/api', () => ({
  api: {
    apiURL: vi.fn((path: string) => `http://localhost:8188${path}`),
    fetchApi: vi.fn()
  }
}))
// Shared mutable node state consumed by the app mock below; tests mutate
// these directly and beforeEach resets them. The `mock` prefix is what lets
// the hoisted factory reference them.
const mockWidgets: IBaseWidget[] = []
const mockProperties: Record<string, unknown> = {}
const mockIsInputConnected = vi.fn(() => false)
const mockGetInputNode = vi.fn(() => null)
vi.mock('@/scripts/app', () => ({
  app: {
    canvas: {
      graph: {
        // Getters so each access reads the *current* shared state rather
        // than a snapshot taken at mock time.
        getNodeById: vi.fn(() => ({
          get widgets() {
            return mockWidgets
          },
          get properties() {
            return mockProperties
          },
          isInputConnected: mockIsInputConnected,
          getInputNode: mockGetInputNode
        }))
      }
    }
  }
}))
// Convenience alias for the composable's return shape.
type PainterResult = ReturnType<typeof usePainter>
/**
 * Builds a minimal widget stub: named value holder with a spy callback and
 * no serializer attached yet.
 */
function makeWidget(name: string, value: unknown = null): IBaseWidget {
  const stub = {
    name,
    value,
    callback: vi.fn(),
    serializeValue: undefined
  }
  return stub as unknown as IBaseWidget
}
/**
 * Renders a throwaway wrapper component that instantiates usePainter inside
 * setup(), so onMounted/watch lifecycle hooks actually fire during the test.
 * Returns the composable result plus the refs handed to it.
 */
function mountPainter(nodeId = 'test-node', initialModelValue = '') {
  let painter!: PainterResult
  const canvasEl = ref<HTMLCanvasElement | null>(null)
  const cursorEl = ref<HTMLElement | null>(null)
  const modelValue = ref(initialModelValue)
  const Host = defineComponent({
    setup() {
      painter = usePainter(nodeId, { canvasEl, cursorEl, modelValue })
      return {}
    },
    render: () => null
  })
  render(Host)
  return { painter, canvasEl, cursorEl, modelValue }
}
// Behavioral tests for usePainter. All collaborators (stores, app graph,
// element sizing, stroke processing) are mocked above; each test mounts a
// wrapper component so lifecycle hooks run against the shared mock state,
// which beforeEach resets between tests.
describe('usePainter', () => {
  beforeEach(() => {
    setActivePinia(createTestingPinia({ stubActions: false }))
    vi.resetAllMocks()
    // Reset the shared node state mutated by individual tests.
    mockWidgets.length = 0
    for (const key of Object.keys(mockProperties)) {
      delete mockProperties[key]
    }
    mockIsInputConnected.mockReturnValue(false)
    mockGetInputNode.mockReturnValue(null)
  })
  describe('syncCanvasSizeFromWidgets', () => {
    it('reads width/height from widget values on initialization', () => {
      mockWidgets.push(makeWidget('width', 1024), makeWidget('height', 768))
      const { painter } = mountPainter()
      expect(painter.canvasWidth.value).toBe(1024)
      expect(painter.canvasHeight.value).toBe(768)
    })
    it('defaults to 512 when widgets are missing', () => {
      const { painter } = mountPainter()
      expect(painter.canvasWidth.value).toBe(512)
      expect(painter.canvasHeight.value).toBe(512)
    })
  })
  describe('restoreSettingsFromProperties', () => {
    it('restores tool and brush settings from node properties on init', () => {
      mockProperties.painterTool = 'eraser'
      mockProperties.painterBrushSize = 42
      mockProperties.painterBrushColor = '#ff0000'
      mockProperties.painterBrushOpacity = 0.5
      mockProperties.painterBrushHardness = 0.8
      const { painter } = mountPainter()
      expect(painter.tool.value).toBe('eraser')
      expect(painter.brushSize.value).toBe(42)
      expect(painter.brushColor.value).toBe('#ff0000')
      expect(painter.brushOpacity.value).toBe(0.5)
      expect(painter.brushHardness.value).toBe(0.8)
    })
    it('restores backgroundColor from bg_color widget', () => {
      mockWidgets.push(makeWidget('bg_color', '#123456'))
      const { painter } = mountPainter()
      expect(painter.backgroundColor.value).toBe('#123456')
    })
    it('keeps defaults when no properties are stored', () => {
      const { painter } = mountPainter()
      expect(painter.tool.value).toBe('brush')
      expect(painter.brushSize.value).toBe(20)
      expect(painter.brushColor.value).toBe('#ffffff')
      expect(painter.brushOpacity.value).toBe(1)
      expect(painter.brushHardness.value).toBe(1)
    })
  })
  describe('saveSettingsToProperties', () => {
    it('persists tool settings to node properties when they change', async () => {
      const { painter } = mountPainter()
      painter.tool.value = 'eraser'
      painter.brushSize.value = 50
      painter.brushColor.value = '#00ff00'
      painter.brushOpacity.value = 0.7
      painter.brushHardness.value = 0.3
      // Watchers flush on the next tick.
      await nextTick()
      expect(mockProperties.painterTool).toBe('eraser')
      expect(mockProperties.painterBrushSize).toBe(50)
      expect(mockProperties.painterBrushColor).toBe('#00ff00')
      expect(mockProperties.painterBrushOpacity).toBe(0.7)
      expect(mockProperties.painterBrushHardness).toBe(0.3)
    })
  })
  describe('syncCanvasSizeToWidgets', () => {
    it('syncs canvas dimensions to widgets when size changes', async () => {
      const widthWidget = makeWidget('width', 512)
      const heightWidget = makeWidget('height', 512)
      mockWidgets.push(widthWidget, heightWidget)
      const { painter } = mountPainter()
      painter.canvasWidth.value = 800
      painter.canvasHeight.value = 600
      await nextTick()
      expect(widthWidget.value).toBe(800)
      expect(heightWidget.value).toBe(600)
      expect(widthWidget.callback).toHaveBeenCalledWith(800)
      expect(heightWidget.callback).toHaveBeenCalledWith(600)
    })
  })
  describe('syncBackgroundColorToWidget', () => {
    it('syncs background color to widget when color changes', async () => {
      const bgWidget = makeWidget('bg_color', '#000000')
      mockWidgets.push(bgWidget)
      const { painter } = mountPainter()
      painter.backgroundColor.value = '#ff00ff'
      await nextTick()
      expect(bgWidget.value).toBe('#ff00ff')
      expect(bgWidget.callback).toHaveBeenCalledWith('#ff00ff')
    })
  })
  describe('updateInputImageUrl', () => {
    it('sets isImageInputConnected to false when input is not connected', () => {
      const { painter } = mountPainter()
      expect(painter.isImageInputConnected.value).toBe(false)
      expect(painter.inputImageUrl.value).toBeNull()
    })
    it('sets isImageInputConnected to true when input is connected', () => {
      mockIsInputConnected.mockReturnValue(true)
      const { painter } = mountPainter()
      expect(painter.isImageInputConnected.value).toBe(true)
    })
  })
  describe('handleInputImageLoad', () => {
    it('updates canvas size and widgets from loaded image dimensions', () => {
      const widthWidget = makeWidget('width', 512)
      const heightWidget = makeWidget('height', 512)
      mockWidgets.push(widthWidget, heightWidget)
      const { painter } = mountPainter()
      // Minimal stand-in for an <img> load event carrying natural dimensions.
      const fakeEvent = {
        target: {
          naturalWidth: 1920,
          naturalHeight: 1080
        }
      } as unknown as Event
      painter.handleInputImageLoad(fakeEvent)
      expect(painter.canvasWidth.value).toBe(1920)
      expect(painter.canvasHeight.value).toBe(1080)
      expect(widthWidget.value).toBe(1920)
      expect(heightWidget.value).toBe(1080)
    })
  })
  describe('cursor visibility', () => {
    it('sets cursorVisible to true on pointer enter', () => {
      const { painter } = mountPainter()
      painter.handlePointerEnter()
      expect(painter.cursorVisible.value).toBe(true)
    })
    it('sets cursorVisible to false on pointer leave', () => {
      const { painter } = mountPainter()
      painter.handlePointerEnter()
      painter.handlePointerLeave()
      expect(painter.cursorVisible.value).toBe(false)
    })
  })
  // The expected values below rely on the mocked 512x512 element size, so the
  // display ratio is exactly 1.
  describe('displayBrushSize', () => {
    it('scales brush size by canvas display ratio', () => {
      const { painter } = mountPainter()
      // canvasDisplayWidth=512, canvasWidth=512 → ratio=1
      // hardness=1 → effectiveRadius = radius * 1.0
      // displayBrushSize = (20/2) * 1.0 * 2 * 1 = 20
      expect(painter.displayBrushSize.value).toBe(20)
    })
    it('increases for soft brush hardness', () => {
      const { painter } = mountPainter()
      painter.brushHardness.value = 0
      // hardness=0 → effectiveRadius = 10 * 1.5 = 15
      // displayBrushSize = 15 * 2 * 1 = 30
      expect(painter.displayBrushSize.value).toBe(30)
    })
  })
  describe('activeHardness (via displayBrushSize)', () => {
    it('returns 1 for eraser regardless of brushHardness', () => {
      const { painter } = mountPainter()
      painter.brushHardness.value = 0.3
      painter.tool.value = 'eraser'
      // eraser hardness=1 → displayBrushSize = 10 * 1.0 * 2 = 20
      expect(painter.displayBrushSize.value).toBe(20)
    })
    it('uses brushHardness for brush tool', () => {
      const { painter } = mountPainter()
      painter.tool.value = 'brush'
      painter.brushHardness.value = 0.5
      // hardness=0.5 → scale=1.25 → 10*1.25*2 = 25
      expect(painter.displayBrushSize.value).toBe(25)
    })
  })
  describe('registerWidgetSerialization', () => {
    it('attaches serializeValue to the mask widget on init', () => {
      const maskWidget = makeWidget('mask', '')
      mockWidgets.push(maskWidget)
      mountPainter()
      expect(maskWidget.serializeValue).toBeTypeOf('function')
    })
  })
  describe('serializeValue', () => {
    it('returns empty string when canvas has no strokes', async () => {
      const maskWidget = makeWidget('mask', '')
      mockWidgets.push(maskWidget)
      mountPainter()
      const result = await maskWidget.serializeValue!({} as LGraphNode, 0)
      expect(result).toBe('')
    })
    it('returns existing modelValue when not dirty', async () => {
      const maskWidget = makeWidget('mask', '')
      mockWidgets.push(maskWidget)
      const { modelValue } = mountPainter()
      modelValue.value = 'painter/existing.png [temp]'
      const result = await maskWidget.serializeValue!({} as LGraphNode, 0)
      // isCanvasEmpty() is true (no strokes drawn), so returns ''
      expect(result).toBe('')
    })
  })
  describe('restoreCanvas', () => {
    it('builds correct URL from modelValue on mount', () => {
      const { modelValue } = mountPainter()
      // Before mount, set the modelValue
      // restoreCanvas is called in onMounted, so we test by observing api.apiURL calls
      // With empty modelValue, restoreCanvas exits early
      expect(modelValue.value).toBe('')
    })
    it('calls api.apiURL with parsed filename params when modelValue is set', () => {
      vi.mocked(api.apiURL).mockClear()
      mountPainter('test-node', 'painter/my-image.png [temp]')
      expect(api.apiURL).toHaveBeenCalledWith(
        expect.stringContaining('filename=my-image.png')
      )
      expect(api.apiURL).toHaveBeenCalledWith(
        expect.stringContaining('subfolder=painter')
      )
      expect(api.apiURL).toHaveBeenCalledWith(
        expect.stringContaining('type=temp')
      )
    })
  })
  describe('handleClear', () => {
    it('does not throw when canvas element is null', () => {
      const { painter } = mountPainter()
      expect(() => painter.handleClear()).not.toThrow()
    })
  })
  describe('handlePointerDown', () => {
    it('ignores non-primary button clicks', () => {
      const { painter } = mountPainter()
      const mockSetPointerCapture = vi.fn()
      // button: 2 is the secondary (right) button per the PointerEvent spec.
      const event = new PointerEvent('pointerdown', {
        button: 2
      })
      Object.defineProperty(event, 'target', {
        value: {
          setPointerCapture: mockSetPointerCapture
        }
      })
      painter.handlePointerDown(event)
      expect(mockSetPointerCapture).not.toHaveBeenCalled()
    })
  })
  describe('handlePointerUp', () => {
    it('ignores non-primary button releases', () => {
      const { painter } = mountPainter()
      const mockReleasePointerCapture = vi.fn()
      const event = {
        button: 2,
        target: {
          releasePointerCapture: mockReleasePointerCapture
        }
      } as unknown as PointerEvent
      painter.handlePointerUp(event)
      expect(mockReleasePointerCapture).not.toHaveBeenCalled()
    })
  })
})

View File

@@ -6,9 +6,10 @@ import { downloadFile } from '@/base/common/downloadUtil'
import Popover from '@/components/ui/Popover.vue'
import Button from '@/components/ui/button/Button.vue'
import { useAppMode } from '@/composables/useAppMode'
import { useMediaAssetActions } from '@/platform/assets/composables/useMediaAssetActions'
import type { AssetItem } from '@/platform/assets/schemas/assetSchema'
import { useWorkflowStore } from '@/platform/workflow/management/stores/workflowStore'
import type { JobListItem } from '@/platform/remote/comfyui/jobs/jobTypes'
import { extractWorkflowFromAsset } from '@/platform/workflow/utils/workflowExtractionUtil'
import ImagePreview from '@/renderer/extensions/linearMode/ImagePreview.vue'
import LatentPreview from '@/renderer/extensions/linearMode/LatentPreview.vue'
import LinearWelcome from '@/renderer/extensions/linearMode/LinearWelcome.vue'
@@ -18,12 +19,11 @@ import MediaOutputPreview from '@/renderer/extensions/linearMode/MediaOutputPrev
import OutputHistory from '@/renderer/extensions/linearMode/OutputHistory.vue'
import { useOutputHistory } from '@/renderer/extensions/linearMode/useOutputHistory'
import type { OutputSelection } from '@/renderer/extensions/linearMode/linearModeTypes'
import { extractWorkflow } from '@/platform/remote/comfyui/jobs/fetchJobs'
import { api } from '@/scripts/api'
import { app } from '@/scripts/app'
import type { ResultItemImpl } from '@/stores/queueStore'
const { t } = useI18n()
const mediaActions = useMediaAssetActions()
const { isBuilderMode, isArrangeMode } = useAppMode()
const { allOutputs, isWorkflowActive, cancelActiveWorkflowJobs } =
useOutputHistory()
@@ -33,28 +33,28 @@ const { runButtonClick, mobile, typeformWidgetId } = defineProps<{
typeformWidgetId?: string
}>()
const selectedItem = ref<JobListItem>()
const selectedItem = ref<AssetItem>()
const selectedOutput = ref<ResultItemImpl>()
const canShowPreview = ref(true)
const latentPreview = ref<string>()
const showSkeleton = ref(false)
function handleSelection(sel: OutputSelection) {
selectedItem.value = sel.job
selectedItem.value = sel.asset
selectedOutput.value = sel.output
canShowPreview.value = sel.canShowPreview
latentPreview.value = sel.latentPreviewUrl
showSkeleton.value = sel.showSkeleton ?? false
}
function downloadJob(item?: JobListItem) {
function downloadAsset(item?: AssetItem) {
for (const output of allOutputs(item))
downloadFile(output.url, output.filename)
}
async function loadWorkflow(item: JobListItem | undefined) {
async function loadWorkflow(item: AssetItem | undefined) {
if (!item) return
const workflow = await extractWorkflow(item)
const { workflow } = await extractWorkflowFromAsset(item)
if (!workflow) return
if (workflow.id !== app.rootGraph.id) return app.loadGraphData(workflow)
@@ -120,7 +120,7 @@ async function rerun(e: Event) {
label: t('linearMode.downloadAll', {
count: allOutputs(selectedItem).length
}),
command: () => downloadJob(selectedItem)
command: () => downloadAsset(selectedItem)
},
{ separator: true }
]
@@ -128,7 +128,7 @@ async function rerun(e: Event) {
{
icon: 'icon-[lucide--trash-2]',
label: t('linearMode.deleteAllAssets'),
command: () => api.deleteItem('output', selectedItem!.id)
command: () => mediaActions.deleteAssets(selectedItem!)
}
]"
/>

View File

@@ -28,13 +28,12 @@ import OutputPreviewItem from '@/renderer/extensions/linearMode/OutputPreviewIte
import { useOutputHistory } from '@/renderer/extensions/linearMode/useOutputHistory'
import { useWorkflowStore } from '@/platform/workflow/management/stores/workflowStore'
import { useAppModeStore } from '@/stores/appModeStore'
import { useHistoryStore, useQueueStore } from '@/stores/queueStore'
import { useQueueStore } from '@/stores/queueStore'
import { cn } from '@/utils/tailwindUtil'
const { outputs, allOutputs, selectFirstHistory, mayBeActiveWorkflowPending } =
useOutputHistory()
const { hasOutputs } = storeToRefs(useAppModeStore())
const historyStore = useHistoryStore()
const queueStore = useQueueStore()
const store = useLinearOutputStore()
const workflowStore = useWorkflowStore()
@@ -57,7 +56,7 @@ const hasActiveContent = computed(
)
const visibleHistory = computed(() =>
outputs.value.filter((a) => allOutputs(a).length > 0)
outputs.media.value.filter((a) => allOutputs(a).length > 0)
)
const selectableItems = computed(() => {
@@ -72,7 +71,7 @@ const selectableItems = computed(() => {
itemId: item.id
})
}
for (const asset of outputs.value) {
for (const asset of outputs.media.value) {
const outs = allOutputs(asset)
for (let k = 0; k < outs.length; k++) {
items.push({
@@ -138,11 +137,11 @@ function doEmit() {
}
return
}
const job = outputs.value.find((a) => a.id === sel.assetId)
const output = job ? allOutputs(job)[sel.key] : undefined
const isFirst = outputs.value[0]?.id === sel.assetId
const asset = outputs.media.value.find((a) => a.id === sel.assetId)
const output = asset ? allOutputs(asset)[sel.key] : undefined
const isFirst = outputs.media.value[0]?.id === sel.assetId
emit('updateSelection', {
job,
asset,
output,
canShowPreview: isFirst
})
@@ -171,7 +170,7 @@ watch(
// Keep history selection stable on media changes
watch(
() => outputs.value,
() => outputs.media.value,
(newAssets, oldAssets) => {
if (
newAssets.length === oldAssets.length ||
@@ -220,8 +219,8 @@ watch(
}
)
useInfiniteScroll(outputsRef, historyStore.loadMoreHistory, {
canLoadMore: () => historyStore.hasMoreHistory
useInfiniteScroll(outputsRef, outputs.loadMore, {
canLoadMore: () => outputs.hasMore.value
})
function navigateToAdjacent(direction: 1 | -1) {

View File

@@ -1,4 +1,4 @@
import type { JobListItem } from '@/platform/remote/comfyui/jobs/jobTypes'
import type { AssetItem } from '@/platform/assets/schemas/assetSchema'
import type { ResultItemImpl } from '@/stores/queueStore'
export interface InProgressItem {
@@ -10,7 +10,7 @@ export interface InProgressItem {
}
export interface OutputSelection {
job?: JobListItem
asset?: AssetItem
output?: ResultItemImpl
canShowPreview: boolean
latentPreviewUrl?: string

View File

@@ -2,13 +2,13 @@ import { createPinia, setActivePinia } from 'pinia'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { nextTick, ref } from 'vue'
import type { JobListItem } from '@/platform/remote/comfyui/jobs/jobTypes'
import type { AssetItem } from '@/platform/assets/schemas/assetSchema'
import type { InProgressItem } from '@/renderer/extensions/linearMode/linearModeTypes'
import { useOutputHistory } from '@/renderer/extensions/linearMode/useOutputHistory'
import { useAppModeStore } from '@/stores/appModeStore'
import { ResultItemImpl } from '@/stores/queueStore'
const mediaRef = ref<JobListItem[]>([])
const mediaRef = ref<AssetItem[]>([])
const pendingResolveRef = ref(new Set<string>())
const inProgressItemsRef = ref<InProgressItem[]>([])
const activeWorkflowInProgressItemsRef = ref<InProgressItem[]>([])
@@ -115,29 +115,25 @@ vi.mock('@/renderer/extensions/linearMode/flattenNodeOutput', () => ({
}
}))
function makeJob(
function makeAsset(
id: string,
jobId: string,
opts?: { allOutputs?: ResultItemImpl[]; outputCount?: number }
): JobListItem {
): AssetItem {
return {
id,
status: 'completed',
create_time: 0,
//name: `${id}.png`,
//tags: [],
output_count: opts?.outputCount,
outputs: opts?.allOutputs && unflatResults(opts.allOutputs),
priority: 0
//preview_url: `/view?filename=${id}.png`,
//user_metadata: {
// jobId,
// nodeId: '1',
// subfolder: '',
// ...(opts?.allOutputs ? { allOutputs: opts.allOutputs } : {}),
// ...(opts?.outputCount !== undefined
// ? { outputCount: opts.outputCount }
// : {})
//}
name: `${id}.png`,
tags: [],
preview_url: `/view?filename=${id}.png`,
user_metadata: {
jobId,
nodeId: '1',
subfolder: '',
...(opts?.allOutputs ? { allOutputs: opts.allOutputs } : {}),
...(opts?.outputCount !== undefined
? { outputCount: opts.outputCount }
: {})
}
}
}
@@ -151,18 +147,6 @@ function makeResult(filename: string, nodeId: string = '1'): ResultItemImpl {
})
}
function unflatResults(results: ResultItemImpl[]) {
const ret: Record<string, Record<string, ResultItemImpl[]>> = {}
for (const result of results) {
ret[result.nodeId] ??= {}
const nodeOutputs = ret[result.nodeId]
nodeOutputs[result.mediaType] ??= []
nodeOutputs[result.mediaType].push(result)
}
return ret
}
describe(useOutputHistory, () => {
beforeEach(() => {
setActivePinia(createPinia())
@@ -188,22 +172,22 @@ describe(useOutputHistory, () => {
['job-1', 'workflows/test.json'],
['job-2', 'workflows/other.json']
])
mediaRef.value = [makeJob('job-1'), makeJob('job-2')]
mediaRef.value = [makeAsset('a1', 'job-1'), makeAsset('a2', 'job-2')]
const { outputs } = useOutputHistory()
expect(outputs.value).toHaveLength(1)
expect(outputs.value[0].id).toBe('a1')
expect(outputs.media.value).toHaveLength(1)
expect(outputs.media.value[0].id).toBe('a1')
})
it('returns empty when no workflow is active', () => {
activeWorkflowPathRef.value = ''
jobIdToPathRef.value = new Map([['job-1', 'workflows/test.json']])
mediaRef.value = [makeJob('job-1')]
mediaRef.value = [makeAsset('a1', 'job-1')]
const { outputs } = useOutputHistory()
expect(outputs.value).toHaveLength(0)
expect(outputs.media.value).toHaveLength(0)
})
it('updates when active workflow changes', async () => {
@@ -211,19 +195,19 @@ describe(useOutputHistory, () => {
['job-1', 'workflows/a.json'],
['job-2', 'workflows/b.json']
])
mediaRef.value = [makeJob('job-1'), makeJob('job-2')]
mediaRef.value = [makeAsset('a1', 'job-1'), makeAsset('a2', 'job-2')]
activeWorkflowPathRef.value = 'workflows/a.json'
const { outputs } = useOutputHistory()
expect(outputs.value).toHaveLength(1)
expect(outputs.value[0].id).toBe('a1')
expect(outputs.media.value).toHaveLength(1)
expect(outputs.media.value[0].id).toBe('a1')
activeWorkflowPathRef.value = 'workflows/b.json'
await nextTick()
expect(outputs.value).toHaveLength(1)
expect(outputs.value[0].id).toBe('a2')
expect(outputs.media.value).toHaveLength(1)
expect(outputs.media.value[0].id).toBe('a2')
})
})
@@ -238,7 +222,7 @@ describe(useOutputHistory, () => {
it('returns outputs from metadata allOutputs when count matches', () => {
useAppModeStore().selectedOutputs.push('1')
const results = [makeResult('a.png'), makeResult('b.png')]
const asset = makeJob('job-1', {
const asset = makeAsset('a1', 'job-1', {
allOutputs: results,
outputCount: 2
})
@@ -258,7 +242,7 @@ describe(useOutputHistory, () => {
makeResult('b.png', '2'),
makeResult('c.png', '3')
]
const asset = makeJob('job-1', {
const asset = makeAsset('a1', 'job-1', {
allOutputs: results,
outputCount: 3
})
@@ -275,7 +259,7 @@ describe(useOutputHistory, () => {
it('returns empty when no output nodes are selected', () => {
const results = [makeResult('a.png', '1'), makeResult('b.png', '2')]
const asset = makeJob('job-1', {
const asset = makeAsset('a1', 'job-1', {
allOutputs: results,
outputCount: 2
})
@@ -288,7 +272,7 @@ describe(useOutputHistory, () => {
it('returns consistent filtered outputs across repeated calls', () => {
const results = [makeResult('a.png', '1'), makeResult('b.png', '2')]
const asset = makeJob('job-1', {
const asset = makeAsset('a1', 'job-1', {
allOutputs: results,
outputCount: 2
})
@@ -322,7 +306,7 @@ describe(useOutputHistory, () => {
output: makeResult('b.png')
}
]
const asset = makeJob('job-1')
const asset = makeAsset('a1', 'job-1')
const { allOutputs } = useOutputHistory()
const outputs = allOutputs(asset)
@@ -345,7 +329,7 @@ describe(useOutputHistory, () => {
}
}
})
const asset = makeJob('job-1')
const asset = makeAsset('a1', 'job-1')
const { allOutputs } = useOutputHistory()
@@ -364,7 +348,7 @@ describe(useOutputHistory, () => {
it('resolves pending jobs when history outputs load', async () => {
useAppModeStore().selectedOutputs.push('1')
const results = [makeResult('a.png')]
const asset = makeJob('job-1', {
const asset = makeAsset('a1', 'job-1', {
allOutputs: results,
outputCount: 1
})
@@ -383,7 +367,7 @@ describe(useOutputHistory, () => {
it('does not select first history when a selection exists', async () => {
useAppModeStore().selectedOutputs.push('1')
const results = [makeResult('a.png')]
const asset = makeJob('job-1', {
const asset = makeAsset('a1', 'job-1', {
allOutputs: results,
outputCount: 1
})
@@ -413,7 +397,7 @@ describe(useOutputHistory, () => {
describe('selectFirstHistory', () => {
it('selects first media item', () => {
jobIdToPathRef.value = new Map([['job-1', 'workflows/test.json']])
mediaRef.value = [makeJob('job-1')]
mediaRef.value = [makeAsset('a1', 'job-1')]
const { selectFirstHistory } = useOutputHistory()
selectFirstHistory()

View File

@@ -1,35 +1,37 @@
import { useAsyncState } from '@vueuse/core'
import type { ComputedRef } from 'vue'
import { computed, watchEffect } from 'vue'
import { computed, ref, watchEffect } from 'vue'
import type { JobListItem } from '@/platform/remote/comfyui/jobs/jobTypes'
import type { IAssetsProvider } from '@/platform/assets/composables/media/IAssetsProvider'
import { useMediaAssets } from '@/platform/assets/composables/media/useMediaAssets'
import { getOutputAssetMetadata } from '@/platform/assets/schemas/assetMetadataSchema'
import type { AssetItem } from '@/platform/assets/schemas/assetSchema'
import { useWorkflowStore } from '@/platform/workflow/management/stores/workflowStore'
import { flattenNodeOutput } from '@/renderer/extensions/linearMode/flattenNodeOutput'
import { useLinearOutputStore } from '@/renderer/extensions/linearMode/linearOutputStore'
import { getJobDetail } from '@/services/jobOutputCache'
import { api } from '@/scripts/api'
import { useAppModeStore } from '@/stores/appModeStore'
import { useCommandStore } from '@/stores/commandStore'
import { useExecutionStore } from '@/stores/executionStore'
import {
TaskItemImpl,
useHistoryStore,
useQueueStore
} from '@/stores/queueStore'
import { useQueueStore } from '@/stores/queueStore'
import type { ResultItemImpl } from '@/stores/queueStore'
export function useOutputHistory(): {
outputs: ComputedRef<JobListItem[]>
allOutputs: (item?: JobListItem) => readonly ResultItemImpl[]
outputs: IAssetsProvider
allOutputs: (item?: AssetItem) => ResultItemImpl[]
selectFirstHistory: () => void
mayBeActiveWorkflowPending: ComputedRef<boolean>
isWorkflowActive: ComputedRef<boolean>
cancelActiveWorkflowJobs: () => Promise<void>
} {
const backingOutputs = useMediaAssets('output')
void backingOutputs.fetchMediaList()
const linearStore = useLinearOutputStore()
const workflowStore = useWorkflowStore()
const executionStore = useExecutionStore()
const appModeStore = useAppModeStore()
const queueStore = useQueueStore()
const historyStore = useHistoryStore()
function matchesActiveWorkflow(task: { jobId: string | number }): boolean {
const path = workflowStore.activeWorkflow?.path
@@ -58,9 +60,7 @@ export function useOutputHistory(): {
hasActiveWorkflowJobs()
)
function filterByOutputNodes(
items: readonly ResultItemImpl[]
): readonly ResultItemImpl[] {
function filterByOutputNodes(items: ResultItemImpl[]): ResultItemImpl[] {
const nodeIds = appModeStore.selectedOutputs
if (!nodeIds.length) return []
return items.filter((r) =>
@@ -74,45 +74,76 @@ export function useOutputHistory(): {
const pathMap = executionStore.jobIdToSessionWorkflowPath
return historyStore.historyItems.filter(
(item) => pathMap.get(item.id) === path
)
return backingOutputs.media.value.filter((asset) => {
const m = getOutputAssetMetadata(asset?.user_metadata)
return m ? pathMap.get(m.jobId) === path : false
})
})
const outputs: IAssetsProvider = {
...backingOutputs,
media: sessionMedia,
hasMore: ref(false),
isLoadingMore: ref(false),
loadMore: async () => {}
}
const resolvedCache = linearStore.resolvedOutputsCache
const asyncRefs = new Map<
string,
ReturnType<typeof useAsyncState<readonly ResultItemImpl[]>>['state']
ReturnType<typeof useAsyncState<ResultItemImpl[]>>['state']
>()
function allOutputs(item?: JobListItem): readonly ResultItemImpl[] {
function allOutputs(item?: AssetItem): ResultItemImpl[] {
if (!item?.id) return []
const cached = resolvedCache.get(item.id)
if (cached) return filterByOutputNodes(cached)
/*FIXME
const user_metadata = getOutputAssetMetadata(item.user_metadata)
if (!user_metadata) return []
// For recently completed jobs still pending resolve, derive order from
// the in-progress items which are in correct execution order.
if (linearStore.pendingResolve.has(item.Id)) {
if (linearStore.pendingResolve.has(user_metadata.jobId)) {
const ordered = linearStore.inProgressItems
.filter((i) => i.id === item.id && i.output)
.filter((i) => i.jobId === user_metadata.jobId && i.output)
.map((i) => i.output!)
if (ordered.length > 0) {
resolvedCache.set(item.id, ordered)
return filterByOutputNodes(ordered)
}
}*/
}
// Use metadata when all outputs are present. The /jobs list endpoint
// only returns preview_output (single item), so outputCount may exceed
// allOutputs.length for multi-output jobs.
if (
user_metadata.allOutputs?.length &&
(!user_metadata.outputCount ||
user_metadata.outputCount <= user_metadata.allOutputs.length) &&
item.preview_url
) {
const reversed = user_metadata.allOutputs.toReversed()
resolvedCache.set(item.id, reversed)
return filterByOutputNodes(reversed)
}
// Async fallback for multi-output jobs — fetch full /jobs/{id} detail.
// This can be hit if the user executes the job then switches tabs.
const existing = asyncRefs.get(item.id)
if (existing) return filterByOutputNodes(existing.value)
const itemId = item.id
const outputRef = useAsyncState(
new TaskItemImpl(item)
.loadFullOutputs()
.then((item) => item.calculateFlatOutputs()),
getJobDetail(user_metadata.jobId).then((jobDetail) => {
if (!jobDetail?.outputs) return []
const results = Object.entries(jobDetail.outputs)
.flatMap(flattenNodeOutput)
.toReversed()
resolvedCache.set(itemId, results)
return results
}),
[]
).state
asyncRefs.set(item.id, outputRef)
@@ -120,7 +151,7 @@ export function useOutputHistory(): {
}
function selectFirstHistory() {
const first = historyStore.historyItems[0]
const first = outputs.media.value[0]
if (first) {
linearStore.selectAsLatest(`history:${first.id}:0`)
} else {
@@ -132,9 +163,12 @@ export function useOutputHistory(): {
watchEffect(() => {
if (linearStore.pendingResolve.size === 0) return
for (const jobId of linearStore.pendingResolve) {
const job = historyStore.historyItems.find((j) => j.id === jobId)
if (!job) continue
const loaded = allOutputs(job).length > 0
const asset = outputs.media.value.find((a) => {
const m = getOutputAssetMetadata(a?.user_metadata)
return m?.jobId === jobId
})
if (!asset) continue
const loaded = allOutputs(asset).length > 0
if (loaded) {
linearStore.resolveIfReady(jobId, true)
if (!linearStore.selectedId) selectFirstHistory()
@@ -160,7 +194,7 @@ export function useOutputHistory(): {
}
return {
outputs: sessionMedia,
outputs,
allOutputs,
selectFirstHistory,
mayBeActiveWorkflowPending,

View File

@@ -0,0 +1,67 @@
import { describe, expect, it } from 'vitest'
import { getWebpMetadata } from './pnginfo'
/**
 * Build a minimal little-endian TIFF/EXIF body whose single IFD entry is an
 * ASCII tag (id 0) holding `workflow:<json>\0` — the shape getWebpMetadata
 * expects inside a WEBP EXIF chunk.
 */
function buildExifPayload(workflowJson: string): Uint8Array {
  const HEADER_SIZE = 22
  const payload = new TextEncoder().encode(`workflow:${workflowJson}\0`)
  const out = new Uint8Array(HEADER_SIZE + payload.length)
  const view = new DataView(out.buffer)
  // TIFF header: "II" (little-endian), magic 42, offset of first IFD (8).
  out[0] = 0x49
  out[1] = 0x49
  view.setUint16(2, 0x002a, true)
  view.setUint32(4, 8, true)
  // IFD with one entry: tag 0, type 2 (ASCII), count = byte length,
  // value offset pointing just past this header.
  view.setUint16(8, 1, true)
  view.setUint16(10, 0, true)
  view.setUint16(12, 2, true)
  view.setUint32(14, payload.length, true)
  view.setUint32(18, HEADER_SIZE, true)
  out.set(payload, HEADER_SIZE)
  return out
}
/**
 * Assemble an in-memory WEBP file: RIFF header, a VP8 chunk with the given
 * declared length (possibly odd), then an EXIF chunk carrying the workflow.
 * Odd-length chunks are followed by one RIFF pad byte — exactly the layout
 * the regression test for #8527 exercises.
 */
function buildWebp(precedingChunkLength: number, workflowJson: string): File {
  const exifPayload = buildExifPayload(workflowJson)
  // RIFF mandates even chunk strides, so odd payloads get a pad byte.
  const paddedLength = precedingChunkLength + (precedingChunkLength % 2)
  const totalSize = 12 + (8 + paddedLength) + (8 + exifPayload.length)
  const bytes = new Uint8Array(totalSize)
  const view = new DataView(bytes.buffer)
  // "RIFF" <file size> "WEBP"
  bytes.set([0x52, 0x49, 0x46, 0x46], 0)
  view.setUint32(4, totalSize - 8, true)
  bytes.set([0x57, 0x45, 0x42, 0x50], 8)
  // "VP8 " chunk header carries the *unpadded* length.
  bytes.set([0x56, 0x50, 0x38, 0x20], 12)
  view.setUint32(16, precedingChunkLength, true)
  // "EXIF" chunk begins after the padded body of the preceding chunk.
  const exifStart = 20 + paddedLength
  bytes.set([0x45, 0x58, 0x49, 0x46], exifStart)
  view.setUint32(exifStart + 4, exifPayload.length, true)
  bytes.set(exifPayload, exifStart + 8)
  return new File([bytes], 'test.webp', { type: 'image/webp' })
}
describe('getWebpMetadata', () => {
  // Regression for #8527: the chunk stride must include the RIFF pad byte,
  // otherwise the parser walks one byte short after an odd-sized chunk and
  // never finds the EXIF chunk.
  it('finds workflow when a preceding chunk has odd length (RIFF padding)', async () => {
    const workflow = '{"nodes":[]}'
    const metadata = await getWebpMetadata(buildWebp(3, workflow))
    expect(metadata.workflow).toBe(workflow)
  })
  // Guards against an over-correction that unconditionally adds a pad byte.
  it('finds workflow when preceding chunk has even length (no padding)', async () => {
    const workflow = '{"nodes":[1]}'
    const metadata = await getWebpMetadata(buildWebp(4, workflow))
    expect(metadata.workflow).toBe(workflow)
  })
})

View File

@@ -1,6 +1,6 @@
import { useAsyncState, whenever } from '@vueuse/core'
import { difference } from 'es-toolkit'
import { defineStore, storeToRefs } from 'pinia'
import { defineStore } from 'pinia'
import { computed, reactive, ref, shallowReactive } from 'vue'
import {
mapInputFileToAssetItem,
@@ -13,7 +13,7 @@ import { isCloud } from '@/platform/distribution/types'
import type { JobListItem } from '@/platform/remote/comfyui/jobs/jobTypes'
import { api } from '@/scripts/api'
import { TaskItemImpl, useHistoryStore } from './queueStore'
import { TaskItemImpl } from './queueStore'
import { useAssetDownloadStore } from './assetDownloadStore'
import { useModelToNodeStore } from './modelToNodeStore'
@@ -84,16 +84,12 @@ function mapHistoryToAssets(historyItems: JobListItem[]): AssetItem[] {
)
}
const BATCH_SIZE = 200
const MAX_HISTORY_ITEMS = 1000 // Maximum items to keep in memory
export const useAssetsStore = defineStore('assets', () => {
const assetDownloadStore = useAssetDownloadStore()
const modelToNodeStore = useModelToNodeStore()
const historyStore = useHistoryStore()
const { isLoadingMore, hasMoreHistory, historyError } =
storeToRefs(historyStore)
const historyAssets = computed(() =>
mapHistoryToAssets(historyStore.historyItems)
)
// Track assets currently being deleted (for loading overlay)
const deletingAssetIds = shallowReactive(new Set<string>())
@@ -110,6 +106,15 @@ export const useAssetsStore = defineStore('assets', () => {
return deletingAssetIds.has(assetId)
}
// Pagination state
const historyOffset = ref(0)
const hasMoreHistory = ref(true)
const isLoadingMore = ref(false)
const allHistoryItems = ref<AssetItem[]>([])
const loadedIds = shallowReactive(new Set<string>())
const fetchInputFiles = isCloud
? fetchInputFilesFromCloud
: fetchInputFilesFromAPI
@@ -127,6 +132,120 @@ export const useAssetsStore = defineStore('assets', () => {
}
})
/**
* Fetch history assets with pagination support
* @param loadMore - true for pagination (append), false for initial load (replace)
*/
const fetchHistoryAssets = async (loadMore = false): Promise<AssetItem[]> => {
// Reset state for initial load
if (!loadMore) {
historyOffset.value = 0
hasMoreHistory.value = true
allHistoryItems.value = []
loadedIds.clear()
}
// Fetch from server with offset
const history = await api.getHistory(BATCH_SIZE, {
offset: historyOffset.value
})
// Convert JobListItems to AssetItems
const newAssets = mapHistoryToAssets(history)
if (loadMore) {
// Filter out duplicates and insert in sorted order
for (const asset of newAssets) {
if (loadedIds.has(asset.id)) {
continue // Skip duplicates
}
loadedIds.add(asset.id)
// Find insertion index to maintain sorted order (newest first)
const assetTime = new Date(asset.created_at ?? 0).getTime()
const insertIndex = allHistoryItems.value.findIndex(
(item) => new Date(item.created_at ?? 0).getTime() < assetTime
)
if (insertIndex === -1) {
// Asset is oldest, append to end
allHistoryItems.value.push(asset)
} else {
// Insert at the correct position
allHistoryItems.value.splice(insertIndex, 0, asset)
}
}
} else {
// Initial load: replace all
allHistoryItems.value = newAssets
newAssets.forEach((asset) => loadedIds.add(asset.id))
}
// Update pagination state
historyOffset.value += BATCH_SIZE
hasMoreHistory.value = history.length === BATCH_SIZE
if (allHistoryItems.value.length > MAX_HISTORY_ITEMS) {
const removed = allHistoryItems.value.slice(MAX_HISTORY_ITEMS)
allHistoryItems.value = allHistoryItems.value.slice(0, MAX_HISTORY_ITEMS)
// Clean up Set
removed.forEach((item) => loadedIds.delete(item.id))
}
return allHistoryItems.value
}
const historyAssets = ref<AssetItem[]>([])
const historyLoading = ref(false)
const historyError = ref<unknown>(null)
/**
* Initial load of history assets
*/
const updateHistory = async () => {
historyLoading.value = true
historyError.value = null
try {
await fetchHistoryAssets(false)
historyAssets.value = allHistoryItems.value
} catch (err) {
console.error('Error fetching history assets:', err)
historyError.value = err
// Keep existing data when error occurs
if (!historyAssets.value.length) {
historyAssets.value = []
}
} finally {
historyLoading.value = false
}
}
/**
* Load more history items (infinite scroll)
*/
const loadMoreHistory = async () => {
// Guard: prevent concurrent loads and check if more items available
if (!hasMoreHistory.value || isLoadingMore.value) return
isLoadingMore.value = true
historyError.value = null
try {
await fetchHistoryAssets(true)
historyAssets.value = allHistoryItems.value
} catch (err) {
console.error('Error loading more history:', err)
historyError.value = err
// Keep existing data when error occurs (consistent with updateHistory)
if (!historyAssets.value.length) {
historyAssets.value = []
}
} finally {
isLoadingMore.value = false
}
}
/**
* Map of asset hash filename to asset item for O(1) lookup
* Cloud assets use asset_hash for the hash-based filename
@@ -607,7 +726,7 @@ export const useAssetsStore = defineStore('assets', () => {
inputAssets,
historyAssets,
inputLoading,
historyLoading: isLoadingMore,
historyLoading,
inputError,
historyError,
hasMoreHistory,
@@ -620,8 +739,8 @@ export const useAssetsStore = defineStore('assets', () => {
// Actions
updateInputs,
updateHistory: historyStore.updateHistory,
loadMoreHistory: historyStore.loadMoreHistory,
updateHistory,
loadMoreHistory,
// Input mapping helpers
inputAssetsByFilename,

View File

@@ -1,13 +1,5 @@
import { defineStore } from 'pinia'
import {
computed,
ref,
shallowRef,
toRaw,
toValue,
watch,
watchEffect
} from 'vue'
import { computed, ref, shallowRef, toRaw, toValue } from 'vue'
import { extractWorkflow } from '@/platform/remote/comfyui/jobs/fetchJobs'
import type {
@@ -482,76 +474,6 @@ export class TaskItemImpl {
)
}
}
export const useHistoryStore = defineStore('history', () => {
const BATCH_SIZE = 200
const MAX_HISTORY_ITEMS = 1000 // Maximum items to keep in memory
let offset = 0
const hasMoreHistory = ref(true)
const isLoadingMore = ref(false)
const historyItems = ref<JobListItem[]>([])
const historyError = ref<unknown>(null)
const loadedIds = new Set<string>()
const fetchHistory = async (): Promise<JobListItem[]> => {
const history = await api.getHistory(BATCH_SIZE, { offset })
const newHistory = history.filter((item) => !loadedIds.has(item.id))
historyItems.value.push(...newHistory)
historyItems.value.sort((a, b) => a.create_time - b.create_time)
newHistory.forEach((item) => loadedIds.add(item.id))
offset += BATCH_SIZE
hasMoreHistory.value = history.length === BATCH_SIZE
if (historyItems.value.length > MAX_HISTORY_ITEMS) {
const removed = historyItems.value.slice(MAX_HISTORY_ITEMS)
historyItems.value = historyItems.value.slice(0, MAX_HISTORY_ITEMS)
removed.forEach((item) => loadedIds.delete(item.id))
}
return historyItems.value
}
const updateHistory = async () => {
offset = 0
hasMoreHistory.value = true
historyItems.value = []
loadedIds.clear()
await loadMoreHistory()
}
const loadMoreHistory = async () => {
if (!hasMoreHistory.value || isLoadingMore.value) return
if (isLoadingMore.value) {
await new Promise((r) => watch(isLoadingMore, r, { once: true }))
return
}
isLoadingMore.value = true
historyError.value = null
try {
await fetchHistory()
} catch (err) {
console.error('Error loading more history:', err)
historyError.value = err
} finally {
isLoadingMore.value = false
}
}
void loadMoreHistory()
return {
hasMoreHistory,
historyError,
historyItems,
isLoadingMore,
loadMoreHistory,
updateHistory
}
})
export const useQueueStore = defineStore('queue', () => {
// Use shallowRef because TaskItemImpl instances are immutable and arrays are
@@ -563,15 +485,6 @@ export const useQueueStore = defineStore('queue', () => {
const maxHistoryItems = ref(64)
const isLoading = ref(false)
const historyStore = useHistoryStore()
//TODO: Fix tests so this can be a computed
watchEffect(
() =>
(historyTasks.value = historyStore.historyItems
.slice(0, toValue(maxHistoryItems))
.map((job) => new TaskItemImpl(job)))
)
// Single-flight coalescing: at most one fetch in flight at a time.
// If update() is called while a fetch is running, the call is coalesced
// and a single re-fetch fires after the current one completes.
@@ -612,15 +525,17 @@ export const useQueueStore = defineStore('queue', () => {
dirty = false
isLoading.value = true
try {
const [queue] = await Promise.all([
const [queue, history] = await Promise.all([
api.getQueue(),
historyStore.updateHistory()
api.getHistory(maxHistoryItems.value)
])
// API returns pre-sorted data (sort_by=create_time&order=desc)
runningTasks.value = queue.Running.map((job) => new TaskItemImpl(job))
pendingTasks.value = queue.Pending.map((job) => new TaskItemImpl(job))
const currentHistory = toValue(historyTasks)
const appearedTasks = [...pendingTasks.value, ...runningTasks.value]
const executionStore = useExecutionStore()
appearedTasks.forEach((task) => {
@@ -642,6 +557,36 @@ export const useQueueStore = defineStore('queue', () => {
])
executionStore.reconcileInitializingJobs(activeJobIds)
}
// Sort by create_time descending and limit to maxItems
const sortedHistory = [...history]
.sort((a, b) => b.create_time - a.create_time)
.slice(0, toValue(maxHistoryItems))
// Reuse existing TaskItemImpl instances or create new
// Must recreate if outputs_count changed (e.g., API started returning it)
const existingByJobId = new Map(
currentHistory.map((impl) => [impl.jobId, impl])
)
const nextHistoryTasks = sortedHistory.map((job) => {
const existing = existingByJobId.get(job.id)
if (!existing) return new TaskItemImpl(job)
// Recreate if outputs_count changed to ensure lazy loading works
if (existing.outputsCount !== (job.outputs_count ?? undefined)) {
return new TaskItemImpl(job)
}
return existing
})
const isHistoryUnchanged =
nextHistoryTasks.length === currentHistory.length &&
nextHistoryTasks.every((task, index) => task === currentHistory[index])
if (!isHistoryUnchanged) {
historyTasks.value = nextHistoryTasks
}
hasFetchedHistorySnapshot.value = true
} finally {
isLoading.value = false
inFlight = false

View File

@@ -0,0 +1,52 @@
import { describe, expect, it } from 'vitest'
import type { ISerialisedGraph } from '@/lib/litegraph/src/litegraph'
import type { SystemStats } from '@/schemas/apiSchema'
import type { ErrorReportData } from './errorReportUtil'
import { generateErrorReport } from './errorReportUtil'
// Minimal SystemStats satisfying the schema; the concrete field values are
// irrelevant to the serverLogs formatting behavior under test.
const baseSystemStats: SystemStats = {
  system: {
    os: 'linux',
    comfyui_version: '1.0.0',
    python_version: '3.11',
    pytorch_version: '2.0',
    embedded_python: false,
    argv: ['main.py'],
    ram_total: 0,
    ram_free: 0
  },
  devices: []
}
// Bare workflow stub; double-cast because the test never reads graph fields.
const baseWorkflow = { nodes: [], links: [] } as unknown as ISerialisedGraph
/**
 * Wrap arbitrary `serverLogs` into an ErrorReportData fixture. The cast to
 * `string` is deliberate: the regression under test feeds object-shaped logs
 * through a field declared as string.
 */
function buildError(serverLogs: unknown): ErrorReportData {
  const report: ErrorReportData = {
    exceptionType: 'RuntimeError',
    exceptionMessage: 'boom',
    systemStats: baseSystemStats,
    serverLogs: serverLogs as string,
    workflow: baseWorkflow
  }
  return report
}
describe('generateErrorReport', () => {
  it('embeds string serverLogs verbatim', () => {
    const logs = 'line one\nline two'
    const report = generateErrorReport(buildError(logs))
    expect(report).toContain(logs)
    expect(report).not.toContain('[object Object]')
  })
  // Regression for #8460: object-shaped logs must be JSON.stringify'd rather
  // than coerced via String(), which renders "[object Object]".
  it('stringifies object serverLogs instead of rendering [object Object]', () => {
    const objectLogs = { entries: [{ msg: 'hello' }] }
    const report = generateErrorReport(buildError(objectLogs))
    expect(report).not.toContain('[object Object]')
    expect(report).toContain('"entries"')
    expect(report).toContain('"msg": "hello"')
  })
})

View File

@@ -76,6 +76,7 @@ import { app } from '@/scripts/app'
import { setupAutoQueueHandler } from '@/services/autoQueueService'
import { useKeybindingService } from '@/platform/keybindings/keybindingService'
import { useAppMode } from '@/composables/useAppMode'
import { useAssetsStore } from '@/stores/assetsStore'
import { useCommandStore } from '@/stores/commandStore'
import { useExecutionStore } from '@/stores/executionStore'
import { useAuthStore } from '@/stores/authStore'
@@ -105,6 +106,7 @@ const settingStore = useSettingStore()
// Store handles resolved once during component setup.
const executionStore = useExecutionStore()
const colorPaletteStore = useColorPaletteStore()
const queueStore = useQueueStore()
const assetsStore = useAssetsStore()
const versionCompatibilityStore = useVersionCompatibilityStore()
// Template ref for the graph canvas host element; starts null.
const graphCanvasContainerRef = ref<HTMLDivElement | null>(null)
const { isBuilderMode } = useAppMode()
@@ -224,14 +226,25 @@ void useBottomPanelStore().registerCoreBottomPanelTabs()
// Start queue polling and resolve the stores the websocket handlers read.
useQueuePolling()
const queuePendingTaskCountStore = useQueuePendingTaskCountStore()
const sidebarTabStore = useSidebarTabStore()
/**
 * Refresh the assets history, but only when the assets view is actually
 * visible (assets sidebar tab active, or linear mode). When the sidebar is
 * closed, AssetsSidebarTab.vue refreshes on mount instead, so skipping here
 * avoids redundant fetches. Extracted to remove the duplicated
 * condition-plus-comment that appeared verbatim in both handlers below.
 */
const refreshAssetsIfVisible = async () => {
  if (sidebarTabStore.activeSidebarTabId === 'assets' || linearMode.value) {
    await assetsStore.updateHistory()
  }
}
// Websocket status message: bump the pending-task count, sync the queue,
// then refresh assets if they are on screen.
const onStatus = async (e: CustomEvent<StatusWsMessageStatus>) => {
  queuePendingTaskCountStore.update(e)
  await queueStore.update()
  await refreshAssetsIfVisible()
}
// Execution finished successfully: sync the queue, then refresh assets if
// they are on screen.
const onExecutionSuccess = async () => {
  await queueStore.update()
  await refreshAssetsIfVisible()
}
// Reconnect lifecycle callbacks — presumably wired to websocket
// reconnect events further down; wiring not visible in this hunk.
const { onReconnecting, onReconnected } = useReconnectingNotification()