Mirror of https://github.com/Comfy-Org/ComfyUI_frontend.git (synced 2026-02-19 22:34:15 +00:00)
chore: migrate tests from tests-ui/ to colocate with source files (#7811)
## Summary

Migrates all unit tests from `tests-ui/` to colocate with their source files in `src/`, improving discoverability and maintainability.

## Changes

- **What**: Relocated all unit tests to be adjacent to the code they test, following the `<source>.test.ts` naming convention
- **Config**: Updated `vitest.config.ts` to remove the `tests-ui` include pattern and the `@tests-ui` alias (see the sketch after this message)
- **Docs**: Moved testing documentation to `docs/testing/` with updated paths and patterns

## Review Focus

- Migration patterns documented in `temp/plans/migrate-tests-ui-to-src.md`
- Tests use `@/` path aliases instead of relative imports
- Shared fixtures placed in `__fixtures__/` directories

Issue is synchronized with this [Notion page](https://www.notion.so/PR-7811-chore-migrate-tests-from-tests-ui-to-colocate-with-source-files-2da6d73d36508147a4cce85365dee614) by [Unito](https://www.unito.io)

---------

Co-authored-by: Amp <amp@ampcode.com>
Co-authored-by: GitHub Action <action@github.com>
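For reviewers unfamiliar with the config side of the migration, here is a minimal sketch of what removing the `tests-ui` include pattern and `@tests-ui` alias from `vitest.config.ts` might look like. Everything other than the two removed entries (the remaining glob, the alias target, the surrounding options) is an illustrative assumption, not the repo's actual config:

```ts
// vitest.config.ts — a minimal sketch, not the repo's actual config.
import { fileURLToPath } from 'node:url'
import { defineConfig } from 'vitest/config'

export default defineConfig({
  resolve: {
    alias: {
      // The '@tests-ui' alias is gone; colocated tests import through
      // the same '@/' alias as the source files they sit next to.
      '@': fileURLToPath(new URL('./src', import.meta.url))
    }
  },
  test: {
    // Previously something like ['src/**/*.test.ts', 'tests-ui/**/*.test.ts'];
    // after the migration a single src/ glob matches every <source>.test.ts.
    include: ['src/**/*.test.ts']
  }
})
```

With only the `src/` glob, any new colocated `<source>.test.ts` file is picked up automatically, which is the main payoff of dropping the separate `tests-ui/` tree.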
src/scripts/api.featureFlags.test.ts (new file, 210 lines)
@@ -0,0 +1,210 @@
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'

import { api } from '@/scripts/api'

describe('API Feature Flags', () => {
  let mockWebSocket: any
  const wsEventHandlers: { [key: string]: (event: any) => void } = {}

  beforeEach(() => {
    // Use fake timers
    vi.useFakeTimers()

    // Mock WebSocket
    mockWebSocket = {
      readyState: 1, // WebSocket.OPEN
      send: vi.fn(),
      close: vi.fn(),
      addEventListener: vi.fn(
        (event: string, handler: (event: any) => void) => {
          wsEventHandlers[event] = handler
        }
      ),
      removeEventListener: vi.fn()
    }

    // Mock WebSocket constructor
    vi.stubGlobal('WebSocket', function (this: WebSocket) {
      Object.assign(this, mockWebSocket)
    })

    // Reset API state
    api.serverFeatureFlags = {}

    // Mock getClientFeatureFlags to return test feature flags
    vi.spyOn(api, 'getClientFeatureFlags').mockReturnValue({
      supports_preview_metadata: true,
      api_version: '1.0.0',
      capabilities: ['bulk_operations', 'async_nodes']
    })
  })

  afterEach(() => {
    vi.useRealTimers()
    vi.restoreAllMocks()
  })

  describe('Feature flags negotiation', () => {
    it('should send client feature flags as first message on connection', async () => {
      // Initialize API connection
      const initPromise = api.init()

      // Simulate connection open
      wsEventHandlers['open'](new Event('open'))

      // Check that feature flags were sent as first message
      expect(mockWebSocket.send).toHaveBeenCalledTimes(1)
      const sentMessage = JSON.parse(mockWebSocket.send.mock.calls[0][0])
      expect(sentMessage).toEqual({
        type: 'feature_flags',
        data: {
          supports_preview_metadata: true,
          api_version: '1.0.0',
          capabilities: ['bulk_operations', 'async_nodes']
        }
      })

      // Simulate server response with status message
      wsEventHandlers['message']({
        data: JSON.stringify({
          type: 'status',
          data: {
            status: { exec_info: { queue_remaining: 0 } },
            sid: 'test-sid'
          }
        })
      })

      // Simulate server feature flags response
      wsEventHandlers['message']({
        data: JSON.stringify({
          type: 'feature_flags',
          data: {
            supports_preview_metadata: true,
            async_execution: true,
            supported_formats: ['webp', 'jpeg', 'png'],
            api_version: '1.0.0',
            max_upload_size: 104857600,
            capabilities: ['isolated_nodes', 'dynamic_models']
          }
        })
      })

      await initPromise

      // Check that server features were stored
      expect(api.serverFeatureFlags).toEqual({
        supports_preview_metadata: true,
        async_execution: true,
        supported_formats: ['webp', 'jpeg', 'png'],
        api_version: '1.0.0',
        max_upload_size: 104857600,
        capabilities: ['isolated_nodes', 'dynamic_models']
      })
    })

    it('should handle server without feature flags support', async () => {
      // Initialize API connection
      const initPromise = api.init()

      // Simulate connection open
      wsEventHandlers['open'](new Event('open'))

      // Clear the send mock to reset
      mockWebSocket.send.mockClear()

      // Simulate server response with status but no feature flags
      wsEventHandlers['message']({
        data: JSON.stringify({
          type: 'status',
          data: {
            status: { exec_info: { queue_remaining: 0 } },
            sid: 'test-sid'
          }
        })
      })

      // Simulate some other message (not feature flags)
      wsEventHandlers['message']({
        data: JSON.stringify({
          type: 'execution_start',
          data: {}
        })
      })

      await initPromise

      // Server features should remain empty
      expect(api.serverFeatureFlags).toEqual({})
    })
  })

  describe('Feature checking methods', () => {
    beforeEach(() => {
      // Set up some test features
      api.serverFeatureFlags = {
        supports_preview_metadata: true,
        async_execution: false,
        capabilities: ['isolated_nodes', 'dynamic_models']
      }
    })

    it('should check if server supports a boolean feature', () => {
      expect(api.serverSupportsFeature('supports_preview_metadata')).toBe(true)
      expect(api.serverSupportsFeature('async_execution')).toBe(false)
      expect(api.serverSupportsFeature('non_existent_feature')).toBe(false)
    })

    it('should get server feature value', () => {
      expect(api.getServerFeature('supports_preview_metadata')).toBe(true)
      expect(api.getServerFeature('capabilities')).toEqual([
        'isolated_nodes',
        'dynamic_models'
      ])
      expect(api.getServerFeature('non_existent_feature')).toBeUndefined()
    })
  })

  describe('Client feature flags configuration', () => {
    it('should use mocked client feature flags', () => {
      // Verify mocked flags are returned
      const clientFlags = api.getClientFeatureFlags()
      expect(clientFlags).toEqual({
        supports_preview_metadata: true,
        api_version: '1.0.0',
        capabilities: ['bulk_operations', 'async_nodes']
      })
    })

    it('should return a copy of client feature flags', () => {
      // Temporarily restore the real implementation for this test
      vi.mocked(api.getClientFeatureFlags).mockRestore()

      // Verify that modifications to returned object don't affect original
      const clientFlags1 = api.getClientFeatureFlags()
      const clientFlags2 = api.getClientFeatureFlags()

      // Should be different objects
      expect(clientFlags1).not.toBe(clientFlags2)

      // But with same content
      expect(clientFlags1).toEqual(clientFlags2)

      // Modifying one should not affect the other
      clientFlags1.test_flag = true
      expect(api.getClientFeatureFlags()).not.toHaveProperty('test_flag')
    })
  })

  describe('Integration with preview messages', () => {
    it('should affect preview message handling based on feature support', () => {
      // Test with metadata support
      api.serverFeatureFlags = { supports_preview_metadata: true }
      expect(api.serverSupportsFeature('supports_preview_metadata')).toBe(true)

      // Test without metadata support
      api.serverFeatureFlags = {}
      expect(api.serverSupportsFeature('supports_preview_metadata')).toBe(false)
    })
  })
})
src/scripts/api.fetchApi.test.ts (new file, 171 lines)
@@ -0,0 +1,171 @@
import { beforeEach, describe, expect, it, vi } from 'vitest'

import { api } from '@/scripts/api'

// Mock global fetch
vi.stubGlobal('fetch', vi.fn())

describe('api.fetchApi', () => {
  beforeEach(() => {
    vi.resetAllMocks()

    // Reset api state
    api.user = 'test-user'
  })

  describe('header handling', () => {
    it('should add Comfy-User header with plain object headers', async () => {
      const mockFetch = vi
        .mocked(global.fetch)
        .mockResolvedValue(new Response())

      await api.fetchApi('/test', {
        headers: {}
      })

      expect(mockFetch).toHaveBeenCalledWith(
        expect.stringContaining('/test'),
        expect.objectContaining({
          headers: {
            'Comfy-User': 'test-user'
          }
        })
      )
    })

    it('should add Comfy-User header with Headers instance', async () => {
      const mockFetch = vi
        .mocked(global.fetch)
        .mockResolvedValue(new Response())
      const headers = new Headers()

      await api.fetchApi('/test', { headers })

      expect(mockFetch).toHaveBeenCalled()
      const callHeaders = mockFetch.mock.calls[0][1]?.headers
      expect(callHeaders).toEqual(headers)
    })

    it('should add Comfy-User header with array headers', async () => {
      const mockFetch = vi
        .mocked(global.fetch)
        .mockResolvedValue(new Response())
      const headers: [string, string][] = []

      await api.fetchApi('/test', { headers })

      expect(mockFetch).toHaveBeenCalled()
      const callHeaders = mockFetch.mock.calls[0][1]?.headers
      expect(callHeaders).toContainEqual(['Comfy-User', 'test-user'])
    })

    it('should preserve existing headers when adding Comfy-User', async () => {
      const mockFetch = vi
        .mocked(global.fetch)
        .mockResolvedValue(new Response())

      await api.fetchApi('/test', {
        headers: {
          'Content-Type': 'application/json',
          'X-Custom': 'value'
        }
      })

      expect(mockFetch).toHaveBeenCalledWith(
        expect.stringContaining('/test'),
        expect.objectContaining({
          headers: {
            'Content-Type': 'application/json',
            'X-Custom': 'value',
            'Comfy-User': 'test-user'
          }
        })
      )
    })

    it('should not allow developer-specified headers to be overridden by options', async () => {
      const mockFetch = vi
        .mocked(global.fetch)
        .mockResolvedValue(new Response())

      await api.fetchApi('/test', {
        headers: {
          'Comfy-User': 'fennec-girl'
        }
      })

      expect(mockFetch).toHaveBeenCalledWith(
        expect.stringContaining('/test'),
        expect.objectContaining({
          headers: {
            'Comfy-User': 'test-user'
          }
        })
      )
    })
  })

  describe('default options', () => {
    it('should set cache to no-cache by default', async () => {
      const mockFetch = vi
        .mocked(global.fetch)
        .mockResolvedValue(new Response())

      await api.fetchApi('/test')

      expect(mockFetch).toHaveBeenCalledWith(
        expect.any(String),
        expect.objectContaining({
          cache: 'no-cache'
        })
      )
    })

    it('should include required headers even when no headers option is provided', async () => {
      const mockFetch = vi
        .mocked(global.fetch)
        .mockResolvedValue(new Response())

      await api.fetchApi('/test')

      expect(mockFetch).toHaveBeenCalledWith(
        expect.any(String),
        expect.objectContaining({
          headers: expect.objectContaining({
            'Comfy-User': 'test-user'
          })
        })
      )
    })

    it('should not override existing cache option', async () => {
      const mockFetch = vi
        .mocked(global.fetch)
        .mockResolvedValue(new Response())

      await api.fetchApi('/test', { cache: 'force-cache' })

      expect(mockFetch).toHaveBeenCalledWith(
        expect.any(String),
        expect.objectContaining({
          cache: 'force-cache'
        })
      )
    })
  })

  describe('URL construction', () => {
    it('should use apiURL for route construction', async () => {
      const mockFetch = vi
        .mocked(global.fetch)
        .mockResolvedValue(new Response())

      await api.fetchApi('/test/route')

      expect(mockFetch).toHaveBeenCalledWith(
        expect.stringContaining('/api/test/route'),
        expect.any(Object)
      )
    })
  })
})
src/scripts/api.folderPaths.test.ts (new file, 30 lines)
@@ -0,0 +1,30 @@
import axios from 'axios'
import { beforeEach, describe, expect, it, vi } from 'vitest'

import { api } from '@/scripts/api'

vi.mock('axios')

describe('getFolderPaths', () => {
  beforeEach(() => {
    vi.resetAllMocks()
  })

  it('returns legacy API response when available', async () => {
    const mockResponse = { checkpoints: ['/test/checkpoints'] }
    vi.mocked(axios.get).mockResolvedValueOnce({ data: mockResponse })

    const result = await api.getFolderPaths()

    expect(result).toEqual(mockResponse)
  })

  it('returns empty object when legacy API unavailable (dynamic discovery)', async () => {
    vi.mocked(axios.get).mockRejectedValueOnce(new Error())

    const result = await api.getFolderPaths()

    // With dynamic discovery, we don't pre-generate directories when API is unavailable
    expect(result).toEqual({})
  })
})
src/scripts/domWidget.test.ts (new file, 82 lines)
@@ -0,0 +1,82 @@
import { describe, expect, test, vi } from 'vitest'

import { LGraphNode } from '@/lib/litegraph/src/litegraph'
import { ComponentWidgetImpl, DOMWidgetImpl } from '@/scripts/domWidget'

// Mock dependencies
vi.mock('@/stores/domWidgetStore', () => ({
  useDomWidgetStore: () => ({
    unregisterWidget: vi.fn()
  })
}))

vi.mock('@/utils/formatUtil', () => ({
  generateUUID: () => 'test-uuid'
}))

describe('DOMWidget Y Position Preservation', () => {
  test('BaseDOMWidgetImpl createCopyForNode preserves Y position', () => {
    const mockNode = new LGraphNode('test-node')
    const originalWidget = new ComponentWidgetImpl({
      node: mockNode,
      name: 'test-widget',
      component: { template: '<div></div>' },
      inputSpec: { name: 'test', type: 'string' },
      options: {}
    })

    // Set a specific Y position
    originalWidget.y = 66

    const newNode = new LGraphNode('new-node')
    const clonedWidget = originalWidget.createCopyForNode(newNode)

    // Verify Y position is preserved
    expect(clonedWidget.y).toBe(66)
    expect(clonedWidget.node).toBe(newNode)
    expect(clonedWidget.name).toBe('test-widget')
  })

  test('DOMWidgetImpl createCopyForNode preserves Y position', () => {
    const mockNode = new LGraphNode('test-node')
    const mockElement = document.createElement('div')

    const originalWidget = new DOMWidgetImpl({
      node: mockNode,
      name: 'test-dom-widget',
      type: 'test',
      element: mockElement,
      options: {}
    })

    // Set a specific Y position
    originalWidget.y = 42

    const newNode = new LGraphNode('new-node')
    const clonedWidget = originalWidget.createCopyForNode(newNode)

    // Verify Y position is preserved
    expect(clonedWidget.y).toBe(42)
    expect(clonedWidget.node).toBe(newNode)
    expect(clonedWidget.element).toBe(mockElement)
    expect(clonedWidget.name).toBe('test-dom-widget')
  })

  test('Y position defaults to 0 when not set', () => {
    const mockNode = new LGraphNode('test-node')
    const originalWidget = new ComponentWidgetImpl({
      node: mockNode,
      name: 'test-widget',
      component: { template: '<div></div>' },
      inputSpec: { name: 'test', type: 'string' },
      options: {}
    })

    // Don't explicitly set Y (should be 0 by default)
    const newNode = new LGraphNode('new-node')
    const clonedWidget = originalWidget.createCopyForNode(newNode)

    // Verify Y position is preserved (should be 0)
    expect(clonedWidget.y).toBe(0)
  })
})
src/scripts/metadata/gltf.test.ts (new file, 163 lines)
@@ -0,0 +1,163 @@
import { describe, expect, it } from 'vitest'

import { ASCII, GltfSizeBytes } from '@/types/metadataTypes'

import { getGltfBinaryMetadata } from './gltf'

describe('GLTF binary metadata parser', () => {
  const createGLTFFileStructure = () => {
    const header = new ArrayBuffer(GltfSizeBytes.HEADER)
    const headerView = new DataView(header)
    return { header, headerView }
  }

  const jsonToBinary = (json: object) => {
    const jsonString = JSON.stringify(json)
    const jsonData = new TextEncoder().encode(jsonString)
    return jsonData
  }

  const createJSONChunk = (jsonData: ArrayBuffer) => {
    const chunkHeader = new ArrayBuffer(GltfSizeBytes.CHUNK_HEADER)
    const chunkView = new DataView(chunkHeader)
    chunkView.setUint32(0, jsonData.byteLength, true)
    chunkView.setUint32(4, ASCII.JSON, true)
    return chunkHeader
  }

  const setVersionHeader = (headerView: DataView, version: number) => {
    headerView.setUint32(4, version, true)
  }

  const setTypeHeader = (headerView: DataView, type: number) => {
    headerView.setUint32(0, type, true)
  }

  const setTotalLengthHeader = (headerView: DataView, length: number) => {
    headerView.setUint32(8, length, true)
  }

  const setHeaders = (headerView: DataView, jsonData: ArrayBuffer) => {
    setTypeHeader(headerView, ASCII.GLTF)
    setVersionHeader(headerView, 2)
    setTotalLengthHeader(
      headerView,
      GltfSizeBytes.HEADER + GltfSizeBytes.CHUNK_HEADER + jsonData.byteLength
    )
  }

  function createMockGltfFile(jsonContent: object): File {
    const jsonData = jsonToBinary(jsonContent)
    const { header, headerView } = createGLTFFileStructure()

    setHeaders(headerView, jsonData.buffer)

    const chunkHeader = createJSONChunk(jsonData.buffer)

    const fileContent = new Uint8Array(
      header.byteLength + chunkHeader.byteLength + jsonData.byteLength
    )
    fileContent.set(new Uint8Array(header), 0)
    fileContent.set(new Uint8Array(chunkHeader), header.byteLength)
    fileContent.set(jsonData, header.byteLength + chunkHeader.byteLength)

    return new File([fileContent], 'test.glb', { type: 'model/gltf-binary' })
  }

  it('should extract workflow metadata from GLTF binary file', async () => {
    const testWorkflow = {
      nodes: [
        {
          id: 1,
          type: 'TestNode',
          pos: [100, 100]
        }
      ],
      links: []
    }

    const mockFile = createMockGltfFile({
      asset: {
        version: '2.0',
        generator: 'ComfyUI GLTF Test',
        extras: {
          workflow: testWorkflow
        }
      },
      scenes: []
    })

    const metadata = await getGltfBinaryMetadata(mockFile)

    expect(metadata).toBeDefined()
    expect(metadata.workflow).toBeDefined()

    const workflow = metadata.workflow as {
      nodes: Array<{ id: number; type: string }>
    }
    expect(workflow.nodes[0].id).toBe(1)
    expect(workflow.nodes[0].type).toBe('TestNode')
  })

  it('should extract prompt metadata from GLTF binary file', async () => {
    const testPrompt = {
      node1: {
        class_type: 'TestNode',
        inputs: {
          seed: 123456
        }
      }
    }

    const mockFile = createMockGltfFile({
      asset: {
        version: '2.0',
        generator: 'ComfyUI GLTF Test',
        extras: {
          prompt: testPrompt
        }
      },
      scenes: []
    })

    const metadata = await getGltfBinaryMetadata(mockFile)
    expect(metadata).toBeDefined()
    expect(metadata.prompt).toBeDefined()

    const prompt = metadata.prompt as Record<string, any>
    expect(prompt.node1.class_type).toBe('TestNode')
    expect(prompt.node1.inputs.seed).toBe(123456)
  })

  it('should handle string JSON content', async () => {
    const workflowStr = JSON.stringify({
      nodes: [{ id: 1, type: 'StringifiedNode' }],
      links: []
    })

    const mockFile = createMockGltfFile({
      asset: {
        version: '2.0',
        extras: {
          workflow: workflowStr // As string instead of object
        }
      }
    })

    const metadata = await getGltfBinaryMetadata(mockFile)

    expect(metadata).toBeDefined()
    expect(metadata.workflow).toBeDefined()

    const workflow = metadata.workflow as {
      nodes: Array<{ id: number; type: string }>
    }
    expect(workflow.nodes[0].type).toBe('StringifiedNode')
  })

  it('should handle invalid GLTF binary files gracefully', async () => {
    const invalidEmptyFile = new File([], 'invalid.glb')
    const metadata = await getGltfBinaryMetadata(invalidEmptyFile)
    expect(metadata).toEqual({})
  })
})
src/scripts/metadata/ply.test.ts (new file, 448 lines)
@@ -0,0 +1,448 @@
import { describe, expect, it } from 'vitest'

import { isPLYAsciiFormat, parseASCIIPLY } from '@/scripts/metadata/ply'

function createPLYBuffer(content: string): ArrayBuffer {
  return new TextEncoder().encode(content).buffer
}

describe('PLY metadata parser', () => {
  describe('isPLYAsciiFormat', () => {
    it('should return true for ASCII format PLY', () => {
      const ply = `ply
format ascii 1.0
element vertex 3
property float x
property float y
property float z
end_header
0 0 0
1 0 0
0 1 0`

      const buffer = createPLYBuffer(ply)
      expect(isPLYAsciiFormat(buffer)).toBe(true)
    })

    it('should return false for binary format PLY', () => {
      const ply = `ply
format binary_little_endian 1.0
element vertex 3
property float x
property float y
property float z
end_header`

      const buffer = createPLYBuffer(ply)
      expect(isPLYAsciiFormat(buffer)).toBe(false)
    })

    it('should return false for binary big endian format', () => {
      const ply = `ply
format binary_big_endian 1.0
element vertex 3
end_header`

      const buffer = createPLYBuffer(ply)
      expect(isPLYAsciiFormat(buffer)).toBe(false)
    })

    it('should handle empty buffer', () => {
      const buffer = new ArrayBuffer(0)
      expect(isPLYAsciiFormat(buffer)).toBe(false)
    })
  })

  describe('parseASCIIPLY', () => {
    it('should parse simple PLY with positions only', () => {
      const ply = `ply
format ascii 1.0
element vertex 3
property float x
property float y
property float z
end_header
0.0 0.0 0.0
1.0 0.0 0.0
0.0 1.0 0.0`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.vertexCount).toBe(3)
      expect(result!.colors).toBeNull()
      expect(result!.positions).toBeInstanceOf(Float32Array)
      expect(result!.positions.length).toBe(9)

      expect(result!.positions[0]).toBeCloseTo(0.0)
      expect(result!.positions[1]).toBeCloseTo(0.0)
      expect(result!.positions[2]).toBeCloseTo(0.0)

      expect(result!.positions[3]).toBeCloseTo(1.0)
      expect(result!.positions[4]).toBeCloseTo(0.0)
      expect(result!.positions[5]).toBeCloseTo(0.0)

      expect(result!.positions[6]).toBeCloseTo(0.0)
      expect(result!.positions[7]).toBeCloseTo(1.0)
      expect(result!.positions[8]).toBeCloseTo(0.0)
    })

    it('should parse PLY with positions and colors', () => {
      const ply = `ply
format ascii 1.0
element vertex 2
property float x
property float y
property float z
property uchar red
property uchar green
property uchar blue
end_header
1.0 2.0 3.0 255 128 0
-1.0 -2.0 -3.0 0 255 128`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.vertexCount).toBe(2)
      expect(result!.colors).not.toBeNull()
      expect(result!.colors).toBeInstanceOf(Float32Array)
      expect(result!.colors!.length).toBe(6)

      // First vertex position
      expect(result!.positions[0]).toBeCloseTo(1.0)
      expect(result!.positions[1]).toBeCloseTo(2.0)
      expect(result!.positions[2]).toBeCloseTo(3.0)

      // First vertex color (normalized to 0-1)
      expect(result!.colors![0]).toBeCloseTo(1.0) // 255/255
      expect(result!.colors![1]).toBeCloseTo(128 / 255)
      expect(result!.colors![2]).toBeCloseTo(0.0)

      // Second vertex color
      expect(result!.colors![3]).toBeCloseTo(0.0)
      expect(result!.colors![4]).toBeCloseTo(1.0)
      expect(result!.colors![5]).toBeCloseTo(128 / 255)
    })

    it('should handle properties in non-standard order', () => {
      const ply = `ply
format ascii 1.0
element vertex 1
property uchar red
property float z
property uchar green
property float x
property uchar blue
property float y
end_header
255 3.0 128 1.0 64 2.0`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.vertexCount).toBe(1)

      expect(result!.positions[0]).toBeCloseTo(1.0)
      expect(result!.positions[1]).toBeCloseTo(2.0)
      expect(result!.positions[2]).toBeCloseTo(3.0)

      expect(result!.colors![0]).toBeCloseTo(1.0)
      expect(result!.colors![1]).toBeCloseTo(128 / 255)
      expect(result!.colors![2]).toBeCloseTo(64 / 255)
    })

    it('should handle extra properties', () => {
      const ply = `ply
format ascii 1.0
element vertex 1
property float x
property float y
property float z
property float nx
property float ny
property float nz
property uchar red
property uchar green
property uchar blue
property uchar alpha
end_header
1.0 2.0 3.0 0.0 1.0 0.0 255 128 64 255`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.positions[0]).toBeCloseTo(1.0)
      expect(result!.positions[1]).toBeCloseTo(2.0)
      expect(result!.positions[2]).toBeCloseTo(3.0)

      expect(result!.colors![0]).toBeCloseTo(1.0)
      expect(result!.colors![1]).toBeCloseTo(128 / 255)
      expect(result!.colors![2]).toBeCloseTo(64 / 255)
    })

    it('should handle negative coordinates', () => {
      const ply = `ply
format ascii 1.0
element vertex 1
property float x
property float y
property float z
end_header
-1.5 -2.5 -3.5`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.positions[0]).toBeCloseTo(-1.5)
      expect(result!.positions[1]).toBeCloseTo(-2.5)
      expect(result!.positions[2]).toBeCloseTo(-3.5)
    })

    it('should handle scientific notation', () => {
      const ply = `ply
format ascii 1.0
element vertex 1
property float x
property float y
property float z
end_header
1.5e-3 2.5e+2 -3.5e1`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.positions[0]).toBeCloseTo(0.0015)
      expect(result!.positions[1]).toBeCloseTo(250)
      expect(result!.positions[2]).toBeCloseTo(-35)
    })

    it('should skip empty lines in vertex data', () => {
      const ply = `ply
format ascii 1.0
element vertex 2
property float x
property float y
property float z
end_header

1.0 0.0 0.0

0.0 1.0 0.0
`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.vertexCount).toBe(2)
      expect(result!.positions[0]).toBeCloseTo(1.0)
      expect(result!.positions[3]).toBeCloseTo(0.0)
      expect(result!.positions[4]).toBeCloseTo(1.0)
    })

    it('should handle whitespace variations', () => {
      const ply = `ply
format ascii 1.0
element vertex 1
property float x
property float y
property float z
end_header
1.0 2.0 3.0 `

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.positions[0]).toBeCloseTo(1.0)
      expect(result!.positions[1]).toBeCloseTo(2.0)
      expect(result!.positions[2]).toBeCloseTo(3.0)
    })

    it('should return null for invalid header - missing vertex count', () => {
      const ply = `ply
format ascii 1.0
property float x
property float y
property float z
end_header
1.0 2.0 3.0`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).toBeNull()
    })

    it('should return null for invalid header - missing x property', () => {
      const ply = `ply
format ascii 1.0
element vertex 1
property float y
property float z
end_header
2.0 3.0`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).toBeNull()
    })

    it('should return null for invalid header - missing y property', () => {
      const ply = `ply
format ascii 1.0
element vertex 1
property float x
property float z
end_header
1.0 3.0`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).toBeNull()
    })

    it('should return null for invalid header - missing z property', () => {
      const ply = `ply
format ascii 1.0
element vertex 1
property float x
property float y
end_header
1.0 2.0`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).toBeNull()
    })

    it('should return null for empty buffer', () => {
      const buffer = new ArrayBuffer(0)
      const result = parseASCIIPLY(buffer)

      expect(result).toBeNull()
    })

    it('should handle large vertex count', () => {
      const vertexCount = 1000
      let plyContent = `ply
format ascii 1.0
element vertex ${vertexCount}
property float x
property float y
property float z
end_header
`
      for (let i = 0; i < vertexCount; i++) {
        plyContent += `${i} ${i * 2} ${i * 3}\n`
      }

      const buffer = createPLYBuffer(plyContent)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.vertexCount).toBe(vertexCount)
      expect(result!.positions.length).toBe(vertexCount * 3)

      expect(result!.positions[0]).toBeCloseTo(0)
      expect(result!.positions[1]).toBeCloseTo(0)
      expect(result!.positions[2]).toBeCloseTo(0)

      const lastIdx = (vertexCount - 1) * 3
      expect(result!.positions[lastIdx]).toBeCloseTo(vertexCount - 1)
      expect(result!.positions[lastIdx + 1]).toBeCloseTo((vertexCount - 1) * 2)
      expect(result!.positions[lastIdx + 2]).toBeCloseTo((vertexCount - 1) * 3)
    })

    it('should handle partial color properties', () => {
      const ply = `ply
format ascii 1.0
element vertex 1
property float x
property float y
property float z
property uchar red
end_header
1.0 2.0 3.0 255`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      // hasColor is true but green/blue indices are -1, so colors won't be parsed
      expect(result!.positions[0]).toBeCloseTo(1.0)
    })

    it('should handle double property type', () => {
      const ply = `ply
format ascii 1.0
element vertex 1
property double x
property double y
property double z
end_header
1.123456789 2.987654321 3.111111111`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.positions[0]).toBeCloseTo(1.123456789)
      expect(result!.positions[1]).toBeCloseTo(2.987654321)
      expect(result!.positions[2]).toBeCloseTo(3.111111111)
    })

    it('should stop parsing at vertex count limit', () => {
      const ply = `ply
format ascii 1.0
element vertex 2
property float x
property float y
property float z
end_header
1.0 0.0 0.0
0.0 1.0 0.0
0.0 0.0 1.0
999 999 999`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.vertexCount).toBe(2)
      expect(result!.positions.length).toBe(6)
    })

    it('should handle face elements after vertices', () => {
      const ply = `ply
format ascii 1.0
element vertex 3
property float x
property float y
property float z
element face 1
property list uchar int vertex_indices
end_header
0.0 0.0 0.0
1.0 0.0 0.0
0.0 1.0 0.0
3 0 1 2`

      const buffer = createPLYBuffer(ply)
      const result = parseASCIIPLY(buffer)

      expect(result).not.toBeNull()
      expect(result!.vertexCount).toBe(3)
    })
  })
})