mirror of https://github.com/Comfy-Org/ComfyUI_frontend.git (synced 2026-05-04 13:12:10 +00:00)

Compare commits — 7 Commits
version-bu ... pysssss/sa
| Author | SHA1 | Date |
|---|---|---|
| | 0c7715bac8 | |
| | 5624bab342 | |
| | cac66cdc39 | |
| | 25a5cb4868 | |
| | 181b1ac245 | |
| | 55e21ac7e8 | |
| | 8011f0cd1e | |
177  scripts/generate-embedded-metadata-test-files.py  Normal file
@@ -0,0 +1,177 @@
#!/usr/bin/env python3
"""
Generate test fixture files for metadata parser tests.

Each fixture embeds the same workflow and prompt JSON, matching the
format the ComfyUI backend uses to write metadata.

Prerequisites:
    source ~/ComfyUI/.venv/bin/activate
    python3 scripts/generate-embedded-metadata-test-files.py

Output: src/scripts/metadata/__fixtures__/
"""

import json
import os
import struct
import subprocess

import av
from PIL import Image

REPO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
FIXTURES_DIR = os.path.join(REPO_ROOT, 'src', 'scripts', 'metadata', '__fixtures__')

WORKFLOW = {
    'nodes': [
        {
            'id': 1,
            'type': 'KSampler',
            'pos': [100, 100],
            'size': [200, 200],
        }
    ]
}
PROMPT = {'1': {'class_type': 'KSampler', 'inputs': {}}}

WORKFLOW_JSON = json.dumps(WORKFLOW, separators=(',', ':'))
PROMPT_JSON = json.dumps(PROMPT, separators=(',', ':'))


def out(name: str) -> str:
    return os.path.join(FIXTURES_DIR, name)


def report(name: str):
    size = os.path.getsize(out(name))
    print(f' {name} ({size} bytes)')


def make_1x1_image() -> Image.Image:
    return Image.new('RGB', (1, 1), (255, 0, 0))


def build_exif_bytes() -> bytes:
    """Build EXIF bytes matching the backend's tag assignments.

    Backend: 0x010F (Make) = "workflow:<json>", 0x0110 (Model) = "prompt:<json>"
    """
    img = make_1x1_image()
    exif = img.getexif()
    exif[0x010F] = f'workflow:{WORKFLOW_JSON}'
    exif[0x0110] = f'prompt:{PROMPT_JSON}'
    return exif.tobytes()


def inject_exif_prefix_in_webp(path: str):
    """Prepend Exif\\0\\0 to the EXIF chunk in a WEBP file.

    PIL always strips this prefix, so we re-inject it to test that code path.
    """
    data = bytearray(open(path, 'rb').read())
    off = 12
    while off < len(data):
        chunk_type = data[off:off + 4]
        chunk_len = struct.unpack_from('<I', data, off + 4)[0]
        if chunk_type == b'EXIF':
            prefix = b'Exif\x00\x00'
            data[off + 8:off + 8] = prefix
            struct.pack_into('<I', data, off + 4, chunk_len + len(prefix))
            riff_size = struct.unpack_from('<I', data, 4)[0]
            struct.pack_into('<I', data, 4, riff_size + len(prefix))
            break
        off += 8 + chunk_len + (chunk_len % 2)
    with open(path, 'wb') as f:
        f.write(data)


def generate_av_fixture(
    name: str,
    fmt: str,
    codec: str,
    rate: int = 44100,
    options: dict | None = None,
):
    """Generate an audio fixture via PyAV container.metadata[], matching the backend."""
    path = out(name)
    container = av.open(path, mode='w', format=fmt, options=options or {})
    stream = container.add_stream(codec, rate=rate)
    stream.layout = 'mono'

    container.metadata['prompt'] = PROMPT_JSON
    container.metadata['workflow'] = WORKFLOW_JSON

    sample_fmt = stream.codec_context.codec.audio_formats[0].name
    samples = stream.codec_context.frame_size or 1024
    frame = av.AudioFrame(format=sample_fmt, layout='mono', samples=samples)
    frame.rate = rate
    frame.pts = 0
    for packet in stream.encode(frame):
        container.mux(packet)
    for packet in stream.encode():
        container.mux(packet)
    container.close()
    report(name)


def generate_webp():
    img = make_1x1_image()
    exif = build_exif_bytes()

    img.save(out('with_metadata.webp'), 'WEBP', exif=exif)
    report('with_metadata.webp')

    img.save(out('with_metadata_exif_prefix.webp'), 'WEBP', exif=exif)
    inject_exif_prefix_in_webp(out('with_metadata_exif_prefix.webp'))
    report('with_metadata_exif_prefix.webp')


def generate_avif():
    img = make_1x1_image()
    exif = build_exif_bytes()
    img.save(out('with_metadata.avif'), 'AVIF', exif=exif)
    report('with_metadata.avif')


def generate_flac():
    generate_av_fixture('with_metadata.flac', 'flac', 'flac')


def generate_opus():
    generate_av_fixture('with_metadata.opus', 'opus', 'libopus', rate=48000)


def generate_mp3():
    generate_av_fixture('with_metadata.mp3', 'mp3', 'libmp3lame')


def generate_mp4():
    """Generate MP4 via ffmpeg CLI with QuickTime keys/ilst metadata."""
    path = out('with_metadata.mp4')
    subprocess.run([
        'ffmpeg', '-y', '-loglevel', 'error',
        '-f', 'lavfi', '-i', 'anullsrc=r=44100:cl=mono',
        '-t', '0.01', '-c:a', 'aac', '-b:a', '32k',
        '-movflags', 'use_metadata_tags',
        '-metadata', f'prompt={PROMPT_JSON}',
        '-metadata', f'workflow={WORKFLOW_JSON}',
        path,
    ], check=True)
    report('with_metadata.mp4')


def generate_webm():
    generate_av_fixture('with_metadata.webm', 'webm', 'libvorbis')


if __name__ == '__main__':
    print('Generating fixtures...')
    generate_webp()
    generate_avif()
    generate_flac()
    generate_opus()
    generate_mp3()
    generate_mp4()
    generate_webm()
    print('Done.')
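Note: every image fixture above encodes its payload with the same convention — an ASCII EXIF value of the form workflow:<json> or prompt:<json> written into the Make (0x010F) and Model (0x0110) tags. As a rough TypeScript sketch of the decoding side of that convention (the function name and shape here are illustrative, not the repository's actual parser):

// Illustrative only: split "workflow:{...}" / "prompt:{...}" EXIF ASCII
// values into a record of JSON strings keyed by "workflow" / "prompt".
function decodePrefixedExifValues(values: string[]): Record<string, string> {
  const result: Record<string, string> = {}
  for (const value of values) {
    const sep = value.indexOf(':')
    if (sep === -1) continue
    result[value.slice(0, sep)] = value.slice(sep + 1)
  }
  return result
}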
48  src/scripts/metadata/__fixtures__/helpers.ts  Normal file
@@ -0,0 +1,48 @@
import { vi } from 'vitest'

export const EXPECTED_WORKFLOW = {
  nodes: [{ id: 1, type: 'KSampler', pos: [100, 100], size: [200, 200] }]
}

export const EXPECTED_PROMPT = {
  '1': { class_type: 'KSampler', inputs: {} }
}

type ReadMethod = 'readAsText' | 'readAsArrayBuffer'

export function mockFileReaderError(method: ReadMethod): void {
  vi.spyOn(FileReader.prototype, method).mockImplementation(
    function (this: FileReader) {
      queueMicrotask(() =>
        this.onerror?.(new ProgressEvent('error') as ProgressEvent<FileReader>)
      )
    }
  )
}

export function mockFileReaderAbort(method: ReadMethod): void {
  vi.spyOn(FileReader.prototype, method).mockImplementation(
    function (this: FileReader) {
      queueMicrotask(() =>
        this.onabort?.(new ProgressEvent('abort') as ProgressEvent<FileReader>)
      )
    }
  )
}

export function mockFileReaderResult(
  method: ReadMethod,
  result: string | ArrayBuffer | null
): void {
  vi.spyOn(FileReader.prototype, method).mockImplementation(
    function (this: FileReader) {
      Object.defineProperty(this, 'result', {
        value: result,
        configurable: true
      })
      queueMicrotask(() =>
        this.onload?.(new ProgressEvent('load') as ProgressEvent<FileReader>)
      )
    }
  )
}
BIN  src/scripts/metadata/__fixtures__/with_metadata.avif  Normal file  (binary file not shown; 552 B)
BIN  src/scripts/metadata/__fixtures__/with_metadata.flac  Normal file  (binary file not shown)
BIN  src/scripts/metadata/__fixtures__/with_metadata.mp3  Normal file  (binary file not shown)
BIN  src/scripts/metadata/__fixtures__/with_metadata.mp4  Normal file  (binary file not shown)
BIN  src/scripts/metadata/__fixtures__/with_metadata.opus  Normal file  (binary file not shown)
BIN  src/scripts/metadata/__fixtures__/with_metadata.webm  Normal file  (binary file not shown)
BIN  src/scripts/metadata/__fixtures__/with_metadata.webp  Normal file  (binary file not shown; 266 B)
BIN  src/scripts/metadata/__fixtures__/with_metadata_exif_prefix.webp  Normal file  (binary file not shown; 272 B)
72  src/scripts/metadata/avif.test.ts  Normal file
@@ -0,0 +1,72 @@
import fs from 'fs'
import path from 'path'
import { afterEach, describe, expect, it, vi } from 'vitest'

import {
  EXPECTED_PROMPT,
  EXPECTED_WORKFLOW,
  mockFileReaderAbort,
  mockFileReaderError
} from './__fixtures__/helpers'
import { getFromAvifFile } from './avif'

const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.avif')

afterEach(() => vi.restoreAllMocks())

describe('AVIF metadata', () => {
  it('extracts workflow and prompt from EXIF data in ISOBMFF boxes', async () => {
    const bytes = fs.readFileSync(fixturePath)
    const file = new File([bytes], 'test.avif', { type: 'image/avif' })

    const result = await getFromAvifFile(file)

    expect(JSON.parse(result.workflow)).toEqual(EXPECTED_WORKFLOW)
    expect(JSON.parse(result.prompt)).toEqual(EXPECTED_PROMPT)
  })

  it('returns empty for non-AVIF data', async () => {
    vi.spyOn(console, 'error').mockImplementation(() => {})
    const file = new File([new Uint8Array(16)], 'fake.avif')

    const result = await getFromAvifFile(file)

    expect(result).toEqual({})
    expect(console.error).toHaveBeenCalledWith('Not a valid AVIF file')
  })

  it('returns empty when AVIF has valid ftyp but corrupt internal boxes', async () => {
    vi.spyOn(console, 'error').mockImplementation(() => {})

    const buf = new Uint8Array(40)
    const dv = new DataView(buf.buffer)
    dv.setUint32(0, 16)
    buf.set(new TextEncoder().encode('ftypavif'), 4)
    dv.setUint32(16, 24)
    buf.set(new TextEncoder().encode('meta'), 20)

    const file = new File([buf], 'corrupt.avif', { type: 'image/avif' })
    const result = await getFromAvifFile(file)

    expect(result).toEqual({})
    expect(console.error).toHaveBeenCalledWith(
      expect.stringContaining('Error parsing AVIF metadata'),
      expect.anything()
    )
  })

  describe('FileReader failure modes', () => {
    const file = new File([new Uint8Array(16)], 'test.avif')

    it('resolves empty when the FileReader fires error', async () => {
      vi.spyOn(console, 'error').mockImplementation(() => {})
      mockFileReaderError('readAsArrayBuffer')
      expect(await getFromAvifFile(file)).toEqual({})
    })

    it('resolves empty when the FileReader fires abort', async () => {
      mockFileReaderAbort('readAsArrayBuffer')
      expect(await getFromAvifFile(file)).toEqual({})
    })
  })
})
@@ -407,6 +407,7 @@ export function getFromAvifFile(file: File): Promise<Record<string, string>> {
      console.error('FileReader: Error reading AVIF file:', err)
      resolve({})
    }
    reader.onabort = () => resolve({})
    reader.readAsArrayBuffer(file)
  })
}
49  src/scripts/metadata/ebml.test.ts  Normal file
@@ -0,0 +1,49 @@
import fs from 'fs'
import path from 'path'
import { afterEach, describe, expect, it, vi } from 'vitest'

import {
  EXPECTED_PROMPT,
  EXPECTED_WORKFLOW,
  mockFileReaderAbort,
  mockFileReaderError
} from './__fixtures__/helpers'
import { getFromWebmFile } from './ebml'

const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.webm')

describe('WebM/EBML metadata', () => {
  it('extracts workflow and prompt from EBML SimpleTag elements', async () => {
    const bytes = fs.readFileSync(fixturePath)
    const file = new File([bytes], 'test.webm', { type: 'video/webm' })

    const result = await getFromWebmFile(file)

    expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
    expect(result.prompt).toEqual(EXPECTED_PROMPT)
  })

  it('returns empty for non-WebM data', async () => {
    const file = new File([new Uint8Array(16)], 'fake.webm')

    const result = await getFromWebmFile(file)

    expect(result).toEqual({})
  })

  describe('FileReader failure modes', () => {
    afterEach(() => vi.restoreAllMocks())

    const file = new File([new Uint8Array(16)], 'test.webm')

    it('resolves empty when the FileReader fires error', async () => {
      mockFileReaderError('readAsArrayBuffer')
      expect(await getFromWebmFile(file)).toEqual({})
    })

    it('resolves empty when the FileReader fires abort', async () => {
      mockFileReaderAbort('readAsArrayBuffer')
      expect(await getFromWebmFile(file)).toEqual({})
    })
  })
})
@@ -353,6 +353,7 @@ export function getFromWebmFile(file: File): Promise<ComfyMetadata> {
    const reader = new FileReader()
    reader.onload = (event) => handleFileLoad(event, resolve)
    reader.onerror = () => resolve({})
    reader.onabort = () => resolve({})
    reader.readAsArrayBuffer(file.slice(0, MAX_READ_BYTES))
  })
}
56  src/scripts/metadata/flac.test.ts  Normal file
@@ -0,0 +1,56 @@
import fs from 'fs'
import path from 'path'
import { afterEach, describe, expect, it, vi } from 'vitest'

import {
  EXPECTED_PROMPT,
  EXPECTED_WORKFLOW,
  mockFileReaderAbort,
  mockFileReaderError
} from './__fixtures__/helpers'
import { getFromFlacBuffer, getFromFlacFile } from './flac'

const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.flac')

afterEach(() => vi.restoreAllMocks())

describe('FLAC metadata', () => {
  it('extracts workflow and prompt from Vorbis comments', () => {
    const bytes = fs.readFileSync(fixturePath)
    const buffer = bytes.buffer.slice(
      bytes.byteOffset,
      bytes.byteOffset + bytes.byteLength
    )

    const result = getFromFlacBuffer(buffer)

    expect(result.workflow).toBe(JSON.stringify(EXPECTED_WORKFLOW))
    expect(result.prompt).toBe(JSON.stringify(EXPECTED_PROMPT))
  })

  it('returns undefined for non-FLAC data', () => {
    const buf = new ArrayBuffer(16)
    const result = getFromFlacBuffer(buf)
    expect(result).toBeUndefined()
  })

  describe('FileReader failure modes', () => {
    const file = new File([new Uint8Array(16)], 'test.flac')

    it('resolves empty when the FileReader fires error', async () => {
      mockFileReaderError('readAsArrayBuffer')

      const result = await getFromFlacFile(file)

      expect(result).toEqual({})
    })

    it('resolves empty when the FileReader fires abort', async () => {
      mockFileReaderAbort('readAsArrayBuffer')

      const result = await getFromFlacFile(file)

      expect(result).toEqual({})
    })
  })
})
@@ -42,6 +42,8 @@ export function getFromFlacFile(file: File): Promise<Record<string, string>> {
      const arrayBuffer = event.target.result as ArrayBuffer
      r(getFromFlacBuffer(arrayBuffer))
    }
    reader.onerror = () => r({})
    reader.onabort = () => r({})
    reader.readAsArrayBuffer(file)
  })
}
@@ -1,7 +1,11 @@
-import { describe, expect, it } from 'vitest'
+import { afterEach, describe, expect, it, vi } from 'vitest'
 
 import { ASCII, GltfSizeBytes } from '@/types/metadataTypes'
 
+import {
+  mockFileReaderAbort,
+  mockFileReaderError
+} from './__fixtures__/helpers'
 import { getGltfBinaryMetadata } from './gltf'
 
 describe('GLTF binary metadata parser', () => {
@@ -160,4 +164,20 @@ describe('GLTF binary metadata parser', () => {
     const metadata = await getGltfBinaryMetadata(invalidEmptyFile)
     expect(metadata).toEqual({})
   })
+
+  describe('FileReader failure modes', () => {
+    afterEach(() => vi.restoreAllMocks())
+
+    const file = new File([new Uint8Array(16)], 'test.glb')
+
+    it('resolves empty when the FileReader fires error', async () => {
+      mockFileReaderError('readAsArrayBuffer')
+      expect(await getGltfBinaryMetadata(file)).toEqual({})
+    })
+
+    it('resolves empty when the FileReader fires abort', async () => {
+      mockFileReaderAbort('readAsArrayBuffer')
+      expect(await getGltfBinaryMetadata(file)).toEqual({})
+    })
+  })
 })
@@ -165,6 +165,7 @@ export function getGltfBinaryMetadata(file: File): Promise<ComfyMetadata> {
      }
    }
    reader.onerror = () => resolve({})
    reader.onabort = () => resolve({})
    reader.readAsArrayBuffer(file.slice(0, bytesToRead))
  })
}
52  src/scripts/metadata/isobmff.test.ts  Normal file
@@ -0,0 +1,52 @@
import fs from 'fs'
import path from 'path'
import { afterEach, describe, expect, it, vi } from 'vitest'

import {
  EXPECTED_PROMPT,
  EXPECTED_WORKFLOW,
  mockFileReaderAbort,
  mockFileReaderError
} from './__fixtures__/helpers'
import { getFromIsobmffFile } from './isobmff'

const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.mp4')

describe('ISOBMFF (MP4) metadata', () => {
  it('extracts workflow and prompt from QuickTime keys/ilst boxes', async () => {
    const bytes = fs.readFileSync(fixturePath)
    const file = new File([bytes], 'test.mp4', { type: 'video/mp4' })

    const result = await getFromIsobmffFile(file)

    expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
    expect(result.prompt).toEqual(EXPECTED_PROMPT)
  })

  it('returns empty for non-ISOBMFF data', async () => {
    const file = new File([new Uint8Array(16)], 'fake.mp4', {
      type: 'video/mp4'
    })

    const result = await getFromIsobmffFile(file)

    expect(result).toEqual({})
  })

  describe('FileReader failure modes', () => {
    afterEach(() => vi.restoreAllMocks())

    const file = new File([new Uint8Array(16)], 'test.mp4')

    it('resolves empty when the FileReader fires error', async () => {
      vi.spyOn(console, 'error').mockImplementation(() => {})
      mockFileReaderError('readAsArrayBuffer')
      expect(await getFromIsobmffFile(file)).toEqual({})
    })

    it('resolves empty when the FileReader fires abort', async () => {
      mockFileReaderAbort('readAsArrayBuffer')
      expect(await getFromIsobmffFile(file)).toEqual({})
    })
  })
})
@@ -274,6 +274,7 @@ export function getFromIsobmffFile(file: File): Promise<ComfyMetadata> {
      console.error('FileReader: Error reading ISOBMFF file:', err)
      resolve({})
    }
    reader.onabort = () => resolve({})
    reader.readAsArrayBuffer(file.slice(0, MAX_READ_BYTES))
  })
}
91  src/scripts/metadata/json.test.ts  Normal file
@@ -0,0 +1,91 @@
import { afterEach, describe, expect, it, vi } from 'vitest'

import {
  mockFileReaderAbort,
  mockFileReaderError,
  mockFileReaderResult
} from './__fixtures__/helpers'
import { getDataFromJSON } from './json'

function jsonFile(content: object): File {
  return new File([JSON.stringify(content)], 'test.json', {
    type: 'application/json'
  })
}

describe('getDataFromJSON', () => {
  it('detects API-format workflows by class_type on every value', async () => {
    const apiData = {
      '1': { class_type: 'KSampler', inputs: {} },
      '2': { class_type: 'EmptyLatentImage', inputs: {} }
    }

    const result = await getDataFromJSON(jsonFile(apiData))

    expect(result).toEqual({ prompt: apiData })
  })

  it('treats objects without universal class_type as a workflow', async () => {
    const workflow = { nodes: [], links: [], version: 1 }

    const result = await getDataFromJSON(jsonFile(workflow))

    expect(result).toEqual({ workflow })
  })

  it('extracts templates when the root object has a templates key', async () => {
    const templates = [{ name: 'basic' }]

    const result = await getDataFromJSON(jsonFile({ templates }))

    expect(result).toEqual({ templates })
  })

  it('returns undefined for non-JSON content', async () => {
    const file = new File(['not valid json'], 'bad.json', {
      type: 'application/json'
    })

    const result = await getDataFromJSON(file)

    expect(result).toBeUndefined()
  })

  describe('FileReader failure modes', () => {
    afterEach(() => {
      vi.restoreAllMocks()
    })

    it('resolves undefined when the FileReader fires error', async () => {
      mockFileReaderError('readAsText')

      const result = await getDataFromJSON(jsonFile({ nodes: [] }))

      expect(result).toBeUndefined()
    })

    it('resolves undefined when the FileReader fires abort', async () => {
      mockFileReaderAbort('readAsText')

      const result = await getDataFromJSON(jsonFile({ nodes: [] }))

      expect(result).toBeUndefined()
    })

    it('resolves undefined when reader.result is not a string', async () => {
      mockFileReaderResult('readAsText', new ArrayBuffer(8))

      const result = await getDataFromJSON(jsonFile({ nodes: [] }))

      expect(result).toBeUndefined()
    })

    it('resolves undefined when reader.result is null', async () => {
      mockFileReaderResult('readAsText', null)

      const result = await getDataFromJSON(jsonFile({ nodes: [] }))

      expect(result).toBeUndefined()
    })
  })
})
@@ -6,21 +6,28 @@ export function getDataFromJSON(
   return new Promise<Record<string, object> | undefined>((resolve) => {
     const reader = new FileReader()
     reader.onload = async () => {
-      const readerResult = reader.result as string
-      const jsonContent = JSON.parse(readerResult)
-      if (jsonContent?.templates) {
-        resolve({ templates: jsonContent.templates })
-        return
-      }
-      if (isApiJson(jsonContent)) {
-        resolve({ prompt: jsonContent })
-        return
-      }
-      resolve({ workflow: jsonContent })
-      return
+      try {
+        if (typeof reader.result !== 'string') {
+          resolve(undefined)
+          return
+        }
+        const jsonContent = JSON.parse(reader.result)
+        if (jsonContent?.templates) {
+          resolve({ templates: jsonContent.templates })
+          return
+        }
+        if (isApiJson(jsonContent)) {
+          resolve({ prompt: jsonContent })
+          return
+        }
+        resolve({ workflow: jsonContent })
+      } catch {
+        resolve(undefined)
+      }
     }
     reader.onerror = () => resolve(undefined)
+    reader.onabort = () => resolve(undefined)
     reader.readAsText(file)
     return
   })
 }
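The rewritten getDataFromJSON resolves to undefined on any read or parse failure, and otherwise to exactly one of { templates }, { prompt } (API-format JSON, where every value carries class_type), or { workflow }. A hypothetical caller-side sketch of that dispatch, assuming an async context with a File in scope (the logging is placeholder handling, not part of this change):

import { getDataFromJSON } from './json'

// Illustrative dispatch over the three possible result shapes.
const data = await getDataFromJSON(file)
if (!data) {
  console.warn('File is not valid JSON')
} else if (data.templates) {
  console.log('template bundle', data.templates)
} else if (data.prompt) {
  console.log('API-format prompt', data.prompt)
} else {
  console.log('workflow', data.workflow)
}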
106  src/scripts/metadata/mp3.test.ts  Normal file
@@ -0,0 +1,106 @@
import fs from 'fs'
import path from 'path'
import { afterEach, describe, expect, it, vi } from 'vitest'

import {
  EXPECTED_PROMPT,
  EXPECTED_WORKFLOW,
  mockFileReaderAbort,
  mockFileReaderError
} from './__fixtures__/helpers'
import { getMp3Metadata } from './mp3'

const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.mp3')

afterEach(() => vi.restoreAllMocks())

describe('MP3 metadata', () => {
  it('extracts workflow and prompt from ID3 tags', async () => {
    const bytes = fs.readFileSync(fixturePath)
    const file = new File([bytes], 'test.mp3', { type: 'audio/mpeg' })

    const result = await getMp3Metadata(file)

    expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
    expect(result.prompt).toEqual(EXPECTED_PROMPT)
  })

  it('returns undefined fields when file has no embedded metadata', async () => {
    vi.spyOn(console, 'error').mockImplementation(() => {})
    const file = new File([new Uint8Array(16)], 'empty.mp3', {
      type: 'audio/mpeg'
    })

    const result = await getMp3Metadata(file)

    expect(result.workflow).toBeUndefined()
    expect(result.prompt).toBeUndefined()
    expect(console.error).toHaveBeenCalledWith('Invalid file signature.')
  })

  it('does not log an invalid signature for a valid MP3 sync header', async () => {
    const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
    const buf = new Uint8Array(16)
    buf[0] = 0xff
    buf[1] = 0xfb
    const file = new File([buf], 'valid.mp3', { type: 'audio/mpeg' })

    await getMp3Metadata(file)

    expect(errorSpy).not.toHaveBeenCalled()
  })

  it('does not log an invalid signature for a valid ID3v2 header', async () => {
    const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
    const buf = new Uint8Array(16)
    buf[0] = 0x49
    buf[1] = 0x44
    buf[2] = 0x33
    const file = new File([buf], 'valid-id3.mp3', { type: 'audio/mpeg' })

    await getMp3Metadata(file)

    expect(errorSpy).not.toHaveBeenCalled()
  })

  it('extracts metadata that spans the 4096-byte page boundary', async () => {
    vi.spyOn(console, 'error').mockImplementation(() => {})
    const metadata =
      `prompt\0${JSON.stringify(EXPECTED_PROMPT)}\0` +
      `workflow\0${JSON.stringify(EXPECTED_WORKFLOW)}\0`
    const metadataStart = 4090
    const size = metadataStart + metadata.length + 4
    const buf = new Uint8Array(size)
    for (let i = 0; i < metadata.length; i++) {
      buf[metadataStart + i] = metadata.charCodeAt(i)
    }
    buf[size - 2] = 0xff
    buf[size - 1] = 0xfb
    const file = new File([buf], 'large.mp3', { type: 'audio/mpeg' })

    const result = await getMp3Metadata(file)

    expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
    expect(result.prompt).toEqual(EXPECTED_PROMPT)
  })

  describe('FileReader failure modes', () => {
    const file = new File([new Uint8Array(16)], 'test.mp3')

    it('resolves undefined fields when the FileReader fires error', async () => {
      mockFileReaderError('readAsArrayBuffer')

      const result = await getMp3Metadata(file)

      expect(result).toEqual({ prompt: undefined, workflow: undefined })
    })

    it('resolves undefined fields when the FileReader fires abort', async () => {
      mockFileReaderAbort('readAsArrayBuffer')

      const result = await getMp3Metadata(file)

      expect(result).toEqual({ prompt: undefined, workflow: undefined })
    })
  })
})
@@ -1,21 +1,28 @@
 export async function getMp3Metadata(file: File) {
   const reader = new FileReader()
-  const read_process = new Promise(
-    (r) => (reader.onload = (event) => r(event?.target?.result))
-  )
+  const read_process = new Promise<ArrayBuffer | null>((r) => {
+    reader.onload = (event) => r((event?.target?.result as ArrayBuffer) ?? null)
+    reader.onerror = () => r(null)
+    reader.onabort = () => r(null)
+  })
   reader.readAsArrayBuffer(file)
-  const arrayBuffer = (await read_process) as ArrayBuffer
+  const arrayBuffer = await read_process
+  if (!arrayBuffer) return { prompt: undefined, workflow: undefined }
   //https://stackoverflow.com/questions/7302439/how-can-i-determine-that-a-particular-file-is-in-fact-an-mp3-file#7302482
   const sig_bytes = new Uint8Array(arrayBuffer, 0, 3)
   if (
-    (sig_bytes[0] != 0xff && sig_bytes[1] != 0xfb) ||
-    (sig_bytes[0] != 0x49 && sig_bytes[1] != 0x44 && sig_bytes[2] != 0x33)
+    (sig_bytes[0] != 0xff || sig_bytes[1] != 0xfb) &&
+    (sig_bytes[0] != 0x49 || sig_bytes[1] != 0x44 || sig_bytes[2] != 0x33)
   )
     console.error('Invalid file signature.')
   let header = ''
   while (header.length < arrayBuffer.byteLength) {
     const page = String.fromCharCode(
-      ...new Uint8Array(arrayBuffer, header.length, header.length + 4096)
+      ...new Uint8Array(
+        arrayBuffer,
+        header.length,
+        Math.min(4096, arrayBuffer.byteLength - header.length)
+      )
     )
     header += page
     if (page.match('\u00ff\u00fb')) break
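The corrected condition only logs 'Invalid file signature.' when the buffer starts with neither an MPEG frame-sync pair (0xFF 0xFB) nor an ID3v2 header ('ID3'); the previous mix of && and || rejected valid files. An equivalent standalone predicate, written out as a sketch for clarity (illustrative, not code from the change itself):

// Equivalent form of the fixed check: a buffer is accepted when it
// starts with 0xFF 0xFB (MPEG frame sync) or "ID3" (0x49 0x44 0x33).
function looksLikeMp3(sig: Uint8Array): boolean {
  const isFrameSync = sig[0] === 0xff && sig[1] === 0xfb
  const isId3v2 = sig[0] === 0x49 && sig[1] === 0x44 && sig[2] === 0x33
  return isFrameSync || isId3v2
}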
74  src/scripts/metadata/ogg.test.ts  Normal file
@@ -0,0 +1,74 @@
import fs from 'fs'
import path from 'path'
import { afterEach, describe, expect, it, vi } from 'vitest'

import {
  EXPECTED_PROMPT,
  EXPECTED_WORKFLOW,
  mockFileReaderAbort,
  mockFileReaderError
} from './__fixtures__/helpers'
import { getOggMetadata } from './ogg'

const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.opus')

afterEach(() => vi.restoreAllMocks())

describe('OGG/Opus metadata', () => {
  it('extracts workflow and prompt from an Opus file', async () => {
    const bytes = fs.readFileSync(fixturePath)
    const file = new File([bytes], 'test.opus', { type: 'audio/ogg' })

    const result = await getOggMetadata(file)

    expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
    expect(result.prompt).toEqual(EXPECTED_PROMPT)
  })

  it('returns undefined fields for non-OGG data', async () => {
    vi.spyOn(console, 'error').mockImplementation(() => {})
    const file = new File([new Uint8Array(16)], 'fake.ogg', {
      type: 'audio/ogg'
    })

    const result = await getOggMetadata(file)

    expect(result.workflow).toBeUndefined()
    expect(result.prompt).toBeUndefined()
    expect(console.error).toHaveBeenCalledWith('Invalid file signature.')
  })

  it('handles files larger than 4096 bytes without RangeError', async () => {
    const size = 5000
    const buf = new Uint8Array(size)
    const oggs = new TextEncoder().encode('OggS\0')
    buf.set(oggs, 0)
    buf.set(oggs, 4500)
    const file = new File([buf], 'large.ogg', { type: 'audio/ogg' })

    const result = await getOggMetadata(file)

    expect(result.workflow).toBeUndefined()
    expect(result.prompt).toBeUndefined()
  })

  describe('FileReader failure modes', () => {
    const file = new File([new Uint8Array(16)], 'test.ogg')

    it('resolves undefined fields when the FileReader fires error', async () => {
      mockFileReaderError('readAsArrayBuffer')

      const result = await getOggMetadata(file)

      expect(result).toEqual({ prompt: undefined, workflow: undefined })
    })

    it('resolves undefined fields when the FileReader fires abort', async () => {
      mockFileReaderAbort('readAsArrayBuffer')

      const result = await getOggMetadata(file)

      expect(result).toEqual({ prompt: undefined, workflow: undefined })
    })
  })
})
@@ -1,17 +1,24 @@
 export async function getOggMetadata(file: File) {
   const reader = new FileReader()
-  const read_process = new Promise(
-    (r) => (reader.onload = (event) => r(event?.target?.result))
-  )
+  const read_process = new Promise<ArrayBuffer | null>((r) => {
+    reader.onload = (event) => r((event?.target?.result as ArrayBuffer) ?? null)
+    reader.onerror = () => r(null)
+    reader.onabort = () => r(null)
+  })
   reader.readAsArrayBuffer(file)
-  const arrayBuffer = (await read_process) as ArrayBuffer
+  const arrayBuffer = await read_process
+  if (!arrayBuffer) return { prompt: undefined, workflow: undefined }
   const signature = String.fromCharCode(...new Uint8Array(arrayBuffer, 0, 4))
   if (signature !== 'OggS') console.error('Invalid file signature.')
   let oggs = 0
   let header = ''
   while (header.length < arrayBuffer.byteLength) {
     const page = String.fromCharCode(
-      ...new Uint8Array(arrayBuffer, header.length, header.length + 4096)
+      ...new Uint8Array(
+        arrayBuffer,
+        header.length,
+        Math.min(4096, arrayBuffer.byteLength - header.length)
+      )
     )
     if (page.match('OggS\u0000')) oggs++
     header += page
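Both the MP3 and OGG readers now clamp each page to the bytes that remain, which is what the "larger than 4096 bytes" tests exercise; previously the third Uint8Array argument grew past the end of the buffer and threw a RangeError. A minimal standalone sketch of the clamped read (illustrative, not the exact code above):

// Read one page of at most 4096 bytes starting at `offset`, never
// running past the end of the buffer.
function readPage(buffer: ArrayBuffer, offset: number): string {
  const length = Math.min(4096, buffer.byteLength - offset)
  return String.fromCharCode(...new Uint8Array(buffer, offset, length))
}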
@@ -1,11 +1,19 @@
-import { describe, expect, it } from 'vitest'
+import { afterEach, describe, expect, it, vi } from 'vitest'
 
-import { getFromPngBuffer } from './png'
+import {
+  mockFileReaderAbort,
+  mockFileReaderError
+} from './__fixtures__/helpers'
+import { getFromPngBuffer, getFromPngFile } from './png'
+
+afterEach(() => vi.restoreAllMocks())
+
+const PNG_SIGNATURE = [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]
 
 function createPngWithChunk(
   chunkType: string,
   keyword: string,
-  content: string,
+  content: string | Uint8Array,
   options: {
     compressionFlag?: number
     compressionMethod?: number
@@ -20,12 +28,11 @@ function createPngWithChunk(
     translatedKeyword = ''
   } = options
 
-  const signature = new Uint8Array([
-    0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a
-  ])
+  const signature = new Uint8Array(PNG_SIGNATURE)
   const typeBytes = new TextEncoder().encode(chunkType)
   const keywordBytes = new TextEncoder().encode(keyword)
-  const contentBytes = new TextEncoder().encode(content)
+  const contentBytes =
+    content instanceof Uint8Array ? content : new TextEncoder().encode(content)
 
   let chunkData: Uint8Array
   if (chunkType === 'iTXt') {
@@ -66,12 +73,11 @@ function createPngWithChunk(
   new DataView(lengthBytes.buffer).setUint32(0, chunkData.length, false)
 
   const crc = new Uint8Array(4)
-
   const iendType = new TextEncoder().encode('IEND')
   const iendLength = new Uint8Array(4)
   const iendCrc = new Uint8Array(4)
 
-  const total = signature.length + 4 + 4 + chunkData.length + 4 + 4 + 4 + 0 + 4
+  const total = signature.length + (4 + 4 + chunkData.length + 4) + (4 + 4 + 4)
   const result = new Uint8Array(total)
 
   let offset = 0
@@ -138,6 +144,21 @@ describe('getFromPngBuffer', () => {
     expect(result['workflow']).toBe(workflow)
   })
 
+  it('logs warning and skips iTXt chunk with unsupported compression method', async () => {
+    vi.spyOn(console, 'warn').mockImplementation(() => {})
+    const buffer = createPngWithChunk('iTXt', 'workflow', 'data', {
+      compressionFlag: 1,
+      compressionMethod: 99
+    })
+
+    const result = await getFromPngBuffer(buffer)
+
+    expect(result['workflow']).toBeUndefined()
+    expect(console.warn).toHaveBeenCalledWith(
+      expect.stringContaining('Unsupported compression method 99')
+    )
+  })
+
   it('parses compressed iTXt chunk', async () => {
     const workflow = '{"nodes":[{"id":1,"type":"KSampler"}]}'
     const contentBytes = new TextEncoder().encode(workflow)
@@ -163,83 +184,49 @@ describe('getFromPngBuffer', () => {
       pos += chunk.length
     }
 
-    const buffer = createPngWithCompressedITXt(
-      'workflow',
-      compressedBytes,
-      '',
-      ''
-    )
+    const buffer = createPngWithChunk('iTXt', 'workflow', compressedBytes, {
+      compressionFlag: 1,
+      compressionMethod: 0
+    })
     const result = await getFromPngBuffer(buffer)
     expect(result['workflow']).toBe(workflow)
   })
 })
 
-function createPngWithCompressedITXt(
-  keyword: string,
-  compressedContent: Uint8Array,
-  languageTag: string,
-  translatedKeyword: string
-): ArrayBuffer {
-  const signature = new Uint8Array([
-    0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a
-  ])
-  const typeBytes = new TextEncoder().encode('iTXt')
-  const keywordBytes = new TextEncoder().encode(keyword)
-  const langBytes = new TextEncoder().encode(languageTag)
-  const transBytes = new TextEncoder().encode(translatedKeyword)
-
-  const totalLength =
-    keywordBytes.length +
-    1 +
-    2 +
-    langBytes.length +
-    1 +
-    transBytes.length +
-    1 +
-    compressedContent.length
-
-  const chunkData = new Uint8Array(totalLength)
-  let pos = 0
-  chunkData.set(keywordBytes, pos)
-  pos += keywordBytes.length
-  chunkData[pos++] = 0
-  chunkData[pos++] = 1
-  chunkData[pos++] = 0
-  chunkData.set(langBytes, pos)
-  pos += langBytes.length
-  chunkData[pos++] = 0
-  chunkData.set(transBytes, pos)
-  pos += transBytes.length
-  chunkData[pos++] = 0
-  chunkData.set(compressedContent, pos)
-
-  const lengthBytes = new Uint8Array(4)
-  new DataView(lengthBytes.buffer).setUint32(0, chunkData.length, false)
-
-  const crc = new Uint8Array(4)
-  const iendType = new TextEncoder().encode('IEND')
-  const iendLength = new Uint8Array(4)
-  const iendCrc = new Uint8Array(4)
-
-  const total = signature.length + 4 + 4 + chunkData.length + 4 + 4 + 4 + 0 + 4
-  const result = new Uint8Array(total)
-
-  let offset = 0
-  result.set(signature, offset)
-  offset += signature.length
-  result.set(lengthBytes, offset)
-  offset += 4
-  result.set(typeBytes, offset)
-  offset += 4
-  result.set(chunkData, offset)
-  offset += chunkData.length
-  result.set(crc, offset)
-  offset += 4
-  result.set(iendLength, offset)
-  offset += 4
-  result.set(iendType, offset)
-  offset += 4
-  result.set(iendCrc, offset)
-
-  return result.buffer
-}
+describe('getFromPngFile', () => {
+  it('reads metadata from a File object', async () => {
+    const workflow = '{"nodes":[]}'
+    const buffer = createPngWithChunk('tEXt', 'workflow', workflow)
+    const file = new File([buffer], 'test.png', { type: 'image/png' })
+
+    const result = await getFromPngFile(file)
+
+    expect(result['workflow']).toBe(workflow)
+  })
+
+  it('returns empty for an invalid PNG File', async () => {
+    vi.spyOn(console, 'error').mockImplementation(() => {})
+    const file = new File([new ArrayBuffer(8)], 'bad.png', {
+      type: 'image/png'
+    })
+
+    const result = await getFromPngFile(file)
+
+    expect(result).toEqual({})
+    expect(console.error).toHaveBeenCalledWith('Not a valid PNG file')
+  })
+
+  describe('FileReader failure modes', () => {
+    const file = new File([new Uint8Array(16)], 'test.png')
+
+    it('rejects when the FileReader fires error', async () => {
+      mockFileReaderError('readAsArrayBuffer')
+      await expect(getFromPngFile(file)).rejects.toBeDefined()
+    })
+
+    it('rejects when the FileReader fires abort', async () => {
+      mockFileReaderAbort('readAsArrayBuffer')
+      await expect(getFromPngFile(file)).rejects.toThrow('FileReader aborted')
+    })
+  })
+})
@@ -126,6 +126,7 @@ export async function getFromPngFile(
      resolve(result)
    }
    reader.onerror = () => reject(reader.error)
    reader.onabort = () => reject(new Error('FileReader aborted'))
    reader.readAsArrayBuffer(file)
  })
}
42  src/scripts/metadata/svg.test.ts  Normal file
@@ -0,0 +1,42 @@
import { describe, expect, it } from 'vitest'

import { getSvgMetadata } from './svg'

function svgFile(content: string): File {
  return new File([content], 'test.svg', { type: 'image/svg+xml' })
}

describe('getSvgMetadata', () => {
  it('extracts workflow and prompt from CDATA in <metadata>', async () => {
    const svg = `<svg xmlns="http://www.w3.org/2000/svg">
      <metadata><![CDATA[${JSON.stringify({
        workflow: { nodes: [] },
        prompt: { '1': {} }
      })}]]></metadata>
      <rect width="1" height="1"/>
    </svg>`

    const result = await getSvgMetadata(svgFile(svg))

    expect(result).toEqual({
      workflow: { nodes: [] },
      prompt: { '1': {} }
    })
  })

  it('returns empty when SVG has no metadata element', async () => {
    const svg = '<svg xmlns="http://www.w3.org/2000/svg"><rect/></svg>'

    const result = await getSvgMetadata(svgFile(svg))

    expect(result).toEqual({})
  })

  it('returns empty when CDATA contains invalid JSON', async () => {
    const svg = `<svg><metadata><![CDATA[not valid json]]></metadata></svg>`

    const result = await getSvgMetadata(svgFile(svg))

    expect(result).toEqual({})
  })
})
@@ -1,67 +1,215 @@
-import { describe, expect, it } from 'vitest'
-
-import { getWebpMetadata } from './pnginfo'
-
-function buildExifPayload(workflowJson: string): Uint8Array {
-  const fullStr = `workflow:${workflowJson}\0`
-  const strBytes = new TextEncoder().encode(fullStr)
-
-  const headerSize = 22
-  const buf = new Uint8Array(headerSize + strBytes.length)
-  const dv = new DataView(buf.buffer)
-
-  buf.set([0x49, 0x49], 0)
-  dv.setUint16(2, 0x002a, true)
-  dv.setUint32(4, 8, true)
-  dv.setUint16(8, 1, true)
-  dv.setUint16(10, 0, true)
-  dv.setUint16(12, 2, true)
-  dv.setUint32(14, strBytes.length, true)
-  dv.setUint32(18, 22, true)
-  buf.set(strBytes, 22)
-
-  return buf
-}
-
-function buildWebp(precedingChunkLength: number, workflowJson: string): File {
-  const exifPayload = buildExifPayload(workflowJson)
-  const precedingPadded = precedingChunkLength + (precedingChunkLength % 2)
-  const totalSize = 12 + (8 + precedingPadded) + (8 + exifPayload.length)
-
-  const buffer = new Uint8Array(totalSize)
-  const dv = new DataView(buffer.buffer)
-
-  buffer.set([0x52, 0x49, 0x46, 0x46], 0)
-  dv.setUint32(4, totalSize - 8, true)
-  buffer.set([0x57, 0x45, 0x42, 0x50], 8)
-
-  buffer.set([0x56, 0x50, 0x38, 0x20], 12)
-  dv.setUint32(16, precedingChunkLength, true)
-
-  const exifStart = 20 + precedingPadded
-  buffer.set([0x45, 0x58, 0x49, 0x46], exifStart)
-  dv.setUint32(exifStart + 4, exifPayload.length, true)
-  buffer.set(exifPayload, exifStart + 8)
-
-  return new File([buffer], 'test.webp', { type: 'image/webp' })
-}
-
-describe('getWebpMetadata', () => {
-  it('finds workflow when a preceding chunk has odd length (RIFF padding)', async () => {
-    const workflow = '{"nodes":[]}'
-    const file = buildWebp(3, workflow)
-
-    const metadata = await getWebpMetadata(file)
-
-    expect(metadata.workflow).toBe(workflow)
-  })
-
-  it('finds workflow when preceding chunk has even length (no padding)', async () => {
-    const workflow = '{"nodes":[1]}'
-    const file = buildWebp(4, workflow)
-
-    const metadata = await getWebpMetadata(file)
-
-    expect(metadata.workflow).toBe(workflow)
-  })
-})
+import fs from 'fs'
+import path from 'path'
+import { afterEach, describe, expect, it, vi } from 'vitest'
+
+import { getLatentMetadata, getWebpMetadata } from './pnginfo'
+
+afterEach(() => vi.restoreAllMocks())
+
+const fixturesDir = path.resolve(__dirname, 'metadata/__fixtures__')
+
+type AsciiIfdEntry = { tag: number; value: string }
+
+function encodeAsciiIfd(entries: AsciiIfdEntry[]): Uint8Array {
+  const tableSize = 10 + 12 * entries.length
+  const strings = entries.map((e) => new TextEncoder().encode(`${e.value}\0`))
+  const totalStringBytes = strings.reduce((sum, s) => sum + s.length, 0)
+
+  const buf = new Uint8Array(tableSize + totalStringBytes)
+  const dv = new DataView(buf.buffer)
+
+  buf.set([0x49, 0x49], 0)
+  dv.setUint16(2, 0x002a, true)
+  dv.setUint32(4, 8, true)
+  dv.setUint16(8, entries.length, true)
+
+  let stringOffset = tableSize
+  for (let i = 0; i < entries.length; i++) {
+    const entryOffset = 10 + i * 12
+    dv.setUint16(entryOffset, entries[i].tag, true)
+    dv.setUint16(entryOffset + 2, 2, true)
+    dv.setUint32(entryOffset + 4, strings[i].length, true)
+    dv.setUint32(entryOffset + 8, stringOffset, true)
+    buf.set(strings[i], stringOffset)
+    stringOffset += strings[i].length
+  }
+
+  return buf
+}
+
+type WebpChunk = { type: string; payload: Uint8Array }
+
+function wrapInWebp(chunks: WebpChunk[]): File {
+  let payloadSize = 0
+  for (const c of chunks) {
+    payloadSize += 8 + c.payload.length + (c.payload.length % 2)
+  }
+  const totalSize = 12 + payloadSize
+  const buf = new Uint8Array(totalSize)
+  const dv = new DataView(buf.buffer)
+
+  buf.set([0x52, 0x49, 0x46, 0x46], 0)
+  dv.setUint32(4, totalSize - 8, true)
+  buf.set([0x57, 0x45, 0x42, 0x50], 8)
+
+  let offset = 12
+  for (const c of chunks) {
+    for (let i = 0; i < 4; i++) {
+      buf[offset + i] = c.type.charCodeAt(i)
+    }
+    dv.setUint32(offset + 4, c.payload.length, true)
+    buf.set(c.payload, offset + 8)
+    offset += 8 + c.payload.length + (c.payload.length % 2)
+  }
+
+  return new File([buf], 'test.webp', { type: 'image/webp' })
+}
+
+function exifChunk(
+  entries: AsciiIfdEntry[],
+  options: { withExifPrefix?: boolean } = {}
+): WebpChunk {
+  const ifd = encodeAsciiIfd(entries)
+  if (!options.withExifPrefix) {
+    return { type: 'EXIF', payload: ifd }
+  }
+  const prefixed = new Uint8Array(6 + ifd.length)
+  prefixed.set(new TextEncoder().encode('Exif\0\0'), 0)
+  prefixed.set(ifd, 6)
+  return { type: 'EXIF', payload: prefixed }
+}
+
+describe('getWebpMetadata', () => {
+  it('returns empty when the file is not a valid WEBP', async () => {
+    vi.spyOn(console, 'error').mockImplementation(() => {})
+    const file = new File([new Uint8Array(12)], 'fake.webp')
+
+    const metadata = await getWebpMetadata(file)
+
+    expect(metadata).toEqual({})
+    expect(console.error).toHaveBeenCalledWith('Not a valid WEBP file')
+  })
+
+  it('returns empty when a valid WEBP has no EXIF chunk', async () => {
+    const file = wrapInWebp([
+      { type: 'VP8 ', payload: new Uint8Array([0, 0, 0, 0]) }
+    ])
+
+    const metadata = await getWebpMetadata(file)
+
+    expect(metadata).toEqual({})
+  })
+
+  it('extracts workflow and prompt from EXIF without prefix', async () => {
+    const bytes = fs.readFileSync(path.join(fixturesDir, 'with_metadata.webp'))
+    const file = new File([bytes], 'test.webp', { type: 'image/webp' })
+
+    const metadata = await getWebpMetadata(file)
+
+    expect(metadata).toEqual({
+      workflow:
+        '{"nodes":[{"id":1,"type":"KSampler","pos":[100,100],"size":[200,200]}]}',
+      prompt: '{"1":{"class_type":"KSampler","inputs":{}}}'
+    })
+  })
+
+  it('extracts workflow and prompt from EXIF with Exif\\0\\0 prefix', async () => {
+    const bytes = fs.readFileSync(
+      path.join(fixturesDir, 'with_metadata_exif_prefix.webp')
+    )
+    const file = new File([bytes], 'test.webp', { type: 'image/webp' })
+
+    const metadata = await getWebpMetadata(file)
+
+    expect(metadata).toEqual({
+      workflow:
+        '{"nodes":[{"id":1,"type":"KSampler","pos":[100,100],"size":[200,200]}]}',
+      prompt: '{"1":{"class_type":"KSampler","inputs":{}}}'
+    })
+  })
+
+  it('walks past odd-length preceding chunks (RIFF padding)', async () => {
+    const file = wrapInWebp([
+      { type: 'VP8 ', payload: new Uint8Array(3) },
+      exifChunk([{ tag: 0, value: 'workflow:{"a":1}' }])
+    ])
+
+    const metadata = await getWebpMetadata(file)
+
+    expect(metadata).toEqual({ workflow: '{"a":1}' })
+  })
+})
+
+describe('getLatentMetadata', () => {
+  function buildSafetensors(headerObj: object): File {
+    const headerBytes = new TextEncoder().encode(JSON.stringify(headerObj))
+    const buf = new Uint8Array(8 + headerBytes.length)
+    const dv = new DataView(buf.buffer)
+    dv.setUint32(0, headerBytes.length, true)
+    dv.setUint32(4, 0, true)
+    buf.set(headerBytes, 8)
+    return new File([buf], 'test.safetensors')
+  }
+
+  it('extracts __metadata__ from a safetensors header', async () => {
+    const workflow =
+      '{"nodes":[{"id":1,"type":"KSampler","pos":[100,100],"size":[200,200]}]}'
+    const prompt = '{"1":{"class_type":"KSampler","inputs":{}}}'
+    const file = buildSafetensors({
+      __metadata__: { workflow, prompt },
+      'tensor.weight': { dtype: 'F32', shape: [1], data_offsets: [0, 4] }
+    })
+
+    const metadata = await getLatentMetadata(file)
+
+    expect(metadata).toEqual({ workflow, prompt })
+  })
+
+  it('returns undefined when the safetensors header has no __metadata__', async () => {
+    const file = buildSafetensors({
+      'tensor.weight': { dtype: 'F32', shape: [1], data_offsets: [0, 4] }
+    })
+
+    const metadata = await getLatentMetadata(file)
+
+    expect(metadata).toBeUndefined()
+  })
+
+  it('returns undefined for a truncated or malformed file', async () => {
+    const file = new File([new Uint8Array(4)], 'bad.safetensors')
+
+    const metadata = await getLatentMetadata(file)
+
+    expect(metadata).toBeUndefined()
+  })
+
+  it('extracts metadata when the header is larger than 4 MiB', async () => {
+    const filler = 'x'.repeat(5 * 1024 * 1024)
+    const workflow =
+      '{"nodes":[{"id":1,"type":"KSampler","pos":[0,0],"size":[1,1]}]}'
+    const file = buildSafetensors({
+      __metadata__: { workflow, filler }
+    })
+
+    const metadata = await getLatentMetadata(file)
+
+    expect(metadata?.workflow).toBe(workflow)
+    expect(metadata?.filler).toBe(filler)
+  })
+
+  it('warns and returns undefined when the header size exceeds the 8 MiB limit', async () => {
+    const warnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {})
+    const prefix = new Uint8Array(8)
+    const prefixView = new DataView(prefix.buffer)
+    const oversized = 8 * 1024 * 1024 + 1
+    prefixView.setUint32(0, oversized, true)
+    const file = new File([prefix], 'too-big.safetensors')
+
+    const metadata = await getLatentMetadata(file)
+
+    expect(metadata).toBeUndefined()
+    expect(warnSpy).toHaveBeenCalledWith(
+      `Safetensors header size ${oversized} bytes exceeds maximum ${8 * 1024 * 1024} bytes`
+    )
+  })
+})
@@ -105,14 +105,17 @@ export function getWebpMetadata(file: File) {
         ...webp.slice(offset, offset + 4)
       )
       if (chunk_type === 'EXIF') {
+        let exifOffset = offset + 8
+        let exifLength = chunk_length
         if (
-          String.fromCharCode(...webp.slice(offset + 8, offset + 8 + 6)) ==
+          String.fromCharCode(...webp.slice(exifOffset, exifOffset + 6)) ==
           'Exif\0\0'
         ) {
-          offset += 6
+          exifOffset += 6
+          exifLength -= 6
        }
-        let data = parseExifData(
-          webp.slice(offset + 8, offset + 8 + chunk_length)
+        const data = parseExifData(
+          webp.slice(exifOffset, exifOffset + exifLength)
         )
         for (const key in data) {
           const value = data[Number(key)]
@@ -131,32 +134,35 @@ export function getWebpMetadata(file: File) {
 
       r(txt_chunks)
     }
 
     reader.onerror = () => r({})
+    reader.onabort = () => r({})
     reader.readAsArrayBuffer(file)
   })
 }
 
-export function getLatentMetadata(file: File): Promise<Record<string, string>> {
-  return new Promise((r) => {
-    const reader = new FileReader()
-    reader.onload = (event) => {
-      const safetensorsData = new Uint8Array(
-        event.target?.result as ArrayBuffer
-      )
-      const dataView = new DataView(safetensorsData.buffer)
-      let header_size = dataView.getUint32(0, true)
-      let offset = 8
-      let header = JSON.parse(
-        new TextDecoder().decode(
-          safetensorsData.slice(offset, offset + header_size)
-        )
-      )
-      r(header.__metadata__)
-    }
-
-    var slice = file.slice(0, 1024 * 1024 * 4)
-    reader.readAsArrayBuffer(slice)
-  })
-}
+// Matches backend MAX_SAFETENSORS_HEADER_SIZE in
+// ComfyUI/app/assets/services/metadata_extract.py
+const MAX_SAFETENSORS_HEADER_SIZE = 8 * 1024 * 1024
+
+export async function getLatentMetadata(
+  file: File
+): Promise<Record<string, string> | undefined> {
+  try {
+    const prefix = await file.slice(0, 8).arrayBuffer()
+    if (prefix.byteLength < 8) return undefined
+    const headerSize = new DataView(prefix).getUint32(0, true)
+    if (headerSize > MAX_SAFETENSORS_HEADER_SIZE) {
+      console.warn(
+        `Safetensors header size ${headerSize} bytes exceeds maximum ${MAX_SAFETENSORS_HEADER_SIZE} bytes`
+      )
+      return undefined
+    }
+    const headerData = await file.slice(8, 8 + headerSize).arrayBuffer()
+    const header = JSON.parse(new TextDecoder().decode(headerData))
+    return header.__metadata__
+  } catch {
+    return undefined
+  }
+}
 
 interface NodeConnection {
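For context on the rewritten getLatentMetadata: a safetensors file begins with an unsigned 64-bit little-endian header length followed by that many bytes of JSON, whose __metadata__ object carries the workflow and prompt strings (this is what buildSafetensors in the test writes, splitting the length into two 32-bit halves). The helper reads only the low 32 bits of the length, which is safe under the 8 MiB cap it enforces. A hypothetical standalone sketch of just the prefix read:

// Hypothetical sketch: read the safetensors JSON-header length from the
// 8-byte prefix. Only the low 32 bits are consulted, which is fine here
// because anything above the 8 MiB cap is rejected before reading the header.
async function readSafetensorsHeaderSize(file: File): Promise<number | undefined> {
  const prefix = await file.slice(0, 8).arrayBuffer()
  if (prefix.byteLength < 8) return undefined
  return new DataView(prefix).getUint32(0, true)
}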