mirror of
https://github.com/Comfy-Org/ComfyUI_frontend.git
synced 2026-02-20 06:44:32 +00:00
## Summary Implements Phase 1 of the **Vue-owns-truth** pattern for widget values. Widget values are now canonical in a Pinia store; `widget.value` delegates to the store while preserving full backward compatibility. ## Changes - **New store**: `src/stores/widgetValueStore.ts` - centralized widget value storage with `get/set/remove/removeNode` API - **BaseWidget integration**: `widget.value` getter/setter now delegates to store when widget is associated with a node - **LGraphNode wiring**: `addCustomWidget()` automatically calls `widget.setNodeId(this.id)` to wire widgets to their nodes - **Test fixes**: Added Pinia setup to test files that use widgets ## Why This foundation enables: - Vue components to reactively bind to widget values via `computed(() => store.get(...))` - Future Yjs/CRDT backing for real-time collaboration - Cleaner separation between Vue state and LiteGraph rendering ## Backward Compatibility | Extension Pattern | Status | |-------------------|--------| | `widget.value = x` | ✅ Works unchanged | | `node.widgets[i].value` | ✅ Works unchanged | | `widget.callback` | ✅ Still fires | | `node.onWidgetChanged` | ✅ Still fires | ## Testing - ✅ 4252 unit tests pass - ✅ Build succeeds ┆Issue is synchronized with this [Notion page](https://www.notion.so/PR-8594-feat-add-WidgetValueStore-for-centralized-widget-value-management-2fc6d73d36508160886fcb9f3ebd941e) by [Unito](https://www.unito.io) --------- Co-authored-by: Amp <amp@ampcode.com> Co-authored-by: github-actions <github-actions@github.com> Co-authored-by: GitHub Action <action@github.com>
132 lines · 3.5 KiB · TypeScript
async function decompressZlib(
|
|
data: Uint8Array<ArrayBuffer>
|
|
): Promise<Uint8Array<ArrayBuffer>> {
|
|
const stream = new DecompressionStream('deflate')
|
|
const writer = stream.writable.getWriter()
|
|
try {
|
|
await writer.write(data)
|
|
await writer.close()
|
|
} finally {
|
|
writer.releaseLock()
|
|
}
|
|
|
|
const reader = stream.readable.getReader()
|
|
const chunks: Uint8Array<ArrayBuffer>[] = []
|
|
let totalLength = 0
|
|
|
|
try {
|
|
while (true) {
|
|
const { done, value } = await reader.read()
|
|
if (done) break
|
|
chunks.push(value)
|
|
totalLength += value.length
|
|
}
|
|
} finally {
|
|
reader.releaseLock()
|
|
}
|
|
|
|
const result = new Uint8Array(totalLength)
|
|
let offset = 0
|
|
for (const chunk of chunks) {
|
|
result.set(chunk, offset)
|
|
offset += chunk.length
|
|
}
|
|
return result
|
|
}
|
|
|
|
export async function getFromPngBuffer(
|
|
buffer: ArrayBuffer
|
|
): Promise<Record<string, string>> {
|
|
const pngData = new Uint8Array(buffer)
|
|
const dataView = new DataView(pngData.buffer)
|
|
|
|
if (dataView.getUint32(0) !== 0x89504e47) {
|
|
console.error('Not a valid PNG file')
|
|
return {}
|
|
}
|
|
|
|
let offset = 8
|
|
const txt_chunks: Record<string, string> = {}
|
|
|
|
while (offset < pngData.length) {
|
|
const length = dataView.getUint32(offset)
|
|
const type = String.fromCharCode(...pngData.slice(offset + 4, offset + 8))
|
|
|
|
if (type === 'tEXt' || type === 'comf' || type === 'iTXt') {
|
|
let keyword_end = offset + 8
|
|
while (pngData[keyword_end] !== 0) {
|
|
keyword_end++
|
|
}
|
|
const keyword = String.fromCharCode(
|
|
...pngData.slice(offset + 8, keyword_end)
|
|
)
|
|
|
|
let textStart = keyword_end + 1
|
|
let isCompressed = false
|
|
let compressionMethod = 0
|
|
|
|
if (type === 'iTXt') {
|
|
const chunkEnd = offset + 8 + length
|
|
isCompressed = pngData[textStart] === 1
|
|
compressionMethod = pngData[textStart + 1]
|
|
textStart += 2
|
|
|
|
while (pngData[textStart] !== 0 && textStart < chunkEnd) {
|
|
textStart++
|
|
}
|
|
if (textStart < chunkEnd) textStart++
|
|
|
|
while (pngData[textStart] !== 0 && textStart < chunkEnd) {
|
|
textStart++
|
|
}
|
|
if (textStart < chunkEnd) textStart++
|
|
}
|
|
|
|
let contentArraySegment = pngData.slice(textStart, offset + 8 + length)
|
|
|
|
if (isCompressed) {
|
|
if (compressionMethod === 0) {
|
|
try {
|
|
contentArraySegment = await decompressZlib(contentArraySegment)
|
|
} catch (e) {
|
|
console.error(`Failed to decompress iTXt chunk "${keyword}":`, e)
|
|
offset += 12 + length
|
|
continue
|
|
}
|
|
} else {
|
|
console.warn(
|
|
`Unsupported compression method ${compressionMethod} for iTXt chunk "${keyword}"`
|
|
)
|
|
offset += 12 + length
|
|
continue
|
|
}
|
|
}
|
|
|
|
const contentJson = new TextDecoder('utf-8').decode(contentArraySegment)
|
|
txt_chunks[keyword] = contentJson
|
|
}
|
|
|
|
offset += 12 + length
|
|
}
|
|
return txt_chunks
|
|
}
|
|
|
|
export async function getFromPngFile(
|
|
file: File
|
|
): Promise<Record<string, string>> {
|
|
return new Promise<Record<string, string>>((resolve, reject) => {
|
|
const reader = new FileReader()
|
|
reader.onload = async (event) => {
|
|
const buffer = event.target?.result
|
|
if (!(buffer instanceof ArrayBuffer)) {
|
|
reject(new Error('Failed to read file as ArrayBuffer'))
|
|
return
|
|
}
|
|
const result = await getFromPngBuffer(buffer)
|
|
resolve(result)
|
|
}
|
|
reader.onerror = () => reject(reader.error)
|
|
reader.readAsArrayBuffer(file)
|
|
})
|
|
}
|