Compare commits

...

24 Commits

Author SHA1 Message Date
Alexander Brown
d1fc88b747 fix: refresh deps and clear production audit vulnerabilities (#9068)
## Summary
- refresh workspace dependency catalog and lockfile for security and
maintenance updates
- resolve production audit findings (runtime dependencies now report
clean)
- align Tiptap dependencies on v2 and pin `@tiptap/pm` to avoid
mixed-version type issues
- update Storybook preview toolbar config for Storybook 10 typing
(`showName` removed)

## Validation
- `pnpm typecheck` ✅
- `pnpm lint` ✅ (warnings only)
- `pnpm test:unit` ✅
- `pnpm audit --prod` ✅
- `pnpm audit` ⚠️ remaining dev-only transitive advisories
(`minimatch`/`ajv`/`brace-expansion`) in upstream toolchain deps

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9068-fix-refresh-deps-and-clear-production-audit-vulnerabilities-30e6d73d36508122b778ec3ca99d41a4)
by [Unito](https://www.unito.io)

Co-authored-by: Amp <amp@ampcode.com>
2026-03-08 02:33:46 +00:00
Christian Byrne
00490e8d94 [backport core/1.40] fix: prevent non-widget inputs on nested subgraphs from appearing as button widgets (#9542) (#9581)
Backport of #9542 to core/1.40.

Conflict: resolveSubgraphInputTarget.ts was modify/delete — kept as new
file (the fix).

**Original PR:** https://github.com/Comfy-Org/ComfyUI_frontend/pull/9542
**Pipeline ticket:** 15e1f241-efaa-4fe5-88ca-4ccc7bfb3345

Co-authored-by: Alexander Brown <drjkl@comfy.org>
Co-authored-by: Amp <amp@ampcode.com>
2026-03-07 18:31:17 -08:00
Christian Byrne
e5a4443653 [backport core/1.40] fix: remove timeouts from error toasts so they persist until dismissed (#9543) (#9580)
Backport of #9543 to core/1.40.

Conflicts: 6 modify/delete files removed (not on 1.40), 1 content
conflict resolved in useNodeReplacement.ts (added error handling).

**Original PR:** https://github.com/Comfy-Org/ComfyUI_frontend/pull/9543
**Pipeline ticket:** 15e1f241-efaa-4fe5-88ca-4ccc7bfb3345
2026-03-07 18:31:12 -08:00
Christian Byrne
094c4c4871 [backport core/1.40] fix: Prevent corruption of workflow data due to checkState during graph loading (#9531) (#9579)
Backport of #9531 to core/1.40. Critical data corruption fix.

Conflicts resolved: restructured try/catch in app.ts to wrap with
ChangeTracker.isLoadingGraph. Removed appModeStore.ts (app mode not on
1.40).

**Original PR:** https://github.com/Comfy-Org/ComfyUI_frontend/pull/9531
**Pipeline ticket:** 15e1f241-efaa-4fe5-88ca-4ccc7bfb3345

Co-authored-by: pythongosssss <125205205+pythongosssss@users.noreply.github.com>
2026-03-07 18:31:07 -08:00
Christian Byrne
6ab6e78497 [backport core/1.40] fix: extract and harden subgraph node ID deduplication (#9510) (#9578)
Backport of #9510 to core/1.40. Stability fix for subgraph node ID
conflicts.

Conflict: added missing test imports in LGraph.test.ts.

**Original PR:** https://github.com/Comfy-Org/ComfyUI_frontend/pull/9510
**Pipeline ticket:** 15e1f241-efaa-4fe5-88ca-4ccc7bfb3345

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9578-backport-core-1-40-fix-extract-and-harden-subgraph-node-ID-deduplication-9510-31d6d73d36508122bfe8d2aea4ddae35)
by [Unito](https://www.unito.io)

Co-authored-by: Alexander Brown <drjkl@comfy.org>
Co-authored-by: Amp <amp@ampcode.com>
2026-03-07 18:31:01 -08:00
Christian Byrne
602784a672 [backport core/1.40] fix: textarea stays disabled after link disconnect on promoted widgets (#9199) (#9577)
Backport of #9199 to core/1.40.

Conflicts resolved in useGraphNodeManager.ts/test.ts — accepted incoming
promoted widget handling changes.

**Original PR:** https://github.com/Comfy-Org/ComfyUI_frontend/pull/9199
**Pipeline ticket:** 15e1f241-efaa-4fe5-88ca-4ccc7bfb3345

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9577-backport-core-1-40-fix-textarea-stays-disabled-after-link-disconnect-on-promoted-widge-31d6d73d365081619c7cfbea1bbc4463)
by [Unito](https://www.unito.io)
2026-03-07 18:30:56 -08:00
Christian Byrne
22eefc4222 [backport core/1.40] fix: spin out workflow tab/load stability regressions (#9345) (#9576)
Backport of #9345 to core/1.40. Stability fix for workflow tab loading.

Conflicts: import additions and new test block in workflowService —
accepted incoming.

**Original PR:** https://github.com/Comfy-Org/ComfyUI_frontend/pull/9345
**Pipeline ticket:** 15e1f241-efaa-4fe5-88ca-4ccc7bfb3345

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9576-backport-core-1-40-fix-spin-out-workflow-tab-load-stability-regressions-9345-31d6d73d36508126a4a6f7cccf592272)
by [Unito](https://www.unito.io)

Co-authored-by: Alexander Brown <drjkl@comfy.org>
Co-authored-by: Amp <amp@ampcode.com>
2026-03-07 18:30:50 -08:00
Christian Byrne
e181ec95b0 [backport core/1.40] [fix] Replace eval() with safe math expression parser (#9263) (#9575)
Backport of #9263 to core/1.40. Security fix — removes eval() usage.

Conflicts resolved: added new exports to litegraph.ts barrel, added new
test imports in widget.test.ts.

**Original PR:** https://github.com/Comfy-Org/ComfyUI_frontend/pull/9263
**Pipeline ticket:** 15e1f241-efaa-4fe5-88ca-4ccc7bfb3345

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9575-backport-core-1-40-fix-Replace-eval-with-safe-math-expression-parser-9263-31d6d73d365081099903f24d1d6584cc)
by [Unito](https://www.unito.io)

Co-authored-by: Johnpaul Chiwetelu <49923152+Myestery@users.noreply.github.com>
2026-03-07 18:30:45 -08:00
Christian Byrne
c5f42b0862 [backport core/1.40] Fix essentials nodes not being marked core (#9287) (#9574)
Backport of #9287 to core/1.40. Snapshot PNG conflict resolved (accepted
theirs).

**Original PR:** https://github.com/Comfy-Org/ComfyUI_frontend/pull/9287
**Pipeline ticket:** 15e1f241-efaa-4fe5-88ca-4ccc7bfb3345

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9574-backport-core-1-40-Fix-essentials-nodes-not-being-marked-core-9287-31d6d73d365081a48f01f6cb2ef00619)
by [Unito](https://www.unito.io)

Co-authored-by: AustinMroz <austin@comfy.org>
2026-03-07 18:30:39 -08:00
Comfy Org PR Bot
32fff22eb1 [backport core/1.40] fix: handle failed global subgraph blueprint loading gracefully (#9573)
Backport of #9063 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9573-backport-core-1-40-fix-handle-failed-global-subgraph-blueprint-loading-gracefully-31d6d73d36508123aaeeecde21c72b49)
by [Unito](https://www.unito.io)

Co-authored-by: Christian Byrne <cbyrne@comfy.org>
2026-03-07 18:25:55 -08:00
Comfy Org PR Bot
e29f9b6800 [backport core/1.40] fix: subgraph unpacking creates extra link to seed widget (#9572)
Backport of #9046 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9572-backport-core-1-40-fix-subgraph-unpacking-creates-extra-link-to-seed-widget-31d6d73d365081f4b64ccdaffa508e8e)
by [Unito](https://www.unito.io)

Co-authored-by: Christian Byrne <cbyrne@comfy.org>
2026-03-07 18:25:47 -08:00
Comfy Org PR Bot
c1262e3bb2 [backport core/1.40] [Bug] Node preview images are lost when switching between multiple workflow tabs (#9571)
Backport of #9380 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9571-backport-core-1-40-Bug-Node-preview-images-are-lost-when-switching-between-multiple-w-31d6d73d36508164b4e3d90b756f51fa)
by [Unito](https://www.unito.io)

Co-authored-by: Kelly Yang <124ykl@gmail.com>
Co-authored-by: Alexander Brown <drjkl@comfy.org>
2026-03-07 18:25:40 -08:00
Comfy Org PR Bot
69aa9ae2d7 [backport core/1.40] fix: prevent persistent loading state when cycling batches with identical URLs (#9570)
Backport of #8999 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9570-backport-core-1-40-fix-prevent-persistent-loading-state-when-cycling-batches-with-iden-31d6d73d3650818d9136cfc82e73d89f)
by [Unito](https://www.unito.io)

Co-authored-by: Christian Byrne <cbyrne@comfy.org>
Co-authored-by: Simula_r <18093452+simula-r@users.noreply.github.com>
2026-03-07 18:25:32 -08:00
Comfy Org PR Bot
fa652592b4 [backport core/1.40] fix: Custom Combo options display in Nodes 2.0 (#9569)
Backport of #9324 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9569-backport-core-1-40-fix-Custom-Combo-options-display-in-Nodes-2-0-31d6d73d3650819a8c67fbdf1ef7cf15)
by [Unito](https://www.unito.io)

Co-authored-by: Alexander Brown <drjkl@comfy.org>
2026-03-07 18:25:24 -08:00
Comfy Org PR Bot
9f2de249f4 [backport core/1.40] fix: node replacement fails after execution and modal sync (#9568)
Backport of #9269 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9568-backport-core-1-40-fix-node-replacement-fails-after-execution-and-modal-sync-31d6d73d365081dc8affc0e1591df4cb)
by [Unito](https://www.unito.io)

Co-authored-by: jaeone94 <89377375+jaeone94@users.noreply.github.com>
2026-03-07 18:25:17 -08:00
Comfy Org PR Bot
114c2ef182 [backport core/1.40] Prevent serialization of progress text to prompt (#9224)
Backport of #9221 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9224-backport-core-1-40-Prevent-serialization-of-progress-text-to-prompt-3136d73d36508139a4d1f25a37cfe9c4)
by [Unito](https://www.unito.io)

Co-authored-by: AustinMroz <austin@comfy.org>
2026-02-25 17:38:45 -08:00
Comfy Org PR Bot
dd0aff5865 [backport core/1.40] fix: publish desktop-specific frontend release artifact (#9208)
Backport of #9206 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9208-backport-core-1-40-fix-publish-desktop-specific-frontend-release-artifact-3126d73d36508195acdac4009a72509f)
by [Unito](https://www.unito.io)

Co-authored-by: Benjamin Lu <benjaminlu1107@gmail.com>
2026-02-25 03:43:56 -08:00
Comfy Org PR Bot
c723ee4891 [backport core/1.40] fix: resolve desktop-ui build failure from icon path cwd mismatch (#9192)
Backport of #9185 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9192-backport-core-1-40-fix-resolve-desktop-ui-build-failure-from-icon-path-cwd-mismatch-3126d73d36508116a057cbc556a27569)
by [Unito](https://www.unito.io)

Co-authored-by: Benjamin Lu <benjaminlu1107@gmail.com>
2026-02-24 20:57:55 -08:00
Comfy Org PR Bot
3bea20e755 [backport core/1.40] fix: prevent infinite node resize loop in Vue mode (#9178)
Backport of #9177 to `core/1.40`

Automatically created by backport workflow.

Co-authored-by: Alexander Brown <drjkl@comfy.org>
Co-authored-by: Amp <amp@ampcode.com>
2026-02-24 21:18:36 +00:00
Comfy Org PR Bot
3e97dde185 [backport core/1.40] fix: use getAuthHeader for API key auth in subscription/billing (#9148)
Backport of #9142 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9148-backport-core-1-40-fix-use-getAuthHeader-for-API-key-auth-in-subscription-billing-3116d73d3650816f9facc4359d6a7431)
by [Unito](https://www.unito.io)

Co-authored-by: Christian Byrne <cbyrne@comfy.org>
2026-02-23 19:00:53 -08:00
Comfy Org PR Bot
f0fbb55a0a [backport core/1.40] fix: fix error overlay and TabErrors filtering for nested subgraphs (#9132)
Backport of #9129 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9132-backport-core-1-40-fix-fix-error-overlay-and-TabErrors-filtering-for-nested-subgraphs-3106d73d365081dd9041e9c382613353)
by [Unito](https://www.unito.io)

Co-authored-by: jaeone94 <89377375+jaeone94@users.noreply.github.com>
2026-02-23 04:13:43 -08:00
Comfy Org PR Bot
d37023bf5e [backport core/1.40] [refactor] Extract executionErrorStore from executionStore (#9130)
Backport of #9060 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9130-backport-core-1-40-refactor-Extract-executionErrorStore-from-executionStore-3106d73d3650818ca57dcec8dcb8a709)
by [Unito](https://www.unito.io)

Co-authored-by: jaeone94 <89377375+jaeone94@users.noreply.github.com>
2026-02-23 04:00:50 -08:00
Comfy Org PR Bot
a28cb69a73 [backport core/1.40] feat(node): show Enter Subgraph and Error buttons side by side in node footer (#9127)
Backport of #9126 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9127-backport-core-1-40-feat-node-show-Enter-Subgraph-and-Error-buttons-side-by-side-in-no-3106d73d3650817f96d7e72713e9a4ae)
by [Unito](https://www.unito.io)

Co-authored-by: jaeone94 <89377375+jaeone94@users.noreply.github.com>
2026-02-23 01:19:13 -08:00
Comfy Org PR Bot
cd7d627ef4 [backport core/1.40] feat: add feature flag to disable Essentials tab in node library (#9081)
Backport of #9067 to `core/1.40`

Automatically created by backport workflow.

┆Issue is synchronized with this [Notion
page](https://www.notion.so/PR-9081-backport-core-1-40-feat-add-feature-flag-to-disable-Essentials-tab-in-node-library-30f6d73d365081be9f48d9e15b3f7b49)
by [Unito](https://www.unito.io)

Co-authored-by: Christian Byrne <cbyrne@comfy.org>
Co-authored-by: GitHub Action <action@github.com>
2026-02-21 22:30:00 -08:00
111 changed files with 6637 additions and 3192 deletions

View File

@@ -53,7 +53,13 @@ jobs:
IS_NIGHTLY: ${{ github.ref == 'refs/heads/main' && 'true' || 'false' }}
run: |
pnpm install --frozen-lockfile
pnpm build
# Desktop-specific release artifact with desktop distribution flags.
DISTRIBUTION=desktop pnpm build
pnpm zipdist ./dist ./dist-desktop.zip
# Default release artifact for core/PyPI.
NX_SKIP_NX_CACHE=true pnpm build
pnpm zipdist
- name: Upload dist artifact
uses: actions/upload-artifact@v6
@@ -62,6 +68,7 @@ jobs:
path: |
dist/
dist.zip
dist-desktop.zip
draft_release:
needs: build
@@ -79,6 +86,7 @@ jobs:
with:
files: |
dist.zip
dist-desktop.zip
tag_name: v${{ needs.build.outputs.version }}
target_commitish: ${{ github.event.pull_request.base.ref }}
make_latest: >-

View File

@@ -35,7 +35,7 @@
}
],
"no-control-regex": "off",
"no-eval": "off",
"no-eval": "error",
"no-redeclare": "error",
"no-restricted-imports": [
"error",

View File

@@ -90,7 +90,6 @@ const preview: Preview = {
{ value: 'light', icon: 'sun', title: 'Light' },
{ value: 'dark', icon: 'moon', title: 'Dark' }
],
showName: true,
dynamicTitle: true
}
}

View File

@@ -61,8 +61,7 @@
"^build"
],
"options": {
"cwd": "apps/desktop-ui",
"command": "vite build --config vite.config.mts"
"command": "vite build --config apps/desktop-ui/vite.config.mts"
},
"outputs": [
"{projectRoot}/dist"

View File

@@ -4,7 +4,7 @@
<template v-if="filter.tasks.length === 0">
<!-- Empty filter -->
<Divider />
<p class="text-neutral-400 w-full text-center">
<p class="w-full text-center text-neutral-400">
{{ $t('maintenance.allOk') }}
</p>
</template>
@@ -25,7 +25,7 @@
<!-- Display: Cards -->
<template v-else>
<div class="flex flex-wrap justify-evenly gap-8 pad-y my-4">
<div class="pad-y my-4 flex flex-wrap justify-evenly gap-8">
<TaskCard
v-for="task in filter.tasks"
:key="task.id"
@@ -45,7 +45,8 @@ import { useConfirm, useToast } from 'primevue'
import ConfirmPopup from 'primevue/confirmpopup'
import Divider from 'primevue/divider'
import { t } from '@/i18n'
import { useI18n } from 'vue-i18n'
import { useMaintenanceTaskStore } from '@/stores/maintenanceTaskStore'
import type {
MaintenanceFilter,
@@ -55,6 +56,7 @@ import type {
import TaskCard from './TaskCard.vue'
import TaskListItem from './TaskListItem.vue'
const { t } = useI18n()
const toast = useToast()
const confirm = useConfirm()
const taskStore = useMaintenanceTaskStore()
@@ -80,8 +82,7 @@ const executeTask = async (task: MaintenanceTask) => {
toast.add({
severity: 'error',
summary: t('maintenance.error.toastTitle'),
detail: message ?? t('maintenance.error.defaultDescription'),
life: 10_000
detail: message ?? t('maintenance.error.defaultDescription')
})
}

View File

@@ -188,8 +188,7 @@ const completeValidation = async () => {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('maintenance.error.cannotContinue'),
life: 5_000
detail: t('maintenance.error.cannotContinue')
})
}
}

View File

@@ -1,8 +1,8 @@
<template>
<BaseViewTemplate dark hide-language-selector>
<div class="h-full p-8 2xl:p-16 flex flex-col items-center justify-center">
<div class="flex h-full flex-col items-center justify-center p-8 2xl:p-16">
<div
class="bg-neutral-800 rounded-lg shadow-lg p-6 w-full max-w-[600px] flex flex-col gap-6"
class="flex w-full max-w-[600px] flex-col gap-6 rounded-lg bg-neutral-800 p-6 shadow-lg"
>
<h2 class="text-3xl font-semibold text-neutral-100">
{{ $t('install.helpImprove') }}
@@ -15,7 +15,7 @@
<a
href="https://comfy.org/privacy"
target="_blank"
class="text-blue-400 hover:text-blue-300 underline"
class="text-blue-400 underline hover:text-blue-300"
>
{{ $t('install.privacyPolicy') }} </a
>.
@@ -33,7 +33,7 @@
}}
</span>
</div>
<div class="flex pt-6 justify-end">
<div class="flex justify-end pt-6">
<Button
:label="$t('g.ok')"
icon="pi pi-check"
@@ -72,8 +72,7 @@ const updateConsent = async () => {
toast.add({
severity: 'error',
summary: t('install.settings.errorUpdatingConsent'),
detail: t('install.settings.errorUpdatingConsentDetail'),
life: 3000
detail: t('install.settings.errorUpdatingConsentDetail')
})
} finally {
isUpdating.value = false

View File

@@ -0,0 +1,183 @@
{
"id": "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
"revision": 0,
"last_node_id": 2,
"last_link_id": 0,
"nodes": [
{
"id": 2,
"type": "e5fb1765-aaaa-bbbb-cccc-ddddeeee0001",
"pos": [600, 400],
"size": [200, 100],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": null
}
],
"properties": {},
"widgets_values": []
}
],
"links": [],
"groups": [],
"definitions": {
"subgraphs": [
{
"id": "e5fb1765-aaaa-bbbb-cccc-ddddeeee0001",
"version": 1,
"state": {
"lastGroupId": 0,
"lastNodeId": 2,
"lastLinkId": 5,
"lastRerouteId": 0
},
"revision": 0,
"config": {},
"name": "Subgraph With Duplicate Links",
"inputNode": {
"id": -10,
"bounding": [200, 400, 120, 60]
},
"outputNode": {
"id": -20,
"bounding": [900, 400, 120, 60]
},
"inputs": [],
"outputs": [
{
"id": "out-latent-1",
"name": "LATENT",
"type": "LATENT",
"linkIds": [2],
"pos": [920, 420]
}
],
"widgets": [],
"nodes": [
{
"id": 1,
"type": "KSampler",
"pos": [400, 100],
"size": [270, 262],
"flags": {},
"order": 1,
"mode": 0,
"inputs": [
{
"name": "model",
"type": "MODEL",
"link": null
},
{
"name": "positive",
"type": "CONDITIONING",
"link": null
},
{
"name": "negative",
"type": "CONDITIONING",
"link": null
},
{
"name": "latent_image",
"type": "LATENT",
"link": 1
}
],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [2]
}
],
"properties": {
"Node name for S&R": "KSampler"
},
"widgets_values": [0, "randomize", 20, 8, "euler", "simple", 1]
},
{
"id": 2,
"type": "EmptyLatentImage",
"pos": [100, 200],
"size": [200, 106],
"flags": {},
"order": 0,
"mode": 0,
"inputs": [],
"outputs": [
{
"name": "LATENT",
"type": "LATENT",
"links": [1, 3, 4, 5]
}
],
"properties": {
"Node name for S&R": "EmptyLatentImage"
},
"widgets_values": [512, 512, 1]
}
],
"groups": [],
"links": [
{
"id": 1,
"origin_id": 2,
"origin_slot": 0,
"target_id": 1,
"target_slot": 3,
"type": "LATENT"
},
{
"id": 2,
"origin_id": 1,
"origin_slot": 0,
"target_id": -20,
"target_slot": 0,
"type": "LATENT"
},
{
"id": 3,
"origin_id": 2,
"origin_slot": 0,
"target_id": 1,
"target_slot": 3,
"type": "LATENT"
},
{
"id": 4,
"origin_id": 2,
"origin_slot": 0,
"target_id": 1,
"target_slot": 3,
"type": "LATENT"
},
{
"id": 5,
"origin_id": 2,
"origin_slot": 0,
"target_id": 1,
"target_slot": 3,
"type": "LATENT"
}
],
"extra": {}
}
]
},
"config": {},
"extra": {
"ds": {
"scale": 1,
"offset": [0, 0]
},
"frontendVersion": "1.38.14"
},
"version": 0.4
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 43 KiB

After

Width:  |  Height:  |  Size: 41 KiB

View File

@@ -375,6 +375,45 @@ test.describe('Subgraph Operations', { tag: ['@slow', '@subgraph'] }, () => {
})
})
test.describe('Subgraph Unpacking', () => {
test('Unpacking subgraph with duplicate links does not create extra links', async ({
comfyPage
}) => {
await comfyPage.workflow.loadWorkflow(
'subgraphs/subgraph-duplicate-links'
)
const result = await comfyPage.page.evaluate(() => {
const graph = window.app!.graph!
const subgraphNode = graph.nodes.find((n) => n.isSubgraphNode())
if (!subgraphNode || !subgraphNode.isSubgraphNode()) {
return { error: 'No subgraph node found' }
}
graph.unpackSubgraph(subgraphNode)
const linkCount = graph.links.size
const nodes = graph.nodes
const ksampler = nodes.find((n) => n.type === 'KSampler')
if (!ksampler) return { error: 'No KSampler found after unpack' }
const linkedInputCount = ksampler.inputs.filter(
(i) => i.link != null
).length
return { linkCount, linkedInputCount, nodeCount: nodes.length }
})
expect(result).not.toHaveProperty('error')
// Should have exactly 1 link (EmptyLatentImage→KSampler)
// not 4 (with 3 duplicates). The KSampler→output link is dropped
// because the subgraph output has no downstream connection.
expect(result.linkCount).toBe(1)
// KSampler should have exactly 1 linked input (latent_image)
expect(result.linkedInputCount).toBe(1)
})
})
test.describe('Subgraph Creation and Deletion', () => {
test('Can create subgraph from selected nodes', async ({ comfyPage }) => {
await comfyPage.workflow.loadWorkflow('default')

View File

@@ -70,13 +70,14 @@
"@primevue/themes": "catalog:",
"@sentry/vue": "catalog:",
"@sparkjsdev/spark": "catalog:",
"@tiptap/core": "^2.10.4",
"@tiptap/extension-link": "^2.10.4",
"@tiptap/extension-table": "^2.10.4",
"@tiptap/extension-table-cell": "^2.10.4",
"@tiptap/extension-table-header": "^2.10.4",
"@tiptap/extension-table-row": "^2.10.4",
"@tiptap/starter-kit": "^2.10.4",
"@tiptap/core": "catalog:",
"@tiptap/extension-link": "catalog:",
"@tiptap/extension-table": "catalog:",
"@tiptap/extension-table-cell": "catalog:",
"@tiptap/extension-table-header": "catalog:",
"@tiptap/extension-table-row": "catalog:",
"@tiptap/pm": "catalog:",
"@tiptap/starter-kit": "catalog:",
"@vueuse/core": "catalog:",
"@vueuse/integrations": "catalog:",
"@xterm/addon-fit": "^0.10.0",
@@ -93,9 +94,9 @@
"extendable-media-recorder-wav-encoder": "^7.0.129",
"firebase": "catalog:",
"fuse.js": "^7.0.0",
"glob": "^11.0.3",
"glob": "catalog:",
"jsonata": "catalog:",
"jsondiffpatch": "^0.6.0",
"jsondiffpatch": "catalog:",
"loglevel": "^1.9.2",
"marked": "^15.0.11",
"pinia": "catalog:",

5063
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@@ -9,12 +9,12 @@ catalog:
'@iconify-json/lucide': ^1.1.178
'@iconify/json': ^2.2.380
'@iconify/tailwind4': ^1.2.0
'@intlify/eslint-plugin-vue-i18n': ^4.1.0
'@intlify/eslint-plugin-vue-i18n': ^4.1.1
'@lobehub/i18n-cli': ^1.26.1
'@nx/eslint': 22.2.6
'@nx/playwright': 22.2.6
'@nx/storybook': 22.2.4
'@nx/vite': 22.2.6
'@nx/eslint': 22.5.2
'@nx/playwright': 22.5.2
'@nx/storybook': 22.5.2
'@nx/vite': 22.5.2
'@pinia/testing': ^1.0.3
'@playwright/test': ^1.58.1
'@primeuix/forms': 0.0.2
@@ -27,11 +27,19 @@ catalog:
'@sentry/vite-plugin': ^4.6.0
'@sentry/vue': ^10.32.1
'@sparkjsdev/spark': ^0.1.10
'@storybook/addon-docs': ^10.1.9
'@storybook/addon-docs': ^10.2.10
'@storybook/addon-mcp': 0.1.6
'@storybook/vue3': ^10.1.9
'@storybook/vue3-vite': ^10.1.9
'@tailwindcss/vite': ^4.1.12
'@storybook/vue3': ^10.2.10
'@storybook/vue3-vite': ^10.2.10
'@tailwindcss/vite': ^4.2.0
'@tiptap/core': ^2.27.2
'@tiptap/extension-link': ^2.27.2
'@tiptap/extension-table': ^2.27.2
'@tiptap/extension-table-cell': ^2.27.2
'@tiptap/extension-table-header': ^2.27.2
'@tiptap/extension-table-row': ^2.27.2
'@tiptap/pm': 2.27.2
'@tiptap/starter-kit': ^2.27.2
'@types/fs-extra': ^11.0.4
'@types/jsdom': ^21.1.7
'@types/node': ^24.1.0
@@ -45,7 +53,7 @@ catalog:
'@vueuse/integrations': ^14.2.0
'@webgpu/types': ^0.1.66
algoliasearch: ^5.21.0
axios: ^1.8.2
axios: ^1.13.5
cross-env: ^10.1.0
cva: 1.0.0-beta.4
dompurify: ^3.3.1
@@ -55,24 +63,26 @@ catalog:
eslint-import-resolver-typescript: ^4.4.4
eslint-plugin-import-x: ^4.16.1
eslint-plugin-oxlint: 1.25.0
eslint-plugin-storybook: ^10.1.9
eslint-plugin-storybook: ^10.2.10
eslint-plugin-unused-imports: ^4.3.0
eslint-plugin-vue: ^10.6.2
firebase: ^11.6.0
glob: ^13.0.6
globals: ^16.5.0
happy-dom: ^20.0.11
husky: ^9.1.7
jiti: 2.6.1
jsdom: ^27.4.0
jsonata: ^2.1.0
jsondiffpatch: ^0.7.3
knip: ^5.75.1
lint-staged: ^16.2.7
markdown-table: ^3.0.4
mixpanel-browser: ^2.71.0
nx: 22.2.6
oxfmt: ^0.26.0
oxlint: ^1.33.0
oxlint-tsgolint: ^0.9.1
nx: 22.5.2
oxfmt: ^0.34.0
oxlint: ^1.49.0
oxlint-tsgolint: ^0.14.2
picocolors: ^1.1.1
pinia: ^3.0.4
postcss-html: ^1.8.0
@@ -81,9 +91,9 @@ catalog:
primevue: ^4.2.5
reka-ui: ^2.5.0
rollup-plugin-visualizer: ^6.0.4
storybook: ^10.1.9
storybook: ^10.2.10
stylelint: ^16.26.1
tailwindcss: ^4.1.12
tailwindcss: ^4.2.0
tailwindcss-primeui: ^0.6.1
tsx: ^4.15.6
tw-animate-css: ^1.3.8
@@ -100,10 +110,10 @@ catalog:
vitest: ^4.0.16
vue: ^3.5.13
vue-component-type-helpers: ^3.2.1
vue-eslint-parser: ^10.2.0
vue-i18n: ^9.14.3
vue-eslint-parser: ^10.4.0
vue-i18n: ^9.14.5
vue-router: ^4.4.3
vue-tsc: ^3.2.1
vue-tsc: ^3.2.5
vuefire: ^3.2.1
wwobjloader2: ^6.2.1
yjs: ^13.6.27
@@ -130,4 +140,5 @@ onlyBuiltDependencies:
- oxc-resolver
overrides:
'@tiptap/pm': 2.27.2
'@types/eslint': '-'

View File

@@ -1,9 +1,14 @@
import zipdir from 'zip-dir'
zipdir('./dist', { saveTo: './dist.zip' }, function (err, buffer) {
const sourceDir = process.argv[2] || './dist'
const outputPath = process.argv[3] || './dist.zip'
zipdir(sourceDir, { saveTo: outputPath }, function (err, buffer) {
if (err) {
console.error('Error zipping "dist" directory:', err)
console.error(`Error zipping "${sourceDir}" directory:`, err)
} else {
console.log('Successfully zipped "dist" directory.')
process.stdout.write(
`Successfully zipped "${sourceDir}" directory to "${outputPath}".\n`
)
}
})

View File

@@ -169,6 +169,7 @@ import { useSettingStore } from '@/platform/settings/settingStore'
import { app } from '@/scripts/app'
import { useCommandStore } from '@/stores/commandStore'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import { useQueueStore, useQueueUIStore } from '@/stores/queueStore'
import { useRightSidePanelStore } from '@/stores/workspace/rightSidePanelStore'
import { useSidebarTabStore } from '@/stores/workspace/sidebarTabStore'
@@ -189,6 +190,7 @@ const { toastErrorHandler } = useErrorHandling()
const commandStore = useCommandStore()
const queueStore = useQueueStore()
const executionStore = useExecutionStore()
const executionErrorStore = useExecutionErrorStore()
const queueUIStore = useQueueUIStore()
const sidebarTabStore = useSidebarTabStore()
const { activeJobsCount } = storeToRefs(queueStore)
@@ -262,7 +264,7 @@ const shouldShowRedDot = computed((): boolean => {
return shouldShowConflictRedDot.value
})
const { hasAnyError } = storeToRefs(executionStore)
const { hasAnyError } = storeToRefs(executionErrorStore)
// Right side panel toggle
const { isOpen: isRightSidePanelOpen } = storeToRefs(rightSidePanelStore)

View File

@@ -136,8 +136,7 @@ onMounted(async () => {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('toastMessages.failedToFetchLogs'),
life: 5000
detail: t('toastMessages.failedToFetchLogs')
})
}
})

View File

@@ -233,6 +233,7 @@ import { isCloud } from '@/platform/distribution/types'
import type { NodeReplacement } from '@/platform/nodeReplacement/types'
import { useNodeReplacement } from '@/platform/nodeReplacement/useNodeReplacement'
import { useDialogStore } from '@/stores/dialogStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import type { MissingNodeType } from '@/types/comfy'
import { cn } from '@/utils/tailwindUtil'
import { useMissingNodes } from '@/workbench/extensions/manager/composables/nodePack/useMissingNodes'
@@ -244,6 +245,7 @@ const { missingNodeTypes } = defineProps<{
const { missingCoreNodes } = useMissingNodes()
const { replaceNodesInPlace } = useNodeReplacement()
const dialogStore = useDialogStore()
const executionErrorStore = useExecutionErrorStore()
interface ProcessedNode {
label: string
@@ -338,6 +340,14 @@ function handleReplaceSelected() {
replacedTypes.value = nextReplaced
selectedTypes.value = nextSelected
// replaceNodesInPlace() handles canvas rendering via onNodeAdded(),
// but the modal only updates its own local UI state above.
// Without this call the Errors Tab would still list the replaced nodes
// as missing because executionErrorStore is not aware of the replacement.
if (result.length > 0) {
executionErrorStore.removeMissingNodesByType(result)
}
// Auto-close when all replaceable nodes replaced and no non-replaceable remain
const allReplaced = replaceableNodes.value.every((n) =>
nextReplaced.has(n.label)

View File

@@ -275,8 +275,7 @@ async function handleBuy() {
toast.add({
severity: 'error',
summary: t('credits.topUp.purchaseError'),
detail: t('credits.topUp.purchaseErrorDetail', { error: errorMessage }),
life: 5000
detail: t('credits.topUp.purchaseErrorDetail', { error: errorMessage })
})
} finally {
loading.value = false

View File

@@ -98,8 +98,7 @@ async function onConfirmCancel() {
toast.add({
severity: 'error',
summary: t('subscription.cancelDialog.failed'),
detail: error instanceof Error ? error.message : t('g.unknownError'),
life: 5000
detail: error instanceof Error ? error.message : t('g.unknownError')
})
} finally {
isLoading.value = false

View File

@@ -64,17 +64,17 @@ import { useI18n } from 'vue-i18n'
import { storeToRefs } from 'pinia'
import Button from '@/components/ui/button/Button.vue'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import { useRightSidePanelStore } from '@/stores/workspace/rightSidePanelStore'
import { useCanvasStore } from '@/renderer/core/canvas/canvasStore'
import { useErrorGroups } from '@/components/rightSidePanel/errors/useErrorGroups'
const { t } = useI18n()
const executionStore = useExecutionStore()
const executionErrorStore = useExecutionErrorStore()
const rightSidePanelStore = useRightSidePanelStore()
const canvasStore = useCanvasStore()
const { totalErrorCount, isErrorOverlayOpen } = storeToRefs(executionStore)
const { totalErrorCount, isErrorOverlayOpen } = storeToRefs(executionErrorStore)
const { groupedErrorMessages } = useErrorGroups(ref(''), t)
const errorCountLabel = computed(() =>
@@ -90,7 +90,7 @@ const isVisible = computed(
)
function dismiss() {
executionStore.dismissErrorOverlay()
executionErrorStore.dismissErrorOverlay()
}
function seeErrors() {
@@ -100,6 +100,6 @@ function seeErrors() {
}
rightSidePanelStore.openPanel('errors')
executionStore.dismissErrorOverlay()
executionErrorStore.dismissErrorOverlay()
}
</script>

View File

@@ -70,7 +70,7 @@
:key="nodeData.id"
:node-data="nodeData"
:error="
executionStore.lastExecutionError?.node_id === nodeData.id
executionErrorStore.lastExecutionError?.node_id === nodeData.id
? 'Execution error'
: null
"
@@ -170,6 +170,7 @@ import { storeToRefs } from 'pinia'
import { useBootstrapStore } from '@/stores/bootstrapStore'
import { useCommandStore } from '@/stores/commandStore'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import { useNodeDefStore } from '@/stores/nodeDefStore'
import { useColorPaletteStore } from '@/stores/workspace/colorPaletteStore'
import { useSearchBoxStore } from '@/stores/workspace/searchBoxStore'
@@ -196,6 +197,7 @@ const workspaceStore = useWorkspaceStore()
const canvasStore = useCanvasStore()
const workflowStore = useWorkflowStore()
const executionStore = useExecutionStore()
const executionErrorStore = useExecutionErrorStore()
const toastStore = useToastStore()
const colorPaletteStore = useColorPaletteStore()
const colorPaletteService = useColorPaletteService()
@@ -376,7 +378,7 @@ watch(
// Update node slot errors for LiteGraph nodes
// (Vue nodes read from store directly)
watch(
() => executionStore.lastNodeErrors,
() => executionErrorStore.lastNodeErrors,
(lastNodeErrors) => {
if (!comfyApp.graph) return

View File

@@ -579,8 +579,7 @@ const onUpdateComfyUI = async (): Promise<void> => {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: error.value || t('helpCenter.updateComfyUIFailed'),
life: 5000
detail: error.value || t('helpCenter.updateComfyUIFailed')
})
return
}
@@ -597,8 +596,7 @@ const onUpdateComfyUI = async (): Promise<void> => {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: err instanceof Error ? err.message : t('g.unknownError'),
life: 5000
detail: err instanceof Error ? err.message : t('g.unknownError')
})
}
}

View File

@@ -14,7 +14,7 @@ import { SubgraphNode } from '@/lib/litegraph/src/litegraph'
import type { LGraphNode } from '@/lib/litegraph/src/litegraph'
import { useSettingStore } from '@/platform/settings/settingStore'
import { useCanvasStore } from '@/renderer/core/canvas/canvasStore'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import { useRightSidePanelStore } from '@/stores/workspace/rightSidePanelStore'
import type { RightSidePanelTab } from '@/stores/workspace/rightSidePanelStore'
import { resolveNodeDisplayName } from '@/utils/nodeTitleUtil'
@@ -36,12 +36,12 @@ import SubgraphEditor from './subgraph/SubgraphEditor.vue'
import TabErrors from './errors/TabErrors.vue'
const canvasStore = useCanvasStore()
const executionStore = useExecutionStore()
const executionErrorStore = useExecutionErrorStore()
const rightSidePanelStore = useRightSidePanelStore()
const settingStore = useSettingStore()
const { t } = useI18n()
const { hasAnyError, allErrorExecutionIds } = storeToRefs(executionStore)
const { hasAnyError, allErrorExecutionIds } = storeToRefs(executionErrorStore)
const { findParentGroup } = useGraphHierarchy()
@@ -98,7 +98,7 @@ type RightSidePanelTabList = Array<{
const hasDirectNodeError = computed(() =>
selectedNodes.value.some((node) =>
executionStore.activeGraphErrorNodeIds.has(String(node.id))
executionErrorStore.activeGraphErrorNodeIds.has(String(node.id))
)
)
@@ -106,7 +106,7 @@ const hasContainerInternalError = computed(() => {
if (allErrorExecutionIds.value.length === 0) return false
return selectedNodes.value.some((node) => {
if (!(node instanceof SubgraphNode || isGroupNode(node))) return false
return executionStore.hasInternalErrorForNode(node.id)
return executionErrorStore.isContainerWithInternalError(node)
})
})

View File

@@ -94,7 +94,7 @@ describe('TabErrors.vue', () => {
it('renders prompt-level errors (Group title = error message)', async () => {
const wrapper = mountComponent({
execution: {
executionError: {
lastPromptError: {
type: 'prompt_no_outputs',
message: 'Server Error: No outputs',
@@ -118,7 +118,7 @@ describe('TabErrors.vue', () => {
} as ReturnType<typeof getNodeByExecutionId>)
const wrapper = mountComponent({
execution: {
executionError: {
lastNodeErrors: {
'6': {
class_type: 'CLIPTextEncode',
@@ -143,7 +143,7 @@ describe('TabErrors.vue', () => {
} as ReturnType<typeof getNodeByExecutionId>)
const wrapper = mountComponent({
execution: {
executionError: {
lastExecutionError: {
prompt_id: 'abc',
node_id: '10',
@@ -167,7 +167,7 @@ describe('TabErrors.vue', () => {
vi.mocked(getNodeByExecutionId).mockReturnValue(null)
const wrapper = mountComponent({
execution: {
executionError: {
lastNodeErrors: {
'1': {
class_type: 'CLIPTextEncode',
@@ -198,7 +198,7 @@ describe('TabErrors.vue', () => {
vi.mocked(useCopyToClipboard).mockReturnValue({ copyToClipboard: mockCopy })
const wrapper = mountComponent({
execution: {
executionError: {
lastNodeErrors: {
'1': {
class_type: 'TestNode',

View File

@@ -3,15 +3,17 @@ import type { Ref } from 'vue'
import Fuse from 'fuse.js'
import type { IFuseOptions } from 'fuse.js'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import { useCanvasStore } from '@/renderer/core/canvas/canvasStore'
import { app } from '@/scripts/app'
import { isCloud } from '@/platform/distribution/types'
import { SubgraphNode } from '@/lib/litegraph/src/litegraph'
import type { LGraphNode } from '@/lib/litegraph/src/litegraph'
import {
getNodeByExecutionId,
getExecutionIdByNode,
getRootParentNode
} from '@/utils/graphTraversalUtil'
import { resolveNodeDisplayName } from '@/utils/nodeTitleUtil'
@@ -19,6 +21,7 @@ import { isLGraphNode } from '@/utils/litegraphUtil'
import { isGroupNode } from '@/utils/executableGroupNodeDto'
import { st } from '@/i18n'
import type { ErrorCardData, ErrorGroup, ErrorItem } from './types'
import type { NodeExecutionId } from '@/types/nodeIdentification'
import { isNodeExecutionId } from '@/types/nodeIdentification'
const PROMPT_CARD_ID = '__prompt__'
@@ -192,38 +195,42 @@ export function useErrorGroups(
searchQuery: Ref<string>,
t: (key: string) => string
) {
const executionStore = useExecutionStore()
const executionErrorStore = useExecutionErrorStore()
const canvasStore = useCanvasStore()
const collapseState = reactive<Record<string, boolean>>({})
const selectedNodeInfo = computed(() => {
const items = canvasStore.selectedItems
const nodeIds = new Set<string>()
const containerIds = new Set<string>()
const containerExecutionIds = new Set<NodeExecutionId>()
for (const item of items) {
if (!isLGraphNode(item)) continue
nodeIds.add(String(item.id))
if (item instanceof SubgraphNode || isGroupNode(item)) {
containerIds.add(String(item.id))
if (
(item instanceof SubgraphNode || isGroupNode(item)) &&
app.rootGraph
) {
const execId = getExecutionIdByNode(app.rootGraph, item)
if (execId) containerExecutionIds.add(execId)
}
}
return {
nodeIds: nodeIds.size > 0 ? nodeIds : null,
containerIds
containerExecutionIds
}
})
const isSingleNodeSelected = computed(
() =>
selectedNodeInfo.value.nodeIds?.size === 1 &&
selectedNodeInfo.value.containerIds.size === 0
selectedNodeInfo.value.containerExecutionIds.size === 0
)
const errorNodeCache = computed(() => {
const map = new Map<string, LGraphNode>()
for (const execId of executionStore.allErrorExecutionIds) {
for (const execId of executionErrorStore.allErrorExecutionIds) {
const node = getNodeByExecutionId(app.rootGraph, execId)
if (node) map.set(execId, node)
}
@@ -237,8 +244,9 @@ export function useErrorGroups(
const graphNode = errorNodeCache.value.get(executionNodeId)
if (graphNode && nodeIds.has(String(graphNode.id))) return true
for (const containerId of selectedNodeInfo.value.containerIds) {
if (executionNodeId.startsWith(`${containerId}:`)) return true
for (const containerExecId of selectedNodeInfo.value
.containerExecutionIds) {
if (executionNodeId.startsWith(`${containerExecId}:`)) return true
}
return false
@@ -262,10 +270,10 @@ export function useErrorGroups(
}
function processPromptError(groupsMap: Map<string, GroupEntry>) {
if (selectedNodeInfo.value.nodeIds || !executionStore.lastPromptError)
if (selectedNodeInfo.value.nodeIds || !executionErrorStore.lastPromptError)
return
const error = executionStore.lastPromptError
const error = executionErrorStore.lastPromptError
const groupTitle = error.message
const cards = getOrCreateGroup(groupsMap, groupTitle, 0)
const isKnown = KNOWN_PROMPT_ERROR_TYPES.has(error.type)
@@ -293,10 +301,10 @@ export function useErrorGroups(
groupsMap: Map<string, GroupEntry>,
filterBySelection = false
) {
if (!executionStore.lastNodeErrors) return
if (!executionErrorStore.lastNodeErrors) return
for (const [nodeId, nodeError] of Object.entries(
executionStore.lastNodeErrors
executionErrorStore.lastNodeErrors
)) {
addNodeErrorToGroup(
groupsMap,
@@ -316,9 +324,9 @@ export function useErrorGroups(
groupsMap: Map<string, GroupEntry>,
filterBySelection = false
) {
if (!executionStore.lastExecutionError) return
if (!executionErrorStore.lastExecutionError) return
const e = executionStore.lastExecutionError
const e = executionErrorStore.lastExecutionError
addNodeErrorToGroup(
groupsMap,
String(e.node_id),

View File

@@ -9,7 +9,7 @@ import type { LGraphGroup, LGraphNode } from '@/lib/litegraph/src/litegraph'
import { SubgraphNode } from '@/lib/litegraph/src/litegraph'
import type { IBaseWidget } from '@/lib/litegraph/src/types/widgets'
import { useCanvasStore } from '@/renderer/core/canvas/canvasStore'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import { useRightSidePanelStore } from '@/stores/workspace/rightSidePanelStore'
import { useSettingStore } from '@/platform/settings/settingStore'
import { cn } from '@/utils/tailwindUtil'
@@ -62,7 +62,7 @@ watchEffect(() => (widgets.value = widgetsProp))
provide(HideLayoutFieldKey, true)
const canvasStore = useCanvasStore()
const executionStore = useExecutionStore()
const executionErrorStore = useExecutionErrorStore()
const rightSidePanelStore = useRightSidePanelStore()
const nodeDefStore = useNodeDefStore()
const { t } = useI18n()
@@ -110,7 +110,9 @@ const targetNode = computed<LGraphNode | null>(() => {
const hasDirectError = computed(() => {
if (!targetNode.value) return false
return executionStore.activeGraphErrorNodeIds.has(String(targetNode.value.id))
return executionErrorStore.activeGraphErrorNodeIds.has(
String(targetNode.value.id)
)
})
const hasContainerInternalError = computed(() => {
@@ -119,7 +121,7 @@ const hasContainerInternalError = computed(() => {
targetNode.value instanceof SubgraphNode || isGroupNode(targetNode.value)
if (!isContainer) return false
return executionStore.hasInternalErrorForNode(targetNode.value.id)
return executionErrorStore.isContainerWithInternalError(targetNode.value)
})
const nodeHasError = computed(() => {

View File

@@ -53,6 +53,7 @@ import NodeSearchCategoryTreeNode, {
CATEGORY_UNSELECTED_CLASS
} from '@/components/searchbox/v2/NodeSearchCategoryTreeNode.vue'
import type { CategoryNode } from '@/components/searchbox/v2/NodeSearchCategoryTreeNode.vue'
import { useFeatureFlags } from '@/composables/useFeatureFlags'
import { nodeOrganizationService } from '@/services/nodeOrganizationService'
import { useNodeDefStore } from '@/stores/nodeDefStore'
import { NodeSourceType } from '@/types/nodeSource'
@@ -64,6 +65,7 @@ const selectedCategory = defineModel<string>('selectedCategory', {
})
const { t } = useI18n()
const { flags } = useFeatureFlags()
const nodeDefStore = useNodeDefStore()
const topCategories = computed(() => [
@@ -79,7 +81,7 @@ const hasEssentialNodes = computed(() =>
const sourceCategories = computed(() => {
const categories = []
if (hasEssentialNodes.value) {
if (flags.nodeLibraryEssentialsEnabled && hasEssentialNodes.value) {
categories.push({ id: 'essentials', label: t('g.essentials') })
}
categories.push({ id: 'custom', label: t('g.custom') })

View File

@@ -592,8 +592,7 @@ const enterFolderView = async (asset: AssetItem) => {
toast.add({
severity: 'error',
summary: t('sideToolbar.folderView.errorSummary'),
detail: t('sideToolbar.folderView.errorDetail'),
life: 5000
detail: t('sideToolbar.folderView.errorDetail')
})
exitFolderView()
}
@@ -639,8 +638,7 @@ const copyJobId = async () => {
toast.add({
severity: 'error',
summary: t('mediaAsset.jobIdToast.error'),
detail: t('mediaAsset.jobIdToast.jobIdCopyFailed'),
life: 3000
detail: t('mediaAsset.jobIdToast.jobIdCopyFailed')
})
}
}

View File

@@ -52,7 +52,7 @@
:value="tab.value"
:class="
cn(
'select-none border-none outline-none px-3 py-2 rounded-lg cursor-pointer',
'flex-1 text-center select-none border-none outline-none px-3 py-2 rounded-lg cursor-pointer',
'text-sm text-foreground transition-colors',
selectedTab === tab.value
? 'bg-comfy-input font-bold'
@@ -70,7 +70,9 @@
<!-- Tab content (scrollable) -->
<TabsRoot v-model="selectedTab" class="h-full">
<EssentialNodesPanel
v-if="selectedTab === 'essentials'"
v-if="
flags.nodeLibraryEssentialsEnabled && selectedTab === 'essentials'
"
v-model:expanded-keys="expandedKeys"
:root="renderedEssentialRoot"
@node-click="handleNodeClick"
@@ -109,10 +111,11 @@ import {
TabsRoot,
TabsTrigger
} from 'reka-ui'
import { computed, nextTick, onMounted, ref } from 'vue'
import { computed, nextTick, onMounted, ref, watchEffect } from 'vue'
import { useI18n } from 'vue-i18n'
import SearchBox from '@/components/common/SearchBoxV2.vue'
import { useFeatureFlags } from '@/composables/useFeatureFlags'
import { useNodeDragToCanvas } from '@/composables/node/useNodeDragToCanvas'
import { usePerTabState } from '@/composables/usePerTabState'
import {
@@ -136,11 +139,22 @@ import EssentialNodesPanel from './nodeLibrary/EssentialNodesPanel.vue'
import NodeDragPreview from './nodeLibrary/NodeDragPreview.vue'
import SidebarTabTemplate from './SidebarTabTemplate.vue'
const { flags } = useFeatureFlags()
const selectedTab = useLocalStorage<TabId>(
'Comfy.NodeLibrary.Tab',
DEFAULT_TAB_ID
)
watchEffect(() => {
if (
!flags.nodeLibraryEssentialsEnabled &&
selectedTab.value === 'essentials'
) {
selectedTab.value = DEFAULT_TAB_ID
}
})
const sortOrderByTab = useLocalStorage<Record<TabId, SortingStrategyId>>(
'Comfy.NodeLibrary.SortByTab',
{
@@ -324,11 +338,21 @@ async function handleSearch() {
expandedKeys.value = allKeys
}
const tabs = computed(() => [
{ value: 'essentials', label: t('sideToolbar.nodeLibraryTab.essentials') },
{ value: 'all', label: t('sideToolbar.nodeLibraryTab.allNodes') },
{ value: 'custom', label: t('sideToolbar.nodeLibraryTab.custom') }
])
const tabs = computed(() => {
const baseTabs: Array<{ value: TabId; label: string }> = [
{ value: 'all', label: t('sideToolbar.nodeLibraryTab.allNodes') },
{ value: 'custom', label: t('sideToolbar.nodeLibraryTab.custom') }
]
return flags.nodeLibraryEssentialsEnabled
? [
{
value: 'essentials' as TabId,
label: t('sideToolbar.nodeLibraryTab.essentials')
},
...baseTabs
]
: baseTabs
})
onMounted(() => {
searchBoxRef.value?.focus()

View File

@@ -4,6 +4,7 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'
import { computed, nextTick, watch } from 'vue'
import { useGraphNodeManager } from '@/composables/graph/useGraphNodeManager'
import { createPromotedWidgetView } from '@/core/graph/subgraph/promotedWidgetView'
import { BaseWidget, LGraph, LGraphNode } from '@/lib/litegraph/src/litegraph'
import { NodeSlotType } from '@/lib/litegraph/src/types/globalEnums'
import { useWidgetValueStore } from '@/stores/widgetValueStore'
@@ -74,3 +75,193 @@ describe('Node Reactivity', () => {
expect(widgetValue.value).toBe(99)
})
})
// Verifies that SafeWidgetData.slotMetadata stays reactive when a link is
// disconnected: LiteGraph mutates input.link and then fires
// 'node:slot-links:changed', and the Vue-side widget data must observe it.
describe('Widget slotMetadata reactivity on link disconnect', () => {
  beforeEach(() => {
    setActivePinia(createTestingPinia({ stubActions: false }))
  })

  // Builds a one-node graph whose 'prompt' widget is backed by an input slot
  // with an active link (id 42) — the "widget converted to input" shape.
  function createWidgetInputGraph() {
    const graph = new LGraph()
    const node = new LGraphNode('test')
    // Add a widget and an associated input slot (simulates "widget converted to input")
    node.addWidget('string', 'prompt', 'hello', () => undefined, {})
    const input = node.addInput('prompt', 'STRING')
    // Associate the input slot with the widget (as widgetInputs extension does)
    input.widget = { name: 'prompt' }
    // Start with a connected link
    input.link = 42
    graph.add(node)
    return { graph, node }
  }

  it('sets slotMetadata.linked to true when input has a link', () => {
    const { graph, node } = createWidgetInputGraph()
    const { vueNodeData } = useGraphNodeManager(graph)
    const nodeData = vueNodeData.get(String(node.id))
    const widgetData = nodeData?.widgets?.find((w) => w.name === 'prompt')
    expect(widgetData?.slotMetadata).toBeDefined()
    expect(widgetData?.slotMetadata?.linked).toBe(true)
  })

  it('updates slotMetadata.linked to false after link disconnect event', async () => {
    const { graph, node } = createWidgetInputGraph()
    const { vueNodeData } = useGraphNodeManager(graph)
    const nodeData = vueNodeData.get(String(node.id))
    const widgetData = nodeData?.widgets?.find((w) => w.name === 'prompt')
    // Verify initially linked
    expect(widgetData?.slotMetadata?.linked).toBe(true)
    // Simulate link disconnection (as LiteGraph does before firing the event)
    node.inputs[0].link = null
    // Fire the trigger event that LiteGraph fires on disconnect
    graph.trigger('node:slot-links:changed', {
      nodeId: node.id,
      slotType: NodeSlotType.INPUT,
      slotIndex: 0,
      connected: false,
      linkId: 42
    })
    await nextTick()
    // slotMetadata.linked should now be false
    expect(widgetData?.slotMetadata?.linked).toBe(false)
  })

  it('reactively updates disabled state in a derived computed after disconnect', async () => {
    const { graph, node } = createWidgetInputGraph()
    const { vueNodeData } = useGraphNodeManager(graph)
    const nodeData = vueNodeData.get(String(node.id))!
    // Mimic what processedWidgets does in NodeWidgets.vue:
    // derive disabled from slotMetadata.linked
    const derivedDisabled = computed(() => {
      const widgets = nodeData.widgets ?? []
      const widget = widgets.find((w) => w.name === 'prompt')
      return widget?.slotMetadata?.linked ? true : false
    })
    // Initially linked → disabled
    expect(derivedDisabled.value).toBe(true)
    // Track changes
    const onChange = vi.fn()
    watch(derivedDisabled, onChange)
    // Simulate disconnect
    node.inputs[0].link = null
    graph.trigger('node:slot-links:changed', {
      nodeId: node.id,
      slotType: NodeSlotType.INPUT,
      slotIndex: 0,
      connected: false,
      linkId: 42
    })
    await nextTick()
    // The derived computed should now return false — and exactly one
    // watcher invocation proves the update propagated reactively.
    expect(derivedDisabled.value).toBe(false)
    expect(onChange).toHaveBeenCalledTimes(1)
  })

  it('updates slotMetadata for promoted widgets where SafeWidgetData.name differs from input.widget.name', async () => {
    // Set up a subgraph with an interior node that has a "prompt" widget.
    // createPromotedWidgetView resolves against this interior node.
    const subgraph = createTestSubgraph()
    const interiorNode = new LGraphNode('interior')
    interiorNode.id = 10
    interiorNode.addWidget('string', 'prompt', 'hello', () => undefined, {})
    subgraph.add(interiorNode)
    const subgraphNode = createTestSubgraphNode(subgraph, { id: 123 })
    // Create a PromotedWidgetView with displayName="value" (subgraph input
    // slot name) and sourceWidgetName="prompt" (interior widget name).
    // PromotedWidgetView.name returns "value", but safeWidgetMapper sets
    // SafeWidgetData.name to sourceWidgetName ("prompt").
    const promotedView = createPromotedWidgetView(
      subgraphNode,
      '10',
      'prompt',
      'value'
    )
    // Host the promoted view on a regular node so we can control widgets
    // directly (SubgraphNode.widgets is a synthetic getter).
    const graph = new LGraph()
    const hostNode = new LGraphNode('host')
    hostNode.widgets = [promotedView]
    const input = hostNode.addInput('value', 'STRING')
    input.widget = { name: 'value' }
    input.link = 42
    graph.add(hostNode)
    const { vueNodeData } = useGraphNodeManager(graph)
    const nodeData = vueNodeData.get(String(hostNode.id))
    // SafeWidgetData.name is "prompt" (sourceWidgetName), but the
    // input slot widget name is "value" — slotName bridges this gap.
    const widgetData = nodeData?.widgets?.find((w) => w.name === 'prompt')
    expect(widgetData).toBeDefined()
    expect(widgetData?.slotName).toBe('value')
    expect(widgetData?.slotMetadata?.linked).toBe(true)
    // Disconnect
    hostNode.inputs[0].link = null
    graph.trigger('node:slot-links:changed', {
      nodeId: hostNode.id,
      slotType: NodeSlotType.INPUT,
      slotIndex: 0,
      connected: false,
      linkId: 42
    })
    await nextTick()
    expect(widgetData?.slotMetadata?.linked).toBe(false)
  })
})
// Promoted "$$" pseudo widgets (e.g. canvas image previews) must be flagged
// canvasOnly so the Vue widget layer skips rendering them as real widgets.
describe('Subgraph Promoted Pseudo Widgets', () => {
  beforeEach(() => {
    setActivePinia(createTestingPinia({ stubActions: false }))
  })

  it('marks promoted $$ widgets as canvasOnly for Vue widget rendering', () => {
    const subgraph = createTestSubgraph()
    const interiorNode = new LGraphNode('interior')
    interiorNode.id = 10
    subgraph.add(interiorNode)
    const subgraphNode = createTestSubgraphNode(subgraph, { id: 123 })
    const graph = subgraphNode.graph as LGraph
    graph.add(subgraphNode)
    // Promote the interior node's pseudo widget onto the subgraph node.
    usePromotionStore().promote(
      subgraphNode.rootGraph.id,
      subgraphNode.id,
      '10',
      '$$canvas-image-preview'
    )
    const { vueNodeData } = useGraphNodeManager(graph)
    const vueNode = vueNodeData.get(String(subgraphNode.id))
    const promotedWidget = vueNode?.widgets?.find(
      (widget) => widget.name === '$$canvas-image-preview'
    )
    expect(promotedWidget).toBeDefined()
    // safeWidgetMapper injects canvasOnly for "$$" pseudo widgets.
    expect(promotedWidget?.options?.canvasOnly).toBe(true)
  })
})

View File

@@ -68,6 +68,12 @@ export interface SafeWidgetData {
spec?: InputSpec
/** Input slot metadata (index and link status) */
slotMetadata?: WidgetSlotMetadata
/**
* Original LiteGraph widget name used for slot metadata matching.
* For promoted widgets, `name` is `sourceWidgetName` (interior widget name)
* which differs from the subgraph node's input slot widget name.
*/
slotName?: string
}
export interface VueNodeData {
@@ -226,9 +232,12 @@ function safeWidgetMapper(
...sharedEnhancements,
callback,
hasLayoutSize: typeof widget.computeLayoutSize === 'function',
isDOMWidget: isDOMWidget(widget),
options,
slotMetadata: slotInfo
isDOMWidget: isDOMWidget(widget) || isPromotedDOMWidget(widget),
options: isPromotedPseudoWidget
? { ...options, canvasOnly: true }
: options,
slotMetadata: slotInfo,
slotName: name !== widget.name ? widget.name : undefined
}
} catch (error) {
return {
@@ -341,7 +350,7 @@ export function useGraphNodeManager(graph: LGraph): GraphNodeManager {
// Update only widgets with new slot metadata, keeping other widget data intact
for (const widget of currentData.widgets ?? []) {
const slotInfo = slotMetadata.get(widget.name)
const slotInfo = slotMetadata.get(widget.slotName ?? widget.name)
if (slotInfo) widget.slotMetadata = slotInfo
}
}

View File

@@ -394,8 +394,7 @@ export function useCoreCommands(): ComfyCommand[] {
if (app.canvas.empty) {
toastStore.add({
severity: 'error',
summary: t('toastMessages.emptyCanvas'),
life: 3000
summary: t('toastMessages.emptyCanvas')
})
return
}
@@ -554,8 +553,7 @@ export function useCoreCommands(): ComfyCommand[] {
toastStore.add({
severity: 'error',
summary: t('toastMessages.nothingToQueue'),
detail: t('toastMessages.pleaseSelectOutputNodes'),
life: 3000
detail: t('toastMessages.pleaseSelectOutputNodes')
})
return
}
@@ -568,8 +566,7 @@ export function useCoreCommands(): ComfyCommand[] {
toastStore.add({
severity: 'error',
summary: t('toastMessages.failedToQueue'),
detail: t('toastMessages.failedExecutionPathResolution'),
life: 3000
detail: t('toastMessages.failedExecutionPathResolution')
})
return
}
@@ -599,8 +596,7 @@ export function useCoreCommands(): ComfyCommand[] {
toastStore.add({
severity: 'error',
summary: t('toastMessages.nothingToGroup'),
detail: t('toastMessages.pleaseSelectNodesToGroup'),
life: 3000
detail: t('toastMessages.pleaseSelectNodesToGroup')
})
return
}
@@ -945,8 +941,7 @@ export function useCoreCommands(): ComfyCommand[] {
toastStore.add({
severity: 'error',
summary: t('g.error'),
detail: t('manager.notAvailable'),
life: 3000
detail: t('manager.notAvailable')
})
return
}
@@ -1031,8 +1026,7 @@ export function useCoreCommands(): ComfyCommand[] {
toastStore.add({
severity: 'error',
summary: t('toastMessages.cannotCreateSubgraph'),
detail: t('toastMessages.failedToConvertToSubgraph'),
life: 3000
detail: t('toastMessages.failedToConvertToSubgraph')
})
return
}
@@ -1241,8 +1235,7 @@ export function useCoreCommands(): ComfyCommand[] {
summary: t('g.error'),
detail: t('g.commandProhibited', {
command: 'Comfy.Memory.UnloadModels'
}),
life: 3000
})
})
return
}
@@ -1261,8 +1254,7 @@ export function useCoreCommands(): ComfyCommand[] {
summary: t('g.error'),
detail: t('g.commandProhibited', {
command: 'Comfy.Memory.UnloadModelsAndExecutionCache'
}),
life: 3000
})
})
return
}

View File

@@ -21,7 +21,8 @@ export enum ServerFeatureFlag {
LINEAR_TOGGLE_ENABLED = 'linear_toggle_enabled',
TEAM_WORKSPACES_ENABLED = 'team_workspaces_enabled',
USER_SECRETS_ENABLED = 'user_secrets_enabled',
NODE_REPLACEMENTS = 'node_replacements'
NODE_REPLACEMENTS = 'node_replacements',
NODE_LIBRARY_ESSENTIALS_ENABLED = 'node_library_essentials_enabled'
}
/**
@@ -100,6 +101,17 @@ export function useFeatureFlags() {
},
get nodeReplacementsEnabled() {
return api.getServerFeature(ServerFeatureFlag.NODE_REPLACEMENTS, false)
},
get nodeLibraryEssentialsEnabled() {
if (isNightly || import.meta.env.DEV) return true
return (
remoteConfig.value.node_library_essentials_enabled ??
api.getServerFeature(
ServerFeatureFlag.NODE_LIBRARY_ESSENTIALS_ENABLED,
false
)
)
}
})

View File

@@ -0,0 +1,161 @@
import { createTestingPinia } from '@pinia/testing'
import { setActivePinia } from 'pinia'
import { beforeEach, describe, expect, test, vi } from 'vitest'
import { resolveSubgraphInputTarget } from '@/core/graph/subgraph/resolveSubgraphInputTarget'
import { LGraphNode } from '@/lib/litegraph/src/litegraph'
import {
createTestSubgraph,
createTestSubgraphNode
} from '@/lib/litegraph/src/subgraph/__fixtures__/subgraphHelpers'
import type { Subgraph } from '@/lib/litegraph/src/subgraph/Subgraph'
import type { SubgraphNode } from '@/lib/litegraph/src/subgraph/SubgraphNode'
// Stub stores/services imported by the module under test with inert
// implementations — presumably pulled in transitively by
// resolveSubgraphInputTarget's module graph (TODO confirm); the resolver
// itself does not exercise them.
vi.mock('@/renderer/core/canvas/canvasStore', () => ({
  useCanvasStore: () => ({})
}))
vi.mock('@/stores/domWidgetStore', () => ({
  useDomWidgetStore: () => ({ widgetStates: new Map() })
}))
vi.mock('@/services/litegraphService', () => ({
  useLitegraphService: () => ({ updatePreviews: () => ({}) })
}))
/**
 * Builds the outer test subgraph with one '*'-typed input slot per entry in
 * `inputNames`, plus the SubgraphNode (id 1) that wraps it.
 */
function createOuterSubgraphSetup(inputNames: string[]): {
  outerSubgraph: Subgraph
  outerSubgraphNode: SubgraphNode
} {
  const inputs = inputNames.map((name) => ({ name, type: '*' }))
  const outerSubgraph = createTestSubgraph({ inputs })
  return {
    outerSubgraph,
    outerSubgraphNode: createTestSubgraphNode(outerSubgraph, { id: 1 })
  }
}
/**
 * Nests a SubgraphNode (id 819) inside `outerSubgraph` and links the outer
 * input slot `inputName` to a freshly added input `linkedInputName` on it.
 * When `options.widget` is set, that input is backed by a number widget —
 * the "widget-backed input" case the resolver must detect.
 * Throws if the outer slot is missing or the link fails to materialise.
 */
function addLinkedNestedSubgraphNode(
  outerSubgraph: Subgraph,
  inputName: string,
  linkedInputName: string,
  options: { widget?: string } = {}
): { innerSubgraphNode: SubgraphNode } {
  const innerSubgraphNode = createTestSubgraphNode(
    createTestSubgraph({ inputs: [{ name: linkedInputName, type: '*' }] }),
    { id: 819 }
  )
  outerSubgraph.add(innerSubgraphNode)

  const inputSlot = outerSubgraph.inputNode.slots.find(
    (slot) => slot.name === inputName
  )
  if (!inputSlot) throw new Error(`Missing subgraph input slot: ${inputName}`)

  const input = innerSubgraphNode.addInput(linkedInputName, '*')
  const widgetName = options.widget
  if (widgetName) {
    innerSubgraphNode.addWidget('number', widgetName, 0, () => undefined)
    input.widget = { name: widgetName }
  }

  inputSlot.connect(input, innerSubgraphNode)
  if (input.link == null) {
    throw new Error(`Expected link to be created for input ${linkedInputName}`)
  }
  return { innerSubgraphNode }
}
// Fresh Pinia (real actions, not stubbed) and clean mock state per test.
beforeEach(() => {
  setActivePinia(createTestingPinia({ stubActions: false }))
  vi.clearAllMocks()
})
// Covers the fix for non-widget inputs on nested subgraphs appearing as
// button widgets: only widget-backed links may resolve to a target.
describe('resolveSubgraphInputTarget', () => {
  test('returns target for widget-backed input on nested SubgraphNode', () => {
    const { outerSubgraph, outerSubgraphNode } = createOuterSubgraphSetup([
      'width'
    ])
    addLinkedNestedSubgraphNode(outerSubgraph, 'width', 'width', {
      widget: 'width'
    })
    const result = resolveSubgraphInputTarget(outerSubgraphNode, 'width')
    // '819' is the fixed id the helper assigns to the nested SubgraphNode.
    expect(result).toMatchObject({
      nodeId: '819',
      widgetName: 'width'
    })
  })

  test('returns undefined for non-widget input on nested SubgraphNode', () => {
    const { outerSubgraph, outerSubgraphNode } = createOuterSubgraphSetup([
      'audio'
    ])
    // No `widget` option: the nested input is a plain (non-widget) input.
    addLinkedNestedSubgraphNode(outerSubgraph, 'audio', 'audio')
    const result = resolveSubgraphInputTarget(outerSubgraphNode, 'audio')
    expect(result).toBeUndefined()
  })

  test('resolves widget inputs but not non-widget inputs on the same nested SubgraphNode', () => {
    const { outerSubgraph, outerSubgraphNode } = createOuterSubgraphSetup([
      'width',
      'audio'
    ])
    addLinkedNestedSubgraphNode(outerSubgraph, 'width', 'width', {
      widget: 'width'
    })
    addLinkedNestedSubgraphNode(outerSubgraph, 'audio', 'audio')
    expect(
      resolveSubgraphInputTarget(outerSubgraphNode, 'width')
    ).toMatchObject({
      nodeId: '819',
      widgetName: 'width'
    })
    expect(
      resolveSubgraphInputTarget(outerSubgraphNode, 'audio')
    ).toBeUndefined()
  })

  test('returns target for widget-backed input on plain interior node', () => {
    const { outerSubgraph, outerSubgraphNode } = createOuterSubgraphSetup([
      'seed'
    ])
    const inputSlot = outerSubgraph.inputNode.slots.find(
      (slot) => slot.name === 'seed'
    )!
    const node = new LGraphNode('Interior-seed')
    node.id = 42
    // Input slot name differs from the widget name: for plain interior
    // nodes the resolver must report the widget's name ('seed').
    const input = node.addInput('seed_input', '*')
    node.addWidget('number', 'seed', 0, () => undefined)
    input.widget = { name: 'seed' }
    outerSubgraph.add(node)
    inputSlot.connect(input, node)
    const result = resolveSubgraphInputTarget(outerSubgraphNode, 'seed')
    expect(result).toMatchObject({
      nodeId: '42',
      widgetName: 'seed'
    })
  })

  test('returns undefined for non-widget input on plain interior node', () => {
    const { outerSubgraph, outerSubgraphNode } = createOuterSubgraphSetup([
      'image'
    ])
    const inputSlot = outerSubgraph.inputNode.slots.find(
      (slot) => slot.name === 'image'
    )!
    const node = new LGraphNode('Interior-image')
    const input = node.addInput('image_input', '*')
    outerSubgraph.add(node)
    inputSlot.connect(input, node)
    const result = resolveSubgraphInputTarget(outerSubgraphNode, 'image')
    expect(result).toBeUndefined()
  })
})

View File

@@ -0,0 +1,37 @@
import type { LGraphNode } from '@/lib/litegraph/src/litegraph'
import { resolveSubgraphInputLink } from './resolveSubgraphInputLink'
/**
 * Interior node + widget pair that an outer subgraph input resolves to.
 * `nodeId` is the stringified id of the linked interior node; `widgetName`
 * is the widget on that node which the promoted input drives.
 */
type ResolvedSubgraphInputTarget = {
  nodeId: string
  widgetName: string
}
/**
 * Resolves which interior node/widget a subgraph input slot `inputName` on
 * `node` ultimately targets. Returns `undefined` when the linked input is
 * not widget-backed, so non-widget inputs never surface as widgets on the
 * outer node. For nested SubgraphNodes the promoted input slot's own name
 * is reported; for plain interior nodes the backing widget's name is used.
 */
export function resolveSubgraphInputTarget(
  node: LGraphNode,
  inputName: string
): ResolvedSubgraphInputTarget | undefined {
  return resolveSubgraphInputLink(
    node,
    inputName,
    ({ inputNode, targetInput, getTargetWidget }) => {
      // A target exists only for widget-backed inputs; bail out otherwise.
      const targetWidget = getTargetWidget()
      if (!targetWidget) return undefined
      // Nested SubgraphNode: the visible widget is named after its input
      // slot; plain node: use the widget's own name.
      const widgetName = inputNode.isSubgraphNode()
        ? targetInput.name
        : targetWidget.name
      return { nodeId: String(inputNode.id), widgetName }
    }
  )
}

View File

@@ -6,6 +6,7 @@ import { LGraphNode } from '@/lib/litegraph/src/LGraphNode'
import { LLink } from '@/lib/litegraph/src/litegraph'
import type { ComfyNodeDef } from '@/schemas/nodeDefSchema'
import { app } from '@/scripts/app'
import { useWidgetValueStore } from '@/stores/widgetValueStore'
function applyToGraph(this: LGraphNode, extraLinks: LLink[] = []) {
if (!this.outputs[0].links?.length || !this.graph) return
@@ -74,17 +75,25 @@ function onCustomComboCreated(this: LGraphNode) {
function addOption(node: LGraphNode) {
if (!node.widgets) return
const newCount = node.widgets.length - 1
node.addWidget('string', `option${newCount}`, '', () => {})
const widget = node.widgets.at(-1)
const widgetName = `option${newCount}`
const widget = node.addWidget('string', widgetName, '', () => {})
if (!widget) return
let value = ''
Object.defineProperty(widget, 'value', {
get() {
return value
return useWidgetValueStore().getWidget(
app.rootGraph.id,
node.id,
widgetName
)?.value
},
set(v) {
value = v
set(v: string) {
const state = useWidgetValueStore().getWidget(
app.rootGraph.id,
node.id,
widgetName
)
if (state) state.value = v
updateCombo()
if (!node.widgets) return
const lastWidget = node.widgets.at(-1)

View File

@@ -204,8 +204,7 @@ import { electronAPI as getElectronAPI } from '@/utils/envUtil'
toastStore.add({
severity: 'error',
summary: t('g.error'),
detail: t('desktopUpdate.errorInstallingUpdate'),
life: 10_000
detail: t('desktopUpdate.errorInstallingUpdate')
})
}
}
@@ -214,8 +213,7 @@ import { electronAPI as getElectronAPI } from '@/utils/envUtil'
toastStore.add({
severity: 'error',
summary: t('g.error'),
detail: t('desktopUpdate.errorCheckingUpdate'),
life: 10_000
detail: t('desktopUpdate.errorCheckingUpdate')
})
}
}

View File

@@ -7,11 +7,19 @@ import {
LiteGraph,
LLink
} from '@/lib/litegraph/src/litegraph'
import type { SerialisableGraph } from '@/lib/litegraph/src/types/serialisation'
import type { UUID } from '@/lib/litegraph/src/utils/uuid'
import { usePromotionStore } from '@/stores/promotionStore'
import { useWidgetValueStore } from '@/stores/widgetValueStore'
import {
createTestSubgraphData,
createTestSubgraphNode
} from './subgraph/__fixtures__/subgraphHelpers'
import { duplicateSubgraphNodeIds } from './__fixtures__/duplicateSubgraphNodeIds'
import { nestedSubgraphProxyWidgets } from './__fixtures__/nestedSubgraphProxyWidgets'
import { nodeIdSpaceExhausted } from './__fixtures__/nodeIdSpaceExhausted'
import { uniqueSubgraphNodeIds } from './__fixtures__/uniqueSubgraphNodeIds'
import { test } from './__fixtures__/testExtensions'
function swapNodes(nodes: LGraphNode[]) {
@@ -484,3 +492,228 @@ describe('ensureGlobalIdUniqueness', () => {
expect(subNode.id).toBe(subId)
})
})
describe('Subgraph Unpacking', () => {
  // Minimal node: one number input, one number output.
  class TestNode extends LGraphNode {
    constructor(title?: string) {
      super(title ?? 'TestNode')
      this.addInput('input_0', 'number')
      this.addOutput('output_0', 'number')
    }
  }

  // Two-input node, used to detect links landing on the wrong slot index.
  class MultiInputNode extends LGraphNode {
    constructor(title?: string) {
      super(title ?? 'MultiInputNode')
      this.addInput('input_0', 'number')
      this.addInput('input_1', 'number')
      this.addOutput('output_0', 'number')
    }
  }

  // NOTE(review): re-registering the same type names across tests appears to
  // be tolerated by LiteGraph.registerNodeType — confirm it overwrites.
  function registerTestNodes() {
    LiteGraph.registerNodeType('test/TestNode', TestNode)
    LiteGraph.registerNodeType('test/MultiInputNode', MultiInputNode)
  }

  function createSubgraphOnGraph(rootGraph: LGraph) {
    return rootGraph.createSubgraph(createTestSubgraphData())
  }

  it('deduplicates links when unpacking subgraph with duplicate links', () => {
    registerTestNodes()
    const rootGraph = new LGraph()
    const subgraph = createSubgraphOnGraph(rootGraph)
    const sourceNode = LiteGraph.createNode('test/TestNode', 'Source')!
    const targetNode = LiteGraph.createNode('test/TestNode', 'Target')!
    subgraph.add(sourceNode)
    subgraph.add(targetNode)

    // Create a legitimate link
    sourceNode.connect(0, targetNode, 0)
    expect(subgraph._links.size).toBe(1)

    // Manually add duplicate links (simulating the bug): same endpoints and
    // slots, but fresh link IDs registered in both the link map and the
    // output's links array.
    const existingLink = subgraph._links.values().next().value!
    for (let i = 0; i < 3; i++) {
      const dupLink = new LLink(
        ++subgraph.state.lastLinkId,
        existingLink.type,
        existingLink.origin_id,
        existingLink.origin_slot,
        existingLink.target_id,
        existingLink.target_slot
      )
      subgraph._links.set(dupLink.id, dupLink)
      sourceNode.outputs[0].links!.push(dupLink.id)
    }
    expect(subgraph._links.size).toBe(4)

    const subgraphNode = createTestSubgraphNode(subgraph, { pos: [100, 100] })
    rootGraph.add(subgraphNode)
    rootGraph.unpackSubgraph(subgraphNode)

    // After unpacking, there should be exactly 1 link (not 4)
    expect(rootGraph.links.size).toBe(1)
  })

  it('preserves correct link connections when unpacking with duplicate links', () => {
    registerTestNodes()
    const rootGraph = new LGraph()
    const subgraph = createSubgraphOnGraph(rootGraph)
    const sourceNode = LiteGraph.createNode('test/MultiInputNode', 'Source')!
    const targetNode = LiteGraph.createNode('test/MultiInputNode', 'Target')!
    subgraph.add(sourceNode)
    subgraph.add(targetNode)

    // Connect source output 0 → target input 0
    sourceNode.connect(0, targetNode, 0)

    // Add duplicate links to the same connection
    const existingLink = subgraph._links.values().next().value!
    const dupLink = new LLink(
      ++subgraph.state.lastLinkId,
      existingLink.type,
      existingLink.origin_id,
      existingLink.origin_slot,
      existingLink.target_id,
      existingLink.target_slot
    )
    subgraph._links.set(dupLink.id, dupLink)
    sourceNode.outputs[0].links!.push(dupLink.id)

    const subgraphNode = createTestSubgraphNode(subgraph, { pos: [100, 100] })
    rootGraph.add(subgraphNode)
    rootGraph.unpackSubgraph(subgraphNode)

    // Verify only 1 link exists
    expect(rootGraph.links.size).toBe(1)

    // Verify target input 1 does NOT have a link (no spurious connection)
    const unpackedTarget = rootGraph.nodes.find((n) => n.title === 'Target')!
    expect(unpackedTarget.inputs[0].link).not.toBeNull()
    expect(unpackedTarget.inputs[1].link).toBeNull()
  })
})
describe('deduplicateSubgraphNodeIds (via configure)', () => {
  // Subgraph definition UUIDs used by the duplicate-ID fixtures.
  const SUBGRAPH_A = '11111111-1111-4111-8111-111111111111' as UUID
  const SUBGRAPH_B = '22222222-2222-4222-8222-222222222222' as UUID
  // Node IDs present in BOTH subgraph definitions before deduplication.
  const SHARED_NODE_IDS = [3, 8, 37]

  beforeEach(() => {
    setActivePinia(createTestingPinia({ stubActions: false }))
    LiteGraph.registerNodeType('dummy', DummyNode)
  })

  // Deep-clone so each test mutates its own copy of the shared fixture.
  function loadFixture(): SerialisableGraph {
    return structuredClone(duplicateSubgraphNodeIds)
  }

  function configureFromFixture() {
    const graphData = loadFixture()
    const graph = new LGraph()
    graph.configure(graphData)
    return { graph, graphData }
  }

  function nodeIdSet(graph: LGraph, subgraphId: UUID) {
    return new Set(graph.subgraphs.get(subgraphId)!.nodes.map((n) => n.id))
  }

  it('remaps duplicate node IDs so subgraphs have no overlap', () => {
    const { graph } = configureFromFixture()
    const idsA = nodeIdSet(graph, SUBGRAPH_A)
    const idsB = nodeIdSet(graph, SUBGRAPH_B)
    // The first definition keeps its original IDs…
    for (const id of SHARED_NODE_IDS) {
      expect(idsA.has(id as NodeId)).toBe(true)
    }
    // …and the second definition's duplicates were remapped away.
    for (const id of idsA) {
      expect(idsB.has(id)).toBe(false)
    }
  })

  it('patches link references in remapped subgraph', () => {
    const { graph } = configureFromFixture()
    const idsB = nodeIdSet(graph, SUBGRAPH_B)
    // Every link endpoint must point at a node that exists post-remap.
    for (const link of graph.subgraphs.get(SUBGRAPH_B)!.links.values()) {
      expect(idsB.has(link.origin_id)).toBe(true)
      expect(idsB.has(link.target_id)).toBe(true)
    }
  })

  it('patches promoted widget references in remapped subgraph', () => {
    const { graph } = configureFromFixture()
    const idsB = nodeIdSet(graph, SUBGRAPH_B)
    for (const widget of graph.subgraphs.get(SUBGRAPH_B)!.widgets) {
      expect(idsB.has(widget.id)).toBe(true)
    }
  })

  it('patches proxyWidgets in root-level nodes referencing remapped IDs', () => {
    const { graph } = configureFromFixture()
    const idsA = new Set(
      graph.subgraphs.get(SUBGRAPH_A)!.nodes.map((n) => String(n.id))
    )
    const idsB = new Set(
      graph.subgraphs.get(SUBGRAPH_B)!.nodes.map((n) => String(n.id))
    )
    // Per the fixture, node 102 proxies into SubgraphA, node 103 into SubgraphB.
    const pw102 = graph.getNodeById(102 as NodeId)?.properties?.proxyWidgets
    expect(Array.isArray(pw102)).toBe(true)
    for (const entry of pw102 as unknown[][]) {
      expect(Array.isArray(entry)).toBe(true)
      expect(idsA.has(String(entry[0]))).toBe(true)
    }
    const pw103 = graph.getNodeById(103 as NodeId)?.properties?.proxyWidgets
    expect(Array.isArray(pw103)).toBe(true)
    for (const entry of pw103 as unknown[][]) {
      expect(Array.isArray(entry)).toBe(true)
      expect(idsB.has(String(entry[0]))).toBe(true)
    }
  })

  it('patches proxyWidgets inside nested subgraph nodes', () => {
    const graph = new LGraph()
    graph.configure(structuredClone(nestedSubgraphProxyWidgets))
    const idsB = new Set(
      graph.subgraphs.get(SUBGRAPH_B)!.nodes.map((n) => String(n.id))
    )
    // Node 50 lives inside SubgraphA but proxies widgets from SubgraphB.
    const innerNode = graph.subgraphs
      .get(SUBGRAPH_A)!
      .nodes.find((n) => n.id === (50 as NodeId))
    const pw = innerNode?.properties?.proxyWidgets
    expect(Array.isArray(pw)).toBe(true)
    for (const entry of pw as unknown[][]) {
      expect(Array.isArray(entry)).toBe(true)
      expect(idsB.has(String(entry[0]))).toBe(true)
    }
  })

  it('throws when node ID space is exhausted', () => {
    expect(() => {
      const graph = new LGraph()
      graph.configure(structuredClone(nodeIdSpaceExhausted))
    }).toThrow('Node ID space exhausted')
  })

  it('is a no-op when subgraph node IDs are already unique', () => {
    const graph = new LGraph()
    graph.configure(structuredClone(uniqueSubgraphNodeIds))
    expect(nodeIdSet(graph, SUBGRAPH_A)).toEqual(new Set([10, 11, 12]))
    expect(nodeIdSet(graph, SUBGRAPH_B)).toEqual(new Set([20, 21, 22]))
  })
})

View File

@@ -75,6 +75,7 @@ import type {
SerialisableReroute
} from './types/serialisation'
import { getAllNestedItems } from './utils/collections'
import { deduplicateSubgraphNodeIds } from './utils/subgraphDeduplication'
export type {
LGraphTriggerAction,
@@ -1929,15 +1930,20 @@ export class LGraph
node.id = this.last_node_id
n_info.id = this.last_node_id
// Strip links from serialized data before configure to prevent
// onConnectionsChange from resolving subgraph-internal link IDs
// against the parent graph's link map (which may contain unrelated
// links with the same numeric IDs).
for (const input of n_info.inputs ?? []) {
input.link = null
}
for (const output of n_info.outputs ?? []) {
output.links = []
}
this.add(node, true)
node.configure(n_info)
node.setPos(node.pos[0] + offsetX, node.pos[1] + offsetY)
for (const input of node.inputs) {
input.link = null
}
for (const output of node.outputs) {
output.links = []
}
toSelect.push(node)
}
const groups = structuredClone(
@@ -2043,8 +2049,19 @@ export class LGraph
}
this.remove(subgraphNode)
this.subgraphs.delete(subgraphNode.subgraph.id)
// Deduplicate links by (oid, oslot, tid, tslot) to prevent repeated
// disconnect/reconnect cycles on widget inputs that can shift slot indices.
const seenLinks = new Set<string>()
const dedupedNewLinks = newLinks.filter((link) => {
const key = `${link.oid}:${link.oslot}:${link.tid}:${link.tslot}`
if (seenLinks.has(key)) return false
seenLinks.add(key)
return true
})
const linkIdMap = new Map<LinkId, LinkId[]>()
for (const newLink of newLinks) {
for (const newLink of dedupedNewLinks) {
let created: LLink | null | undefined
if (newLink.oid == SUBGRAPH_INPUT_ID) {
if (!(this instanceof Subgraph)) {
@@ -2102,7 +2119,7 @@ export class LGraph
toSelect.push(migratedReroute)
}
//iterate over newly created links to update reroute parentIds
for (const newLink of newLinks) {
for (const newLink of dedupedNewLinks) {
const linkInstance = this.links.get(newLink.id)
if (!linkInstance) {
continue
@@ -2442,19 +2459,40 @@ export class LGraph
this[i] = data[i]
}
// Subgraph definitions
// Subgraph definitions — deduplicate node IDs before configuring.
// deduplicateSubgraphNodeIds clones internally to avoid mutating
// the caller's data (e.g. reactive Pinia state).
const subgraphs = data.definitions?.subgraphs
let effectiveNodesData = nodesData
if (subgraphs) {
for (const subgraph of subgraphs) this.createSubgraph(subgraph)
for (const subgraph of subgraphs)
this.subgraphs.get(subgraph.id)?.configure(subgraph)
}
const reservedNodeIds = new Set<number>()
for (const node of this._nodes) {
if (typeof node.id === 'number') reservedNodeIds.add(node.id)
}
for (const sg of this.subgraphs.values()) {
for (const node of sg.nodes) {
if (typeof node.id === 'number') reservedNodeIds.add(node.id)
}
}
for (const n of nodesData ?? []) {
if (typeof n.id === 'number') reservedNodeIds.add(n.id)
}
if (this.isRootGraph) {
const reservedNodeIds = nodesData
?.map((n) => n.id)
.filter((id): id is number => typeof id === 'number')
this.ensureGlobalIdUniqueness(reservedNodeIds)
const deduplicated = this.isRootGraph
? deduplicateSubgraphNodeIds(
subgraphs,
reservedNodeIds,
this.state,
nodesData
)
: undefined
const finalSubgraphs = deduplicated?.subgraphs ?? subgraphs
effectiveNodesData = deduplicated?.rootNodes ?? nodesData
for (const subgraph of finalSubgraphs) this.createSubgraph(subgraph)
for (const subgraph of finalSubgraphs)
this.subgraphs.get(subgraph.id)?.configure(subgraph)
}
let error = false
@@ -2462,8 +2500,8 @@ export class LGraph
// create nodes
this._nodes = []
if (nodesData) {
for (const n_info of nodesData) {
if (effectiveNodesData) {
for (const n_info of effectiveNodesData) {
// stored info
let node = LiteGraph.createNode(String(n_info.type), n_info.title)
if (!node) {

View File

@@ -4187,7 +4187,12 @@ export class LGraphNode
// Ref: https://github.com/Comfy-Org/ComfyUI_frontend/issues/2652
// TODO: Move the layout logic before drawing of the node shape, so we don't
// need to trigger extra round of rendering.
if (y > bodyHeight) {
// In Vue mode, the DOM is the source of truth for node sizing — the
// ResizeObserver feeds measurements back to the layout store. Allowing
// LiteGraph to also call setSize() here creates an infinite feedback loop
// (LG grows node → CSS min-height increases → textarea fills extra space →
// ResizeObserver reports larger size → LG grows node again).
if (!LiteGraph.vueNodesMode && y > bodyHeight) {
this.setSize([this.size[0], y])
this.graph.setDirtyCanvas(false, true)
}

View File

@@ -0,0 +1,163 @@
import type { SerialisableGraph } from '@/lib/litegraph/src/types/serialisation'
/**
* Workflow with two subgraph definitions whose internal nodes share
* identical IDs [3, 8, 37]. Reproduces the widget-state collision bug
* where copied subgraphs overwrote each other's widget store entries.
*
* SubgraphA (node 102): widgets reference node 3, link 3→8
* SubgraphB (node 103): widgets reference node 8, link 3→37
*/
export const duplicateSubgraphNodeIds = {
id: 'aaaaaaaa-aaaa-4aaa-8aaa-aaaaaaaaaaaa',
version: 1,
revision: 0,
state: {
lastNodeId: 100,
lastLinkId: 10,
lastGroupId: 0,
lastRerouteId: 0
},
nodes: [
{
id: 102,
type: '11111111-1111-4111-8111-111111111111',
pos: [0, 0],
size: [200, 100],
flags: {},
order: 0,
mode: 0,
properties: { proxyWidgets: [['3', 'seed']] }
},
{
id: 103,
type: '22222222-2222-4222-8222-222222222222',
pos: [300, 0],
size: [200, 100],
flags: {},
order: 1,
mode: 0,
properties: { proxyWidgets: [['8', 'prompt']] }
}
],
definitions: {
subgraphs: [
{
id: '11111111-1111-4111-8111-111111111111',
version: 1,
revision: 0,
state: {
lastNodeId: 0,
lastLinkId: 0,
lastGroupId: 0,
lastRerouteId: 0
},
name: 'SubgraphA',
config: {},
inputNode: { id: -10, bounding: [10, 100, 150, 126] },
outputNode: { id: -20, bounding: [400, 100, 140, 126] },
inputs: [],
outputs: [],
widgets: [{ id: 3, name: 'seed' }],
nodes: [
{
id: 3,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 0,
mode: 0
},
{
id: 8,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 1,
mode: 0
},
{
id: 37,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 2,
mode: 0
}
],
links: [
{
id: 1,
origin_id: 3,
origin_slot: 0,
target_id: 8,
target_slot: 0,
type: 'number'
}
],
groups: []
},
{
id: '22222222-2222-4222-8222-222222222222',
version: 1,
revision: 0,
state: {
lastNodeId: 0,
lastLinkId: 0,
lastGroupId: 0,
lastRerouteId: 0
},
name: 'SubgraphB',
config: {},
inputNode: { id: -10, bounding: [10, 100, 150, 126] },
outputNode: { id: -20, bounding: [400, 100, 140, 126] },
inputs: [],
outputs: [],
widgets: [{ id: 8, name: 'prompt' }],
nodes: [
{
id: 3,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 0,
mode: 0
},
{
id: 8,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 1,
mode: 0
},
{
id: 37,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 2,
mode: 0
}
],
links: [
{
id: 2,
origin_id: 3,
origin_slot: 0,
target_id: 37,
target_slot: 0,
type: 'string'
}
],
groups: []
}
]
}
} as const satisfies SerialisableGraph

View File

@@ -0,0 +1,177 @@
import type { SerialisableGraph } from '@/lib/litegraph/src/types/serialisation'
/**
* Workflow where SubgraphA contains a nested SubgraphNode referencing
* SubgraphB. Both subgraph definitions share internal node IDs [3, 8, 37].
*
* The nested SubgraphNode (id 50, inside SubgraphA) has proxyWidgets
* pointing at SubgraphB's node 8. After deduplication remaps SubgraphB's
* nodes, the nested proxyWidgets must also be patched.
*
* SubgraphA (node 102): widgets reference node 3, link 3→8,
* contains nested SubgraphNode(50) → SubgraphB with proxyWidget ['8']
* SubgraphB (node 103): widgets reference node 8, link 3→37
*/
export const nestedSubgraphProxyWidgets = {
id: 'bbbbbbbb-bbbb-4bbb-8bbb-bbbbbbbbbbbb',
version: 1,
revision: 0,
state: {
lastNodeId: 100,
lastLinkId: 10,
lastGroupId: 0,
lastRerouteId: 0
},
nodes: [
{
id: 102,
type: '11111111-1111-4111-8111-111111111111',
pos: [0, 0],
size: [200, 100],
flags: {},
order: 0,
mode: 0,
properties: { proxyWidgets: [['3', 'seed']] }
},
{
id: 103,
type: '22222222-2222-4222-8222-222222222222',
pos: [300, 0],
size: [200, 100],
flags: {},
order: 1,
mode: 0,
properties: { proxyWidgets: [['8', 'prompt']] }
}
],
definitions: {
subgraphs: [
{
id: '11111111-1111-4111-8111-111111111111',
version: 1,
revision: 0,
state: {
lastNodeId: 0,
lastLinkId: 0,
lastGroupId: 0,
lastRerouteId: 0
},
name: 'SubgraphA',
config: {},
inputNode: { id: -10, bounding: [10, 100, 150, 126] },
outputNode: { id: -20, bounding: [400, 100, 140, 126] },
inputs: [],
outputs: [],
widgets: [{ id: 3, name: 'seed' }],
nodes: [
{
id: 3,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 0,
mode: 0
},
{
id: 8,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 1,
mode: 0
},
{
id: 37,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 2,
mode: 0
},
{
id: 50,
type: '22222222-2222-4222-8222-222222222222',
pos: [200, 0],
size: [100, 50],
flags: {},
order: 3,
mode: 0,
properties: { proxyWidgets: [['8', 'prompt']] }
}
],
links: [
{
id: 1,
origin_id: 3,
origin_slot: 0,
target_id: 8,
target_slot: 0,
type: 'number'
}
],
groups: []
},
{
id: '22222222-2222-4222-8222-222222222222',
version: 1,
revision: 0,
state: {
lastNodeId: 0,
lastLinkId: 0,
lastGroupId: 0,
lastRerouteId: 0
},
name: 'SubgraphB',
config: {},
inputNode: { id: -10, bounding: [10, 100, 150, 126] },
outputNode: { id: -20, bounding: [400, 100, 140, 126] },
inputs: [],
outputs: [],
widgets: [{ id: 8, name: 'prompt' }],
nodes: [
{
id: 3,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 0,
mode: 0
},
{
id: 8,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 1,
mode: 0
},
{
id: 37,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 2,
mode: 0
}
],
links: [
{
id: 2,
origin_id: 3,
origin_slot: 0,
target_id: 37,
target_slot: 0,
type: 'string'
}
],
groups: []
}
]
}
} as const satisfies SerialisableGraph

View File

@@ -0,0 +1,172 @@
import type { SerialisableGraph } from '@/lib/litegraph/src/types/serialisation'
/**
* Workflow where lastNodeId is near the MAX_NODE_ID ceiling (100_000_000)
* and root node 100_000_000 reserves the only remaining candidate ID.
*
* Both subgraph definitions share node IDs [3, 8, 37]. When SubgraphB's
* duplicates need remapping, candidate 100_000_000 is already reserved,
* so the next candidate (100_000_001) exceeds MAX_NODE_ID and must throw.
*/
export const nodeIdSpaceExhausted = {
id: 'cccccccc-cccc-4ccc-8ccc-cccccccccccc',
version: 1,
revision: 0,
state: {
lastNodeId: 99_999_999,
lastLinkId: 10,
lastGroupId: 0,
lastRerouteId: 0
},
nodes: [
{
id: 102,
type: '11111111-1111-4111-8111-111111111111',
pos: [0, 0],
size: [200, 100],
flags: {},
order: 0,
mode: 0,
properties: { proxyWidgets: [['3', 'seed']] }
},
{
id: 103,
type: '22222222-2222-4222-8222-222222222222',
pos: [300, 0],
size: [200, 100],
flags: {},
order: 1,
mode: 0,
properties: { proxyWidgets: [['8', 'prompt']] }
},
{
id: 100_000_000,
type: 'dummy',
pos: [600, 0],
size: [100, 50],
flags: {},
order: 2,
mode: 0
}
],
definitions: {
subgraphs: [
{
id: '11111111-1111-4111-8111-111111111111',
version: 1,
revision: 0,
state: {
lastNodeId: 0,
lastLinkId: 0,
lastGroupId: 0,
lastRerouteId: 0
},
name: 'SubgraphA',
config: {},
inputNode: { id: -10, bounding: [10, 100, 150, 126] },
outputNode: { id: -20, bounding: [400, 100, 140, 126] },
inputs: [],
outputs: [],
widgets: [{ id: 3, name: 'seed' }],
nodes: [
{
id: 3,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 0,
mode: 0
},
{
id: 8,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 1,
mode: 0
},
{
id: 37,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 2,
mode: 0
}
],
links: [
{
id: 1,
origin_id: 3,
origin_slot: 0,
target_id: 8,
target_slot: 0,
type: 'number'
}
],
groups: []
},
{
id: '22222222-2222-4222-8222-222222222222',
version: 1,
revision: 0,
state: {
lastNodeId: 0,
lastLinkId: 0,
lastGroupId: 0,
lastRerouteId: 0
},
name: 'SubgraphB',
config: {},
inputNode: { id: -10, bounding: [10, 100, 150, 126] },
outputNode: { id: -20, bounding: [400, 100, 140, 126] },
inputs: [],
outputs: [],
widgets: [{ id: 8, name: 'prompt' }],
nodes: [
{
id: 3,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 0,
mode: 0
},
{
id: 8,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 1,
mode: 0
},
{
id: 37,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 2,
mode: 0
}
],
links: [
{
id: 2,
origin_id: 3,
origin_slot: 0,
target_id: 37,
target_slot: 0,
type: 'string'
}
],
groups: []
}
]
}
} as const satisfies SerialisableGraph

View File

@@ -0,0 +1,163 @@
import type { SerialisableGraph } from '@/lib/litegraph/src/types/serialisation'
/**
* Workflow with two subgraph definitions whose internal nodes already
* have unique IDs. Deduplication should be a no-op — all IDs, links,
* widgets, and proxyWidgets pass through unchanged.
*
* SubgraphA (node 102): nodes [10, 11, 12], link 10→11, widget ref 10
* SubgraphB (node 103): nodes [20, 21, 22], link 20→22, widget ref 21
*/
export const uniqueSubgraphNodeIds = {
id: 'dddddddd-dddd-4ddd-8ddd-dddddddddddd',
version: 1,
revision: 0,
state: {
lastNodeId: 100,
lastLinkId: 10,
lastGroupId: 0,
lastRerouteId: 0
},
nodes: [
{
id: 102,
type: '11111111-1111-4111-8111-111111111111',
pos: [0, 0],
size: [200, 100],
flags: {},
order: 0,
mode: 0,
properties: { proxyWidgets: [['10', 'seed']] }
},
{
id: 103,
type: '22222222-2222-4222-8222-222222222222',
pos: [300, 0],
size: [200, 100],
flags: {},
order: 1,
mode: 0,
properties: { proxyWidgets: [['21', 'prompt']] }
}
],
definitions: {
subgraphs: [
{
id: '11111111-1111-4111-8111-111111111111',
version: 1,
revision: 0,
state: {
lastNodeId: 0,
lastLinkId: 0,
lastGroupId: 0,
lastRerouteId: 0
},
name: 'SubgraphA',
config: {},
inputNode: { id: -10, bounding: [10, 100, 150, 126] },
outputNode: { id: -20, bounding: [400, 100, 140, 126] },
inputs: [],
outputs: [],
widgets: [{ id: 10, name: 'seed' }],
nodes: [
{
id: 10,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 0,
mode: 0
},
{
id: 11,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 1,
mode: 0
},
{
id: 12,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 2,
mode: 0
}
],
links: [
{
id: 1,
origin_id: 10,
origin_slot: 0,
target_id: 11,
target_slot: 0,
type: 'number'
}
],
groups: []
},
{
id: '22222222-2222-4222-8222-222222222222',
version: 1,
revision: 0,
state: {
lastNodeId: 0,
lastLinkId: 0,
lastGroupId: 0,
lastRerouteId: 0
},
name: 'SubgraphB',
config: {},
inputNode: { id: -10, bounding: [10, 100, 150, 126] },
outputNode: { id: -20, bounding: [400, 100, 140, 126] },
inputs: [],
outputs: [],
widgets: [{ id: 21, name: 'prompt' }],
nodes: [
{
id: 20,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 0,
mode: 0
},
{
id: 21,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 1,
mode: 0
},
{
id: 22,
type: 'dummy',
pos: [0, 0],
size: [100, 50],
flags: {},
order: 2,
mode: 0
}
],
links: [
{
id: 2,
origin_id: 20,
origin_slot: 0,
target_id: 22,
target_slot: 0,
type: 'string'
}
],
groups: []
}
]
}
} as const satisfies SerialisableGraph

View File

@@ -144,7 +144,11 @@ export { isColorable } from './utils/type'
export { createUuidv4 } from './utils/uuid'
export type { UUID } from './utils/uuid'
export { truncateText } from './utils/textUtils'
export { getWidgetStep } from './utils/widget'
export {
evaluateInput,
getWidgetStep,
resolveNodeRootGraphId
} from './utils/widget'
export { distributeSpace, type SpaceRequest } from './utils/spaceDistribution'
export { BaseWidget } from './widgets/BaseWidget'

View File

@@ -0,0 +1,121 @@
import { describe, expect, test } from 'vitest'
import { evaluateMathExpression } from '@/lib/litegraph/src/utils/mathParser'
describe('evaluateMathExpression', () => {
  test.each([
    ['2+3', 5],
    ['10-4', 6],
    ['3*7', 21],
    ['15/3', 5]
  ])('basic arithmetic: %s = %d', (input, expected) => {
    expect(evaluateMathExpression(input)).toBe(expected)
  })

  test.each([
    ['2+3*4', 14],
    ['(2+3)*4', 20],
    ['10-2*3', 4],
    ['10/2+3', 8]
  ])('operator precedence: %s = %d', (input, expected) => {
    expect(evaluateMathExpression(input)).toBe(expected)
  })

  // Trailing decimal points ('123.') are accepted by the tokenizer.
  test.each([
    ['3.14*2', 6.28],
    ['.5+.5', 1],
    ['1.5+2.5', 4],
    ['0.1+0.2', 0.1 + 0.2],
    ['123.', 123],
    ['123.+3', 126]
  ])('decimals: %s', (input, expected) => {
    expect(evaluateMathExpression(input)).toBe(expected)
  })

  test.each([
    [' 2 + 3 ', 5],
    [' 10 - 4 ', 6],
    [' ( 2 + 3 ) * 4 ', 20]
  ])('whitespace handling: "%s" = %d', (input, expected) => {
    expect(evaluateMathExpression(input)).toBe(expected)
  })

  test.each([
    ['((2+3))', 5],
    ['(1+(2*(3+4)))', 15],
    ['((1+2)*(3+4))', 21]
  ])('nested parentheses: %s = %d', (input, expected) => {
    expect(evaluateMathExpression(input)).toBe(expected)
  })

  // Unary +/- are right-associative and may be stacked ('--5' === 5).
  test.each([
    ['-5', -5],
    ['-(3+2)', -5],
    ['--5', 5],
    ['+5', 5],
    ['-3*2', -6],
    ['2*-3', -6],
    ['1+-2', -1],
    ['2--3', 5],
    ['-2*-3', 6],
    ['-(2+3)*-(4+5)', 45]
  ])('unary operators: %s = %d', (input, expected) => {
    expect(evaluateMathExpression(input)).toBe(expected)
  })

  // toBeCloseTo because float rounding accumulates in longer expressions.
  test.each([
    ['2 /2+3 * 4.75- -6', 21.25],
    ['2 / (2 + 3) * 4.33 - -6', 7.732],
    ['12* 123/-(-5 + 2)', 492],
    ['((80 - (19)))', 61],
    ['(1 - 2) + -(-(-(-4)))', 3],
    ['1 - -(-(-(-4)))', -3],
    ['12* 123/(-5 + 2)', -492],
    ['12 * -123', -1476],
    ['((2.33 / (2.9+3.5)*4) - -6)', 7.45625],
    ['123.45*(678.90 / (-2.5+ 11.5)-(80 -19) *33.25) / 20 + 11', -12042.760875],
    [
      '(123.45*(678.90 / (-2.5+ 11.5)-(((80 -(19))) *33.25)) / 20) - (123.45*(678.90 / (-2.5+ 11.5)-(((80 -(19))) *33.25)) / 20) + (13 - 2)/ -(-11) ',
      1
    ]
  ])('complex expression: %s', (input, expected) => {
    expect(evaluateMathExpression(input)).toBeCloseTo(expected as number)
  })

  test.each(['', 'abc', '2+', '(2+3', '2+3)', '()', '*3', '2 3', '.', '123..'])(
    'invalid input returns undefined: "%s"',
    (input) => {
      expect(evaluateMathExpression(input)).toBeUndefined()
    }
  )

  test('division by zero returns Infinity', () => {
    expect(evaluateMathExpression('1/0')).toBe(Infinity)
  })

  test('0/0 returns NaN', () => {
    expect(evaluateMathExpression('0/0')).toBeNaN()
  })

  test.each([
    ['10%3', 1],
    ['10%3+1', 2],
    ['7%2', 1]
  ])('modulo: %s = %d', (input, expected) => {
    expect(evaluateMathExpression(input)).toBe(expected)
  })

  test('negative zero is normalized to positive zero', () => {
    expect(Object.is(evaluateMathExpression('-0'), 0)).toBe(true)
  })

  // The parser's MAX_DEPTH is 200; one level past it must fail cleanly.
  test('deeply nested parentheses exceeding depth limit returns undefined', () => {
    const input = '('.repeat(201) + '1' + ')'.repeat(201)
    expect(evaluateMathExpression(input)).toBeUndefined()
  })

  test('parentheses within depth limit evaluate correctly', () => {
    const input = '('.repeat(200) + '1' + ')'.repeat(200)
    expect(evaluateMathExpression(input)).toBe(1)
  })
})

View File

@@ -0,0 +1,116 @@
type Token = { type: 'number'; value: number } | { type: 'op'; value: string }

/**
 * Splits an arithmetic expression into number and operator tokens.
 * Any non-whitespace character outside the grammar (digits, a decimal
 * point, `+ - * / % ( )`) makes the whole input invalid.
 */
function tokenize(input: string): Token[] | undefined {
  const pattern = /(\d+(?:\.\d*)?|\.\d+)|([+\-*/%()])/g
  const result: Token[] = []
  let scanned = 0
  for (const match of input.matchAll(pattern)) {
    // Reject any non-whitespace residue between recognized tokens.
    if (input.slice(scanned, match.index).trim()) return undefined
    scanned = match.index + match[0].length
    result.push(
      match[1] != null
        ? { type: 'number', value: parseFloat(match[1]) }
        : { type: 'op', value: match[2] }
    )
  }
  // Trailing residue after the final token is equally invalid.
  return input.slice(scanned).trim() ? undefined : result
}

/**
 * Evaluates a basic arithmetic expression string containing
 * `+`, `-`, `*`, `/`, `%`, parentheses, and decimal numbers.
 * Returns `undefined` for empty or malformed input.
 */
export function evaluateMathExpression(input: string): number | undefined {
  const tokens = tokenize(input)
  if (!tokens || tokens.length === 0) return undefined

  // Guard against pathological nesting blowing the call stack.
  const MAX_DEPTH = 200
  let cursor = 0
  let parenDepth = 0

  const peek = (): Token | undefined => tokens[cursor]
  const advance = (): Token => tokens[cursor++]

  // primary := number | '(' expression ')'
  function parsePrimary(): number | undefined {
    const token = peek()
    if (!token) return undefined
    if (token.type === 'number') {
      advance()
      return token.value
    }
    if (token.value !== '(') return undefined
    if (++parenDepth > MAX_DEPTH) return undefined
    advance()
    const inner = parseExpression()
    if (inner === undefined) return undefined
    const closing = peek()
    if (closing?.type !== 'op' || closing.value !== ')') return undefined
    advance()
    parenDepth--
    return inner
  }

  // unary := ('+' | '-') unary | primary  (right-associative)
  function parseUnary(): number | undefined {
    const token = peek()
    if (token?.type === 'op' && (token.value === '+' || token.value === '-')) {
      advance()
      const operand = parseUnary()
      if (operand === undefined) return undefined
      return token.value === '-' ? -operand : operand
    }
    return parsePrimary()
  }

  // term := unary (('*' | '/' | '%') unary)*  (left-associative)
  function parseTerm(): number | undefined {
    let acc = parseUnary()
    if (acc === undefined) return undefined
    for (;;) {
      const token = peek()
      if (token?.type !== 'op') break
      if (token.value !== '*' && token.value !== '/' && token.value !== '%')
        break
      advance()
      const rhs = parseUnary()
      if (rhs === undefined) return undefined
      if (token.value === '*') acc *= rhs
      else if (token.value === '/') acc /= rhs
      else acc %= rhs
    }
    return acc
  }

  // expression := term (('+' | '-') term)*  (left-associative)
  function parseExpression(): number | undefined {
    let acc = parseTerm()
    if (acc === undefined) return undefined
    for (;;) {
      const token = peek()
      if (token?.type !== 'op') break
      if (token.value !== '+' && token.value !== '-') break
      advance()
      const rhs = parseTerm()
      if (rhs === undefined) return undefined
      acc = token.value === '+' ? acc + rhs : acc - rhs
    }
    return acc
  }

  const value = parseExpression()
  // The entire token stream must be consumed; -0 is normalized to 0.
  if (value === undefined || cursor !== tokens.length) return undefined
  return value === 0 ? 0 : value
}

View File

@@ -0,0 +1,164 @@
import type { LGraphState } from '../LGraph'
import type { NodeId } from '../LGraphNode'
import type {
ExportedSubgraph,
ExposedWidget,
ISerialisedNode,
SerialisableLLink
} from '../types/serialisation'
const MAX_NODE_ID = 100_000_000
interface DeduplicationResult {
subgraphs: ExportedSubgraph[]
rootNodes: ISerialisedNode[] | undefined
}
/**
* Pre-deduplicates node IDs across serialized subgraph definitions before
* they are configured. This prevents widget store key collisions when
* multiple subgraph copies contain nodes with the same IDs.
*
* Also patches proxyWidgets in root-level nodes that reference the
* remapped inner node IDs.
*
* Returns deep clones of the inputs — the originals are never mutated.
*
* @param subgraphs - Serialized subgraph definitions to deduplicate
* @param reservedNodeIds - Node IDs already in use by root-level nodes
* @param state - Graph state containing the `lastNodeId` counter (mutated)
* @param rootNodes - Optional root-level nodes with proxyWidgets to patch
*/
export function deduplicateSubgraphNodeIds(
subgraphs: ExportedSubgraph[],
reservedNodeIds: Set<number>,
state: LGraphState,
rootNodes?: ISerialisedNode[]
): DeduplicationResult {
const clonedSubgraphs = structuredClone(subgraphs)
const clonedRootNodes = rootNodes ? structuredClone(rootNodes) : undefined
const usedNodeIds = new Set(reservedNodeIds)
const subgraphIdSet = new Set(clonedSubgraphs.map((sg) => sg.id))
const remapBySubgraph = new Map<string, Map<NodeId, NodeId>>()
for (const subgraph of clonedSubgraphs) {
const remappedIds = remapNodeIds(subgraph.nodes ?? [], usedNodeIds, state)
if (remappedIds.size === 0) continue
remapBySubgraph.set(subgraph.id, remappedIds)
patchSerialisedLinks(subgraph.links ?? [], remappedIds)
patchPromotedWidgets(subgraph.widgets ?? [], remappedIds)
}
for (const subgraph of clonedSubgraphs) {
patchProxyWidgets(subgraph.nodes ?? [], subgraphIdSet, remapBySubgraph)
}
if (clonedRootNodes) {
patchProxyWidgets(clonedRootNodes, subgraphIdSet, remapBySubgraph)
}
return { subgraphs: clonedSubgraphs, rootNodes: clonedRootNodes }
}
/**
* Remaps duplicate node IDs to unique values, updating `usedNodeIds`
* and `state.lastNodeId` as new IDs are allocated.
*
* @returns A map of old ID → new ID for nodes that were remapped.
*/
function remapNodeIds(
nodes: ISerialisedNode[],
usedNodeIds: Set<number>,
state: LGraphState
): Map<NodeId, NodeId> {
const remappedIds = new Map<NodeId, NodeId>()
for (const node of nodes) {
const id = node.id
if (typeof id !== 'number') continue
if (usedNodeIds.has(id)) {
const newId = findNextAvailableId(usedNodeIds, state)
remappedIds.set(id, newId)
node.id = newId
usedNodeIds.add(newId as number)
console.warn(
`LiteGraph: duplicate subgraph node ID ${id} remapped to ${newId}`
)
} else {
usedNodeIds.add(id)
if (id > state.lastNodeId) state.lastNodeId = id
}
}
return remappedIds
}
/**
 * Finds the next unused node ID by incrementing `state.lastNodeId`.
 * Throws if the ID space is exhausted (exceeds `MAX_NODE_ID`).
 */
function findNextAvailableId(
  usedNodeIds: Set<number>,
  state: LGraphState
): NodeId {
  for (;;) {
    const candidate = state.lastNodeId + 1
    if (candidate > MAX_NODE_ID) {
      throw new Error('Node ID space exhausted')
    }
    // Advance even when the candidate is taken, so subsequent iterations
    // (and future allocations) start past it.
    state.lastNodeId = candidate
    if (!usedNodeIds.has(candidate)) return candidate as NodeId
  }
}
/** Rewrites origin_id / target_id in serialized links whose endpoints were remapped. */
function patchSerialisedLinks(
  links: SerialisableLLink[],
  remappedIds: Map<NodeId, NodeId>
): void {
  for (const link of links) {
    const origin = remappedIds.get(link.origin_id)
    const target = remappedIds.get(link.target_id)
    if (origin !== undefined) link.origin_id = origin
    if (target !== undefined) link.target_id = target
  }
}
/** Rewrites the node reference on promoted (exposed) widgets whose node was remapped. */
function patchPromotedWidgets(
  widgets: ExposedWidget[],
  remappedIds: Map<NodeId, NodeId>
): void {
  for (const widget of widgets) {
    const replacement = remappedIds.get(widget.id)
    if (replacement === undefined) continue
    widget.id = replacement
  }
}
/** Patches proxyWidgets entries on SubgraphNode instances whose subgraph had IDs remapped. */
function patchProxyWidgets(
  rootNodes: ISerialisedNode[],
  subgraphIdSet: Set<string>,
  remapBySubgraph: Map<string, Map<NodeId, NodeId>>
): void {
  for (const node of rootNodes) {
    // Only SubgraphNode instances qualify: their `type` is a subgraph ID.
    if (!subgraphIdSet.has(node.type)) continue
    const idTable = remapBySubgraph.get(node.type)
    if (!idTable) continue
    const entries = node.properties?.proxyWidgets
    if (!Array.isArray(entries)) continue
    for (const pair of entries) {
      if (!Array.isArray(pair)) continue
      // pair[0] holds the referenced node ID as a string; convert to look
      // it up in the numeric remap table, then write back as a string.
      const previousId = Number(pair[0]) as NodeId
      const replacement = idTable.get(previousId)
      if (replacement !== undefined) pair[0] = String(replacement)
    }
  }
}

View File

@@ -1,7 +1,11 @@
import { describe, expect, test } from 'vitest'
import type { IWidgetOptions } from '@/lib/litegraph/src/litegraph'
import { getWidgetStep } from '@/lib/litegraph/src/litegraph'
import {
evaluateInput,
getWidgetStep,
resolveNodeRootGraphId
} from '@/lib/litegraph/src/litegraph'
describe('getWidgetStep', () => {
test('should return step2 when available', () => {
@@ -42,3 +46,81 @@ describe('getWidgetStep', () => {
expect(getWidgetStep(optionsWithZeroStep)).toBe(1)
})
})
type GraphIdNode = Pick<LGraphNode, 'graph'>
describe('resolveNodeRootGraphId', () => {
test('returns node rootGraph id when node belongs to a graph', () => {
const node = {
graph: {
rootGraph: {
id: 'subgraph-root-id'
}
}
} as GraphIdNode
expect(resolveNodeRootGraphId(node)).toBe('subgraph-root-id')
})
test('returns fallback graph id when node graph is missing', () => {
const node = {
graph: null
} as GraphIdNode
expect(resolveNodeRootGraphId(node, 'app-root-id')).toBe('app-root-id')
})
})
describe('evaluateInput', () => {
test.each([
['42', 42],
['3.14', 3.14],
['-7', -7],
['0', 0]
])('plain number: "%s" = %d', (input, expected) => {
expect(evaluateInput(input)).toBe(expected)
})
test.each([
['2+3', 5],
['(4+2)*3', 18],
['3.14*2', 6.28],
['10/2+3', 8]
])('expression: "%s" = %d', (input, expected) => {
expect(evaluateInput(input)).toBe(expected)
})
test('empty string returns 0 (Number("") === 0)', () => {
expect(evaluateInput('')).toBe(0)
})
test.each(['abc', 'hello world'])(
'invalid input returns undefined: "%s"',
(input) => {
expect(evaluateInput(input)).toBeUndefined()
}
)
test('division by zero returns undefined', () => {
expect(evaluateInput('1/0')).toBeUndefined()
})
test('0/0 returns undefined (NaN is filtered)', () => {
expect(evaluateInput('0/0')).toBeUndefined()
})
test('scientific notation via Number() fallback', () => {
expect(evaluateInput('1e5')).toBe(100000)
})
test('hex notation via Number() fallback', () => {
expect(evaluateInput('0xff')).toBe(255)
})
test.each(['Infinity', '-Infinity'])(
'"%s" returns undefined (non-finite rejected)',
(input) => {
expect(evaluateInput(input)).toBeUndefined()
}
)
})

View File

@@ -1,5 +1,7 @@
import type { IWidgetOptions } from '@/lib/litegraph/src/types/widgets'
import { evaluateMathExpression } from '@/lib/litegraph/src/utils/mathParser'
/**
* The step value for numeric widgets.
* Use {@link IWidgetOptions.step2} if available, otherwise fallback to
@@ -10,16 +12,12 @@ export function getWidgetStep(options: IWidgetOptions<unknown>): number {
}
export function evaluateInput(input: string): number | undefined {
// Check if v is a valid equation or a number
if (/^[\d\s.()*+/-]+$/.test(input)) {
// Solve the equation if possible
try {
input = eval(input)
} catch {
// Ignore eval errors
}
const result = evaluateMathExpression(input)
if (result !== undefined) {
if (!isFinite(result)) return undefined
return result
}
const newValue = Number(input)
if (isNaN(newValue)) return undefined
if (!isFinite(newValue)) return undefined
return newValue
}

View File

@@ -69,6 +69,7 @@
"icon": "Icon",
"color": "Color",
"error": "Error",
"enter": "Enter",
"enterSubgraph": "Enter Subgraph",
"resizeFromBottomRight": "Resize from bottom-right corner",
"resizeFromTopRight": "Resize from top-right corner",

View File

@@ -82,8 +82,7 @@ export function useMediaAssetActions() {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('g.failedToDownloadImage'),
life: 3000
detail: t('g.failedToDownloadImage')
})
}
}
@@ -124,8 +123,7 @@ export function useMediaAssetActions() {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('g.failedToDownloadImage'),
life: 3000
detail: t('g.failedToDownloadImage')
})
}
}
@@ -180,8 +178,7 @@ export function useMediaAssetActions() {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('exportToast.exportFailedSingle'),
life: 3000
detail: t('exportToast.exportFailedSingle')
})
}
}
@@ -236,8 +233,7 @@ export function useMediaAssetActions() {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('mediaAsset.nodeTypeNotFound', { nodeType }),
life: 3000
detail: t('mediaAsset.nodeTypeNotFound', { nodeType })
})
return
}
@@ -250,8 +246,7 @@ export function useMediaAssetActions() {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('mediaAsset.failedToCreateNode'),
life: 3000
detail: t('mediaAsset.failedToCreateNode')
})
return
}
@@ -441,8 +436,7 @@ export function useMediaAssetActions() {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('mediaAsset.selection.failedToAddNodes'),
life: 3000
detail: t('mediaAsset.selection.failedToAddNodes')
})
} else {
toast.add({
@@ -658,8 +652,7 @@ export function useMediaAssetActions() {
summary: t('g.error'),
detail: isSingle
? t('mediaAsset.failedToDeleteAsset')
: t('mediaAsset.selection.failedToDeleteAssets'),
life: 3000
: t('mediaAsset.selection.failedToDeleteAssets')
})
} else {
// Partial success (only possible with multiple assets)
@@ -680,8 +673,7 @@ export function useMediaAssetActions() {
summary: t('g.error'),
detail: isSingle
? t('mediaAsset.failedToDeleteAsset')
: t('mediaAsset.selection.failedToDeleteAssets'),
life: 3000
: t('mediaAsset.selection.failedToDeleteAssets')
})
} finally {
// Hide loading overlay for all assets

View File

@@ -73,8 +73,7 @@ export function createAssetWidget(
toastStore.add({
severity: 'error',
summary: t('assetBrowser.invalidAsset'),
detail: t('assetBrowser.invalidAssetDetail'),
life: 5000
detail: t('assetBrowser.invalidAssetDetail')
})
return
}
@@ -92,8 +91,7 @@ export function createAssetWidget(
toastStore.add({
severity: 'error',
summary: t('assetBrowser.invalidFilename'),
detail: t('assetBrowser.invalidFilenameDetail'),
life: 5000
detail: t('assetBrowser.invalidFilenameDetail')
})
return
}

View File

@@ -16,7 +16,7 @@ const mockAccessBillingPortal = vi.fn()
const mockReportError = vi.fn()
const mockTrackBeginCheckout = vi.fn()
const mockUserId = ref<string | undefined>('user-123')
const mockGetFirebaseAuthHeader = vi.fn(() =>
const mockGetAuthHeader = vi.fn(() =>
Promise.resolve({ Authorization: 'Bearer test-token' })
)
const mockGetCheckoutAttribution = vi.hoisted(() => vi.fn(() => ({})))
@@ -58,7 +58,7 @@ vi.mock('@/composables/useErrorHandling', () => ({
vi.mock('@/stores/firebaseAuthStore', () => ({
useFirebaseAuthStore: () =>
reactive({
getFirebaseAuthHeader: mockGetFirebaseAuthHeader,
getAuthHeader: mockGetAuthHeader,
userId: computed(() => mockUserId.value)
}),
FirebaseAuthStoreError: class extends Error {}

View File

@@ -108,7 +108,7 @@ vi.mock('@/services/dialogService', () => ({
vi.mock('@/stores/firebaseAuthStore', () => ({
useFirebaseAuthStore: vi.fn(() => ({
getFirebaseAuthHeader: mockGetAuthHeader,
getAuthHeader: mockGetAuthHeader,
get userId() {
return mockUserId.value
}
@@ -363,6 +363,27 @@ describe('useSubscription', () => {
})
})
describe('non-cloud environments', () => {
it('should not fetch subscription status when not on cloud', async () => {
mockIsCloud.value = false
mockIsLoggedIn.value = true
useSubscriptionWithScope()
await vi.dynamicImportSettled()
expect(global.fetch).not.toHaveBeenCalled()
})
it('should report isActiveSubscription as true when not on cloud', () => {
mockIsCloud.value = false
const { isActiveSubscription } = useSubscriptionWithScope()
expect(isActiveSubscription.value).toBe(true)
})
})
describe('action handlers', () => {
it('should open usage history URL', () => {
const windowOpenSpy = vi

View File

@@ -40,7 +40,7 @@ function useSubscriptionInternal() {
const { showSubscriptionRequiredDialog } = useDialogService()
const firebaseAuthStore = useFirebaseAuthStore()
const { getFirebaseAuthHeader } = firebaseAuthStore
const { getAuthHeader } = firebaseAuthStore
const { wrapWithErrorHandlingAsync } = useErrorHandling()
const { isLoggedIn } = useCurrentUser()
@@ -184,7 +184,7 @@ function useSubscriptionInternal() {
* @returns Subscription status or null if no subscription exists
*/
async function fetchSubscriptionStatus(): Promise<CloudSubscriptionStatusResponse | null> {
const authHeader = await getFirebaseAuthHeader()
const authHeader = await getAuthHeader()
if (!authHeader) {
throw new FirebaseAuthStoreError(t('toastMessages.userNotAuthenticated'))
}
@@ -217,7 +217,7 @@ function useSubscriptionInternal() {
watch(
() => isLoggedIn.value,
async (loggedIn) => {
if (loggedIn) {
if (loggedIn && isCloud) {
try {
await fetchSubscriptionStatus()
} catch (error) {
@@ -238,7 +238,7 @@ function useSubscriptionInternal() {
const initiateSubscriptionCheckout =
async (): Promise<CloudSubscriptionCheckoutResponse> => {
const authHeader = await getFirebaseAuthHeader()
const authHeader = await getAuthHeader()
if (!authHeader) {
throw new FirebaseAuthStoreError(
t('toastMessages.userNotAuthenticated')

View File

@@ -39,7 +39,7 @@ vi.mock('@/platform/telemetry', () => ({
vi.mock('@/stores/firebaseAuthStore', () => ({
useFirebaseAuthStore: vi.fn(() =>
reactive({
getFirebaseAuthHeader: mockGetAuthHeader,
getAuthHeader: mockGetAuthHeader,
userId: computed(() => mockUserId.value)
})
),

View File

@@ -54,7 +54,7 @@ export async function performSubscriptionCheckout(
const firebaseAuthStore = useFirebaseAuthStore()
const { userId } = storeToRefs(firebaseAuthStore)
const telemetry = useTelemetry()
const authHeader = await firebaseAuthStore.getFirebaseAuthHeader()
const authHeader = await firebaseAuthStore.getAuthHeader()
if (!authHeader) {
throw new FirebaseAuthStoreError(t('toastMessages.userNotAuthenticated'))

View File

@@ -226,11 +226,22 @@ export function useNodeReplacement() {
useWorkflowStore().activeWorkflow?.changeTracker ?? null
changeTracker?.beforeChange()
// Target types come from node_replacements fetched at workflow load time
// and the missing nodes detected at that point — not from the current
// registered_node_types. This ensures replacement still works even if
// the user has since installed the missing node pack.
const targetTypes = new Set(
selectedTypes.map((t) => (typeof t === 'string' ? t : t.type))
)
try {
const placeholders = collectAllNodes(
graph,
(n) => !!n.has_errors && !!n.last_serialization
)
const placeholders = collectAllNodes(graph, (n) => {
if (!n.last_serialization) return false
// Prefer the original serialized type; fall back to the live type
// for nodes whose serialization predates the type field.
const originalType = n.last_serialization.type ?? n.type
return !!originalType && targetTypes.has(originalType)
})
for (const node of placeholders) {
const match = findMatchingType(node, selectedTypes)
@@ -279,6 +290,18 @@ export function useNodeReplacement() {
life: 3000
})
}
} catch (error) {
console.error('Failed to replace nodes:', error)
if (replacedTypes.length > 0) {
graph.updateExecutionOrder()
graph.setDirtyCanvas(true, true)
}
toastStore.add({
severity: 'error',
summary: t('g.error', 'Error'),
detail: t('nodeReplacement.replaceFailed', 'Failed to replace nodes')
})
return replacedTypes
} finally {
changeTracker?.afterChange()
}

View File

@@ -43,4 +43,5 @@ export type RemoteConfig = {
linear_toggle_enabled?: boolean
team_workspaces_enabled?: boolean
user_secrets_enabled?: boolean
node_library_essentials_enabled?: boolean
}

View File

@@ -83,8 +83,7 @@ describe('useSecrets', () => {
expect(mockAdd).toHaveBeenCalledWith({
severity: 'error',
summary: 'g.error',
detail: 'Network error',
life: 5000
detail: 'Network error'
})
})
})
@@ -130,8 +129,7 @@ describe('useSecrets', () => {
expect(mockAdd).toHaveBeenCalledWith({
severity: 'error',
summary: 'g.error',
detail: 'Delete failed',
life: 5000
detail: 'Delete failed'
})
})
})

View File

@@ -33,16 +33,14 @@ export function useSecrets() {
toastStore.add({
severity: 'error',
summary: t('g.error'),
detail: err.message,
life: 5000
detail: err.message
})
} else {
console.error('Unexpected error fetching secrets:', err)
toastStore.add({
severity: 'error',
summary: t('g.error'),
detail: t('g.unknownError'),
life: 5000
detail: t('g.unknownError')
})
}
} finally {
@@ -60,16 +58,14 @@ export function useSecrets() {
toastStore.add({
severity: 'error',
summary: t('g.error'),
detail: err.message,
life: 5000
detail: err.message
})
} else {
console.error('Unexpected error deleting secret:', err)
toastStore.add({
severity: 'error',
summary: t('g.error'),
detail: t('g.unknownError'),
life: 5000
detail: t('g.unknownError')
})
}
} finally {

View File

@@ -253,4 +253,575 @@ describe('useWorkflowService', () => {
expect(mockShowMissingNodes).toHaveBeenCalledTimes(1)
})
})
describe('saveWorkflow', () => {
let workflowStore: ReturnType<typeof useWorkflowStore>
beforeEach(() => {
setActivePinia(createTestingPinia())
workflowStore = useWorkflowStore()
})
it('should delegate to workflowStore.saveWorkflow for persisted workflows', async () => {
const workflow = createModeTestWorkflow({
path: 'workflows/persisted.json'
})
vi.mocked(workflowStore.saveWorkflow).mockResolvedValue()
await useWorkflowService().saveWorkflow(workflow)
expect(workflowStore.saveWorkflow).toHaveBeenCalledWith(workflow)
})
it('should call saveWorkflowAs for temporary workflows', async () => {
const workflow = createModeTestWorkflow({
path: 'workflows/Unsaved Workflow.json'
})
Object.defineProperty(workflow, 'isTemporary', { get: () => true })
vi.spyOn(workflow, 'promptSave').mockResolvedValue(null)
await useWorkflowService().saveWorkflow(workflow)
expect(workflowStore.saveWorkflow).not.toHaveBeenCalled()
})
})
describe('saveWorkflowAs', () => {
let workflowStore: ReturnType<typeof useWorkflowStore>
beforeEach(() => {
setActivePinia(createTestingPinia())
workflowStore = useWorkflowStore()
})
it('should rename then save when workflow is temporary', async () => {
const workflow = createModeTestWorkflow({
path: 'workflows/Unsaved Workflow.json'
})
Object.defineProperty(workflow, 'isTemporary', { get: () => true })
vi.mocked(workflowStore.getWorkflowByPath).mockReturnValue(null)
vi.mocked(workflowStore.renameWorkflow).mockResolvedValue()
vi.mocked(workflowStore.saveWorkflow).mockResolvedValue()
const result = await useWorkflowService().saveWorkflowAs(workflow, {
filename: 'my-workflow'
})
expect(result).toBe(true)
expect(workflowStore.renameWorkflow).toHaveBeenCalledWith(
workflow,
'workflows/my-workflow.json'
)
expect(workflowStore.saveWorkflow).toHaveBeenCalledWith(workflow)
})
it('should return false when no filename is provided', async () => {
const workflow = createModeTestWorkflow({
path: 'workflows/test.json'
})
vi.spyOn(workflow, 'promptSave').mockResolvedValue(null)
const result = await useWorkflowService().saveWorkflowAs(workflow)
expect(result).toBe(false)
expect(workflowStore.saveWorkflow).not.toHaveBeenCalled()
})
})
describe('afterLoadNewGraph', () => {
let workflowStore: ReturnType<typeof useWorkflowStore>
let existingWorkflow: LoadedComfyWorkflow
beforeEach(() => {
setActivePinia(createTestingPinia())
workflowStore = useWorkflowStore()
existingWorkflow = createModeTestWorkflow({
path: 'workflows/repeat.json'
})
vi.mocked(workflowStore.getWorkflowByPath).mockReturnValue(
existingWorkflow
)
vi.mocked(workflowStore.isActive).mockReturnValue(true)
vi.mocked(workflowStore.openWorkflow).mockResolvedValue(existingWorkflow)
})
it('should reuse the active workflow when loading the same path repeatedly', async () => {
const workflowId = 'repeat-workflow-id'
existingWorkflow.changeTracker.activeState.id = workflowId
await useWorkflowService().afterLoadNewGraph('repeat', {
id: workflowId,
nodes: [{ id: 1, type: 'TestNode', pos: [0, 0], size: [100, 100] }]
} as never)
expect(workflowStore.getWorkflowByPath).toHaveBeenCalledWith(
'workflows/repeat.json'
)
expect(workflowStore.openWorkflow).toHaveBeenCalledWith(existingWorkflow)
expect(existingWorkflow.changeTracker.reset).toHaveBeenCalled()
expect(existingWorkflow.changeTracker.restore).toHaveBeenCalled()
expect(workflowStore.createNewTemporary).not.toHaveBeenCalled()
})
it('should reuse active workflow for repeated same-path loads without ids', async () => {
await useWorkflowService().afterLoadNewGraph('repeat', {
nodes: [{ id: 1, type: 'TestNode', pos: [0, 0], size: [100, 100] }]
} as never)
expect(workflowStore.getWorkflowByPath).toHaveBeenCalledWith(
'workflows/repeat.json'
)
expect(workflowStore.openWorkflow).toHaveBeenCalledWith(existingWorkflow)
expect(existingWorkflow.changeTracker.reset).toHaveBeenCalled()
expect(existingWorkflow.changeTracker.restore).toHaveBeenCalled()
expect(workflowStore.createNewTemporary).not.toHaveBeenCalled()
})
it('should reuse active workflow when only one side has an id', async () => {
existingWorkflow.changeTracker.activeState.id = 'existing-id'
await useWorkflowService().afterLoadNewGraph('repeat', {
nodes: [{ id: 1, type: 'TestNode', pos: [0, 0], size: [100, 100] }]
} as never)
expect(workflowStore.openWorkflow).toHaveBeenCalledWith(existingWorkflow)
expect(existingWorkflow.changeTracker.reset).toHaveBeenCalled()
expect(existingWorkflow.changeTracker.restore).toHaveBeenCalled()
expect(workflowStore.createNewTemporary).not.toHaveBeenCalled()
})
it('should reuse active workflow when only workflowData has an id', async () => {
await useWorkflowService().afterLoadNewGraph('repeat', {
id: 'incoming-id',
nodes: [{ id: 1, type: 'TestNode', pos: [0, 0], size: [100, 100] }]
} as never)
expect(workflowStore.openWorkflow).toHaveBeenCalledWith(existingWorkflow)
expect(existingWorkflow.changeTracker.reset).toHaveBeenCalled()
expect(existingWorkflow.changeTracker.restore).toHaveBeenCalled()
expect(workflowStore.createNewTemporary).not.toHaveBeenCalled()
})
it('should create new temporary when ids differ', async () => {
existingWorkflow.changeTracker.activeState.id = 'existing-id'
const tempWorkflow = createModeTestWorkflow({
path: 'workflows/repeat (2).json'
})
vi.mocked(workflowStore.createNewTemporary).mockReturnValue(tempWorkflow)
vi.mocked(workflowStore.openWorkflow).mockResolvedValue(tempWorkflow)
await useWorkflowService().afterLoadNewGraph('repeat', {
id: 'different-id',
nodes: [{ id: 1, type: 'TestNode', pos: [0, 0], size: [100, 100] }]
} as never)
expect(workflowStore.createNewTemporary).toHaveBeenCalled()
})
})
describe('per-workflow mode switching', () => {
let appMode: ReturnType<typeof useAppMode>
let workflowStore: ReturnType<typeof useWorkflowStore>
let service: ReturnType<typeof useWorkflowService>
function mockOpenWorkflow() {
vi.spyOn(workflowStore, 'openWorkflow').mockImplementation(async (wf) => {
// Simulate load() setting changeTracker on first open
if (!wf.changeTracker) {
wf.changeTracker = createMockChangeTracker()
wf.content = '{}'
wf.originalContent = '{}'
}
const loaded = wf as LoadedComfyWorkflow
workflowStore.activeWorkflow = loaded
return loaded
})
}
beforeEach(() => {
appMode = useAppMode()
workflowStore = useWorkflowStore()
service = useWorkflowService()
})
describe('mode derivation from active workflow', () => {
it('reflects initialMode of the active workflow', () => {
const workflow = createModeTestWorkflow({ initialMode: 'app' })
workflowStore.activeWorkflow = workflow
expect(appMode.mode.value).toBe('app')
})
it('activeMode takes precedence over initialMode', () => {
const workflow = createModeTestWorkflow({
initialMode: 'app',
activeMode: 'graph'
})
workflowStore.activeWorkflow = workflow
expect(appMode.mode.value).toBe('graph')
})
it('defaults to graph when no active workflow', () => {
expect(appMode.mode.value).toBe('graph')
})
it('updates when activeWorkflow changes', () => {
const workflow1 = createModeTestWorkflow({
path: 'workflows/one.json',
initialMode: 'app'
})
const workflow2 = createModeTestWorkflow({
path: 'workflows/two.json',
activeMode: 'builder:inputs'
})
workflowStore.activeWorkflow = workflow1
expect(appMode.mode.value).toBe('app')
workflowStore.activeWorkflow = workflow2
expect(appMode.mode.value).toBe('builder:inputs')
})
})
describe('setMode writes to active workflow', () => {
it('writes activeMode without changing initialMode', () => {
const workflow = createModeTestWorkflow({ initialMode: 'graph' })
workflowStore.activeWorkflow = workflow
appMode.setMode('builder:arrange')
expect(workflow.activeMode).toBe('builder:arrange')
expect(workflow.initialMode).toBe('graph')
expect(appMode.mode.value).toBe('builder:arrange')
})
})
describe('afterLoadNewGraph initializes initialMode', () => {
beforeEach(() => {
mockOpenWorkflow()
})
it('sets initialMode from extra.linearMode on first load', async () => {
const workflow = createModeTestWorkflow({ loaded: false })
await service.afterLoadNewGraph(
workflow,
makeWorkflowData({ linearMode: true })
)
expect(workflow.initialMode).toBe('app')
})
it('leaves initialMode null when extra.linearMode is absent', async () => {
const workflow = createModeTestWorkflow({ loaded: false })
await service.afterLoadNewGraph(workflow, makeWorkflowData())
expect(workflow.initialMode).toBeNull()
})
it('sets initialMode to graph when extra.linearMode is false', async () => {
const workflow = createModeTestWorkflow({ loaded: false })
await service.afterLoadNewGraph(
workflow,
makeWorkflowData({ linearMode: false })
)
expect(workflow.initialMode).toBe('graph')
})
it('does not set initialMode on tab switch even if data has linearMode', async () => {
const workflow = createModeTestWorkflow({ loaded: false })
// First load — no linearMode in data
await service.afterLoadNewGraph(workflow, makeWorkflowData())
expect(workflow.initialMode).toBeNull()
// User switches to app mode at runtime
workflow.activeMode = 'app'
// Tab switch / reload — data now has linearMode (leaked from graph)
await service.afterLoadNewGraph(
workflow,
makeWorkflowData({ linearMode: true })
)
// initialMode should NOT have been updated — only builder save sets it
expect(workflow.initialMode).toBeNull()
})
it('preserves existing initialMode on tab switch', async () => {
const workflow = createModeTestWorkflow({
initialMode: 'app'
})
await service.afterLoadNewGraph(workflow, makeWorkflowData())
expect(workflow.initialMode).toBe('app')
})
it('sets initialMode to app for fresh string-based loads with linearMode', async () => {
vi.spyOn(workflowStore, 'createNewTemporary').mockReturnValue(
createModeTestWorkflow()
)
await service.afterLoadNewGraph(
'test.json',
makeWorkflowData({ linearMode: true })
)
expect(appMode.mode.value).toBe('app')
})
it('reads initialMode from file when draft lacks linearMode (restoration)', async () => {
const filePath = 'workflows/saved-app.json'
const fileInitialState = makeWorkflowData({ linearMode: true })
const mockTracker = createMockChangeTracker()
mockTracker.initialState = fileInitialState
// Persisted, not-loaded workflow in the store
const persistedWorkflow = new ComfyWorkflowClass({
path: filePath,
modified: Date.now(),
size: 100
})
vi.spyOn(workflowStore, 'getWorkflowByPath').mockReturnValue(
persistedWorkflow
)
vi.spyOn(workflowStore, 'openWorkflow').mockImplementation(
async (wf) => {
wf.changeTracker = mockTracker
wf.content = JSON.stringify(fileInitialState)
wf.originalContent = wf.content
workflowStore.activeWorkflow = wf as LoadedComfyWorkflow
return wf as LoadedComfyWorkflow
}
)
// Draft data has NO linearMode (simulates rootGraph serialization)
const draftData = makeWorkflowData()
await service.afterLoadNewGraph('saved-app.json', draftData)
// initialMode should come from the file, not the draft
expect(persistedWorkflow.initialMode).toBe('app')
})
})
describe('round-trip mode preservation', () => {
it('each workflow retains its own mode across tab switches', () => {
const workflow1 = createModeTestWorkflow({
path: 'workflows/one.json',
activeMode: 'builder:inputs'
})
const workflow2 = createModeTestWorkflow({
path: 'workflows/two.json',
initialMode: 'app'
})
workflowStore.activeWorkflow = workflow1
expect(appMode.mode.value).toBe('builder:inputs')
workflowStore.activeWorkflow = workflow2
expect(appMode.mode.value).toBe('app')
workflowStore.activeWorkflow = workflow1
expect(appMode.mode.value).toBe('builder:inputs')
})
})
})
describe('saveWorkflowAs', () => {
let workflowStore: ReturnType<typeof useWorkflowStore>
let service: ReturnType<typeof useWorkflowService>
beforeEach(() => {
workflowStore = useWorkflowStore()
service = useWorkflowService()
vi.spyOn(workflowStore, 'saveWorkflow').mockResolvedValue()
vi.spyOn(workflowStore, 'renameWorkflow').mockResolvedValue()
})
function createTemporaryWorkflow(
directory: string = 'workflows'
): LoadedComfyWorkflow {
const workflow = new ComfyWorkflowClass({
path: directory + '/temp.json',
modified: Date.now(),
size: 100
})
workflow.changeTracker = createMockChangeTracker()
workflow.content = '{}'
workflow.originalContent = '{}'
Object.defineProperty(workflow, 'isTemporary', { get: () => true })
return workflow as LoadedComfyWorkflow
}
it('appends .app.json extension when initialMode is app', async () => {
const workflow = createTemporaryWorkflow()
workflow.initialMode = 'app'
await service.saveWorkflowAs(workflow, { filename: 'my-workflow' })
expect(workflowStore.renameWorkflow).toHaveBeenCalledWith(
workflow,
'workflows/my-workflow.app.json'
)
})
it('appends .json extension when initialMode is graph', async () => {
const workflow = createTemporaryWorkflow()
workflow.initialMode = 'graph'
await service.saveWorkflowAs(workflow, { filename: 'my-workflow' })
expect(workflowStore.renameWorkflow).toHaveBeenCalledWith(
workflow,
'workflows/my-workflow.json'
)
})
it('appends .json extension when initialMode is not set', async () => {
const workflow = createTemporaryWorkflow()
await service.saveWorkflowAs(workflow, { filename: 'my-workflow' })
expect(workflowStore.renameWorkflow).toHaveBeenCalledWith(
workflow,
'workflows/my-workflow.json'
)
})
})
describe('saveWorkflow', () => {
let workflowStore: ReturnType<typeof useWorkflowStore>
let toastStore: ReturnType<typeof useToastStore>
let service: ReturnType<typeof useWorkflowService>
beforeEach(() => {
workflowStore = useWorkflowStore()
toastStore = useToastStore()
service = useWorkflowService()
vi.spyOn(workflowStore, 'saveWorkflow').mockResolvedValue()
vi.spyOn(workflowStore, 'renameWorkflow').mockResolvedValue()
})
function createSaveableWorkflow(path: string): LoadedComfyWorkflow {
const workflow = new ComfyWorkflowClass({
path,
modified: Date.now(),
size: 100
})
workflow.changeTracker = createMockChangeTracker()
workflow.content = '{}'
workflow.originalContent = '{}'
return workflow as LoadedComfyWorkflow
}
it('renames .json to .app.json when initialMode is app', async () => {
const workflow = createSaveableWorkflow('workflows/test.json')
workflow.initialMode = 'app'
await service.saveWorkflow(workflow)
expect(workflowStore.renameWorkflow).toHaveBeenCalledWith(
workflow,
'workflows/test.app.json'
)
expect(workflowStore.saveWorkflow).toHaveBeenCalledWith(workflow)
})
it('renames .app.json to .json when initialMode is graph', async () => {
const workflow = createSaveableWorkflow('workflows/test.app.json')
workflow.initialMode = 'graph'
await service.saveWorkflow(workflow)
expect(workflowStore.renameWorkflow).toHaveBeenCalledWith(
workflow,
'workflows/test.json'
)
expect(workflowStore.saveWorkflow).toHaveBeenCalledWith(workflow)
})
it('does not rename when extension already matches', async () => {
const workflow = createSaveableWorkflow('workflows/test.app.json')
workflow.initialMode = 'app'
await service.saveWorkflow(workflow)
expect(workflowStore.renameWorkflow).not.toHaveBeenCalled()
expect(workflowStore.saveWorkflow).toHaveBeenCalledWith(workflow)
})
it('shows toast only when rename occurs', async () => {
const addSpy = vi.spyOn(toastStore, 'add')
const workflow = createSaveableWorkflow('workflows/test.json')
workflow.initialMode = 'app'
await service.saveWorkflow(workflow)
expect(addSpy).toHaveBeenCalledWith(
expect.objectContaining({ severity: 'info' })
)
})
it('does not show toast when no rename occurs', async () => {
const addSpy = vi.spyOn(toastStore, 'add')
const workflow = createSaveableWorkflow('workflows/test.app.json')
workflow.initialMode = 'app'
await service.saveWorkflow(workflow)
expect(addSpy).not.toHaveBeenCalled()
})
it('does not rename when initialMode is not set', async () => {
const workflow = createSaveableWorkflow('workflows/test.json')
await service.saveWorkflow(workflow)
expect(workflowStore.renameWorkflow).not.toHaveBeenCalled()
})
it('prompts for overwrite when target path already exists', async () => {
const workflow = createSaveableWorkflow('workflows/test.json')
workflow.initialMode = 'app'
const existing = createSaveableWorkflow('workflows/test.app.json')
vi.spyOn(workflowStore, 'getWorkflowByPath').mockReturnValue(existing)
vi.spyOn(workflowStore, 'deleteWorkflow').mockResolvedValue()
mockConfirm.mockResolvedValue(true)
await service.saveWorkflow(workflow)
expect(mockConfirm).toHaveBeenCalled()
expect(workflowStore.renameWorkflow).toHaveBeenCalledWith(
workflow,
'workflows/test.app.json'
)
expect(workflowStore.saveWorkflow).toHaveBeenCalledWith(workflow)
})
it('saves without renaming when user declines overwrite', async () => {
const workflow = createSaveableWorkflow('workflows/test.json')
workflow.initialMode = 'app'
const existing = createSaveableWorkflow('workflows/test.app.json')
vi.spyOn(workflowStore, 'getWorkflowByPath').mockReturnValue(existing)
mockConfirm.mockResolvedValue(false)
await service.saveWorkflow(workflow)
expect(mockConfirm).toHaveBeenCalled()
expect(workflowStore.renameWorkflow).not.toHaveBeenCalled()
expect(workflowStore.saveWorkflow).toHaveBeenCalledWith(workflow)
})
})
})

View File

@@ -22,7 +22,16 @@ import { useMissingNodesDialog } from '@/composables/useMissingNodesDialog'
import { useDialogService } from '@/services/dialogService'
import { useDomWidgetStore } from '@/stores/domWidgetStore'
import { useWorkspaceStore } from '@/stores/workspaceStore'
import { appendJsonExt } from '@/utils/formatUtil'
import {
appendJsonExt,
appendWorkflowJsonExt,
generateUUID
} from '@/utils/formatUtil'
function linearModeToAppMode(linearMode: unknown): AppMode | null {
if (typeof linearMode !== 'boolean') return null
return linearMode ? 'app' : 'graph'
}
export const useWorkflowService = () => {
const settingStore = useSettingStore()
@@ -316,8 +325,7 @@ export const useWorkflowService = () => {
toastStore.add({
severity: 'error',
summary: t('g.error'),
detail: t('toastMessages.failedToSaveDraft'),
life: 3000
detail: t('toastMessages.failedToSaveDraft')
})
}
}
@@ -361,10 +369,24 @@ export const useWorkflowService = () => {
const fullPath = ComfyWorkflow.basePath + appendJsonExt(path)
const existingWorkflow = workflowStore.getWorkflowByPath(fullPath)
// If the workflow exists and is NOT loaded yet (restoration case),
// use the existing workflow instead of creating a new one.
// If it IS loaded, this is a re-import case - create new with suffix.
if (existingWorkflow?.isPersisted && !existingWorkflow.isLoaded) {
// Reuse an existing workflow when this is a restoration case
// (persisted but currently unloaded) or an idempotent repeated load
// of the currently active same-path workflow.
//
// This prevents accidental duplicate tabs when startup/load flows
// invoke loadGraphData more than once for the same workflow name.
const isSameActiveWorkflowLoad =
!!existingWorkflow &&
workflowStore.isActive(existingWorkflow) &&
(existingWorkflow.activeState?.id === undefined ||
workflowData.id === undefined ||
existingWorkflow.activeState.id === workflowData.id)
if (
existingWorkflow &&
((existingWorkflow.isPersisted && !existingWorkflow.isLoaded) ||
isSameActiveWorkflowLoad)
) {
const loadedWorkflow =
await workflowStore.openWorkflow(existingWorkflow)
loadedWorkflow.changeTracker.reset(workflowData)
@@ -435,7 +457,10 @@ export const useWorkflowService = () => {
* Takes an existing workflow and duplicates it with a new name
*/
const duplicateWorkflow = async (workflow: ComfyWorkflow) => {
if (!workflow.isLoaded) await workflow.load()
const state = JSON.parse(JSON.stringify(workflow.activeState))
// Ensure duplicates are always treated as distinct workflows.
if (state) state.id = generateUUID()
const suffix = workflow.isPersisted ? ' (Copy)' : ''
// Remove the suffix `(2)` or similar
const filename = workflow.filename.replace(/\s*\(\d+\)$/, '') + suffix

View File

@@ -106,8 +106,13 @@ export class ComfyWorkflow extends UserFile {
await super.load({ force })
if (!force && this.isLoaded) return this as this & LoadedComfyWorkflow
if (!this.originalContent) {
throw new Error('[ASSERT] Workflow content should be loaded')
if (this.originalContent == null) {
throw new Error(
`[ASSERT] Workflow content should be loaded for '${this.path}'`
)
}
if (this.originalContent.trim().length === 0) {
throw new Error(`Workflow content is empty for '${this.path}'`)
}
const initialState = JSON.parse(this.originalContent)

View File

@@ -478,12 +478,15 @@ export const useWorkflowStore = defineStore('workflow', () => {
const wasBookmarked = bookmarkStore.isBookmarked(oldPath)
const draftStore = useWorkflowDraftStore()
const openIndex = detachWorkflow(workflow)
// Perform the actual rename operation first
try {
await workflow.rename(newPath)
} finally {
attachWorkflow(workflow, openIndex)
await workflow.rename(newPath)
// Synchronously swap old path for new path in lookup and open paths
// to avoid a tab flicker caused by an async gap between detach/attach.
delete workflowLookup.value[oldPath]
workflowLookup.value[workflow.path] = workflow
const openIndex = openWorkflowPaths.value.indexOf(oldPath)
if (openIndex !== -1) {
openWorkflowPaths.value.splice(openIndex, 1, workflow.path)
}
draftStore.moveDraft(oldPath, newPath, workflow.key)
@@ -524,13 +527,11 @@ export const useWorkflowStore = defineStore('workflow', () => {
const saveWorkflow = async (workflow: ComfyWorkflow) => {
isBusy.value = true
try {
// Detach the workflow and re-attach to force refresh the tree objects.
await workflow.save()
// Synchronously detach and re-attach to force refresh the tree objects
// without an async gap that would cause the tab to disappear.
const openIndex = detachWorkflow(workflow)
try {
await workflow.save()
} finally {
attachWorkflow(workflow, openIndex)
}
attachWorkflow(workflow, openIndex)
} finally {
isBusy.value = false
}

View File

@@ -110,8 +110,7 @@ export function useWorkflowPersistenceV2() {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('toastMessages.failedToSaveDraft'),
life: 3000
detail: t('toastMessages.failedToSaveDraft')
})
return
}

View File

@@ -145,8 +145,7 @@ describe('useTemplateUrlLoader', () => {
expect(mockToastAdd).toHaveBeenCalledWith({
severity: 'error',
summary: 'Error',
detail: 'Template "invalid-template" not found',
life: 3000
detail: 'Template "invalid-template" not found'
})
})
@@ -239,8 +238,7 @@ describe('useTemplateUrlLoader', () => {
expect(mockToastAdd).toHaveBeenCalledWith({
severity: 'error',
summary: 'Error',
detail: 'Failed to load template',
life: 3000
detail: 'Failed to load template'
})
})

View File

@@ -117,8 +117,7 @@ export function useTemplateUrlLoader() {
summary: t('g.error'),
detail: t('templateWorkflows.error.templateNotFound', {
templateName: templateParam
}),
life: 3000
})
})
} else if (modeParam === 'linear') {
// Set linear mode after successful template load
@@ -132,8 +131,7 @@ export function useTemplateUrlLoader() {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('g.errorLoadingTemplate'),
life: 3000
detail: t('g.errorLoadingTemplate')
})
} finally {
cleanupUrlParams()

View File

@@ -413,8 +413,7 @@ async function handleResubscribe() {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: message,
life: 5000
detail: message
})
} finally {
isResubscribing.value = false

View File

@@ -137,8 +137,7 @@ async function handleSubscribeClick(payload: {
toast.add({
severity: 'error',
summary: 'Unable to subscribe',
detail: 'This plan is not available',
life: 5000
detail: 'This plan is not available'
})
return
}
@@ -148,8 +147,7 @@ async function handleSubscribeClick(payload: {
toast.add({
severity: 'error',
summary: 'Unable to subscribe',
detail: response?.reason || 'This plan is not available',
life: 5000
detail: response?.reason || 'This plan is not available'
})
return
}
@@ -164,8 +162,7 @@ async function handleSubscribeClick(payload: {
toast.add({
severity: 'error',
summary: 'Error',
detail: message,
life: 5000
detail: message
})
} finally {
isLoadingPreview.value = false
@@ -225,8 +222,7 @@ async function handleAddCreditCard() {
toast.add({
severity: 'error',
summary: 'Error',
detail: message,
life: 5000
detail: message
})
} finally {
isSubscribing.value = false
@@ -280,8 +276,7 @@ async function handleConfirmTransition() {
toast.add({
severity: 'error',
summary: 'Error',
detail: message,
life: 5000
detail: message
})
} finally {
isSubscribing.value = false
@@ -305,8 +300,7 @@ async function handleResubscribe() {
toast.add({
severity: 'error',
summary: 'Error',
detail: message,
life: 5000
detail: message
})
} finally {
isResubscribing.value = false

View File

@@ -273,8 +273,7 @@ async function handleBuy() {
toast.add({
severity: 'error',
summary: t('credits.topUp.purchaseError'),
detail: t('credits.topUp.unknownError'),
life: 5000
detail: t('credits.topUp.unknownError')
})
}
} catch (error) {
@@ -285,8 +284,7 @@ async function handleBuy() {
toast.add({
severity: 'error',
summary: t('credits.topUp.purchaseError'),
detail: t('credits.topUp.purchaseErrorDetail', { error: errorMessage }),
life: 5000
detail: t('credits.topUp.purchaseErrorDetail', { error: errorMessage })
})
} finally {
loading.value = false

View File

@@ -102,8 +102,7 @@ async function onCreate() {
toast.add({
severity: 'error',
summary: t('workspacePanel.toast.failedToCreateWorkspace'),
detail: error instanceof Error ? error.message : t('g.unknownError'),
life: 5000
detail: error instanceof Error ? error.message : t('g.unknownError')
})
} finally {
loading.value = false

View File

@@ -79,8 +79,7 @@ async function onDelete() {
toast.add({
severity: 'error',
summary: t('workspacePanel.toast.failedToDeleteWorkspace'),
detail: error instanceof Error ? error.message : t('g.unknownError'),
life: 5000
detail: error instanceof Error ? error.message : t('g.unknownError')
})
} finally {
loading.value = false

View File

@@ -94,8 +94,7 @@ async function onSave() {
toast.add({
severity: 'error',
summary: t('workspacePanel.toast.failedToUpdateWorkspace'),
detail: error instanceof Error ? error.message : t('g.unknownError'),
life: 5000
detail: error instanceof Error ? error.message : t('g.unknownError')
})
} finally {
loading.value = false

View File

@@ -138,8 +138,7 @@ async function onCreateLink() {
toast.add({
severity: 'error',
summary: t('workspacePanel.inviteMemberDialog.linkCopyFailed'),
detail: error instanceof Error ? error.message : undefined,
life: 3000
detail: error instanceof Error ? error.message : undefined
})
} finally {
loading.value = false
@@ -161,8 +160,7 @@ async function onCopyLink() {
} catch {
toast.add({
severity: 'error',
summary: t('workspacePanel.inviteMemberDialog.linkCopyFailed'),
life: 3000
summary: t('workspacePanel.inviteMemberDialog.linkCopyFailed')
})
}
}

View File

@@ -68,8 +68,7 @@ async function onLeave() {
toast.add({
severity: 'error',
summary: t('workspacePanel.toast.failedToLeaveWorkspace'),
detail: error instanceof Error ? error.message : t('g.unknownError'),
life: 5000
detail: error instanceof Error ? error.message : t('g.unknownError')
})
} finally {
loading.value = false

View File

@@ -73,8 +73,7 @@ async function onRemove() {
} catch {
toast.add({
severity: 'error',
summary: t('workspacePanel.removeMemberDialog.error'),
life: 3000
summary: t('workspacePanel.removeMemberDialog.error')
})
} finally {
loading.value = false

View File

@@ -69,8 +69,7 @@ async function onRevoke() {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: error instanceof Error ? error.message : undefined,
life: 3000
detail: error instanceof Error ? error.message : undefined
})
} finally {
loading.value = false

View File

@@ -543,8 +543,7 @@ async function handleCopyInviteLink(invite: PendingInvite) {
} catch {
toast.add({
severity: 'error',
summary: t('g.error'),
life: 3000
summary: t('g.error')
})
}
}

View File

@@ -151,8 +151,7 @@ describe('useInviteUrlLoader', () => {
expect(mockToastAdd).toHaveBeenCalledWith({
severity: 'error',
summary: 'Failed to Accept Invite',
detail: 'Invalid invite',
life: 5000
detail: 'Invalid invite'
})
})
@@ -211,8 +210,7 @@ describe('useInviteUrlLoader', () => {
expect(mockToastAdd).toHaveBeenCalledWith({
severity: 'error',
summary: 'Failed to Accept Invite',
detail: 'Invalid token',
life: 5000
detail: 'Invalid token'
})
})

View File

@@ -97,8 +97,7 @@ export function useInviteUrlLoader() {
toast.add({
severity: 'error',
summary: t('workspace.inviteFailed'),
detail: error instanceof Error ? error.message : t('g.unknownError'),
life: 5000
detail: error instanceof Error ? error.message : t('g.unknownError')
})
} finally {
cleanupUrlParams()

View File

@@ -219,8 +219,7 @@ describe('billingOperationStore', () => {
expect(mockToastAdd).toHaveBeenCalledWith({
severity: 'error',
summary: 'billingOperation.subscriptionFailed',
detail: errorMessage,
life: 5000
detail: errorMessage
})
})
@@ -239,8 +238,7 @@ describe('billingOperationStore', () => {
expect(mockToastAdd).toHaveBeenCalledWith({
severity: 'error',
summary: 'billingOperation.topupFailed',
detail: undefined,
life: 5000
detail: undefined
})
})
})
@@ -267,8 +265,7 @@ describe('billingOperationStore', () => {
expect(mockToastAdd).toHaveBeenCalledWith({
severity: 'error',
summary: 'billingOperation.subscriptionTimeout',
life: 5000
summary: 'billingOperation.subscriptionTimeout'
})
})
@@ -287,8 +284,7 @@ describe('billingOperationStore', () => {
expect(mockToastAdd).toHaveBeenCalledWith({
severity: 'error',
summary: 'billingOperation.topupTimeout',
life: 5000
summary: 'billingOperation.topupTimeout'
})
})
})

View File

@@ -173,8 +173,7 @@ export const useBillingOperationStore = defineStore('billingOperation', () => {
useToastStore().add({
severity: 'error',
summary: defaultMessage,
detail: errorMessage ?? undefined,
life: 5000
detail: errorMessage ?? undefined
})
}
@@ -192,8 +191,7 @@ export const useBillingOperationStore = defineStore('billingOperation', () => {
useToastStore().add({
severity: 'error',
summary: message,
life: 5000
summary: message
})
}

View File

@@ -50,8 +50,12 @@ export function useLayoutSync() {
liteNode.size[0] !== layout.size.width ||
liteNode.size[1] !== layout.size.height
) {
// Use setSize() to trigger onResize callback
liteNode.setSize([layout.size.width, layout.size.height])
// Update internal size directly (like position above) to avoid
// the size setter writing back to layoutStore with Canvas source,
// which would create a feedback loop through handleLayoutChange.
liteNode.size[0] = layout.size.width
liteNode.size[1] = layout.size.height
liteNode.onResize?.(liteNode.size)
}
}

View File

@@ -22,6 +22,7 @@ import { api } from '@/scripts/api'
import { app } from '@/scripts/app'
import { useCommandStore } from '@/stores/commandStore'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import { useQueueSettingsStore } from '@/stores/queueStore'
import type { SimplifiedWidget } from '@/types/simplifiedWidget'
import { cn } from '@/utils/tailwindUtil'
@@ -29,6 +30,7 @@ import { cn } from '@/utils/tailwindUtil'
const { t } = useI18n()
const commandStore = useCommandStore()
const executionStore = useExecutionStore()
const executionErrorStore = useExecutionErrorStore()
const { batchCount } = storeToRefs(useQueueSettingsStore())
const settingStore = useSettingStore()
const { isActiveSubscription } = useBillingContext()
@@ -79,7 +81,7 @@ function nodeToNodeData(node: LGraphNode) {
return {
...nodeData,
//note lastNodeErrors uses exeuctionid, node.id is execution for root
hasErrors: !!executionStore.lastNodeErrors?.[node.id],
hasErrors: !!executionErrorStore.lastNodeErrors?.[node.id],
dropIndicator,
onDragDrop: node.onDragDrop,

View File

@@ -0,0 +1,106 @@
import { createTestingPinia } from '@pinia/testing'
import { mount } from '@vue/test-utils'
import { afterEach, describe, expect, it, vi } from 'vitest'
import { nextTick } from 'vue'
import { createI18n } from 'vue-i18n'
import type { ComponentProps } from 'vue-component-type-helpers'
import VideoPreview from '@/renderer/extensions/vueNodes/VideoPreview.vue'
vi.mock('@/base/common/downloadUtil', () => ({
downloadFile: vi.fn()
}))
const i18n = createI18n({
legacy: false,
locale: 'en',
messages: {
en: {
g: {
downloadVideo: 'Download video',
removeVideo: 'Remove video',
viewVideoOfTotal: 'View video {index} of {total}',
videoPreview:
'Video preview - Use arrow keys to navigate between videos',
errorLoadingVideo: 'Error loading video',
failedToDownloadVideo: 'Failed to download video',
calculatingDimensions: 'Calculating dimensions',
videoFailedToLoad: 'Video failed to load',
loading: 'Loading'
}
}
}
})
describe('VideoPreview', () => {
const defaultProps: ComponentProps<typeof VideoPreview> = {
imageUrls: [
'/api/view?filename=test1.mp4&type=output',
'/api/view?filename=test2.mp4&type=output'
]
}
afterEach(() => {
vi.clearAllMocks()
})
function mountVideoPreview(
props: Partial<ComponentProps<typeof VideoPreview>> = {}
) {
return mount(VideoPreview, {
props: { ...defaultProps, ...props } as ComponentProps<
typeof VideoPreview
>,
global: {
plugins: [createTestingPinia({ createSpy: vi.fn }), i18n],
stubs: {
Skeleton: true
}
}
})
}
describe('batch cycling with identical URLs', () => {
it('should not enter persistent loading state when cycling through identical videos', async () => {
const sameUrl = '/api/view?filename=test.mp4&type=output'
const wrapper = mountVideoPreview({
imageUrls: [sameUrl, sameUrl, sameUrl]
})
// Simulate initial video load
await wrapper.find('video').trigger('loadeddata')
await nextTick()
expect(wrapper.find('[aria-busy="true"]').exists()).toBe(false)
// Click second navigation dot to cycle to identical URL
const dots = wrapper.findAll('[aria-label^="View video"]')
await dots[1].trigger('click')
await nextTick()
// Should NOT be in loading state since URL didn't change
expect(wrapper.find('[aria-busy="true"]').exists()).toBe(false)
})
it('should show loader when cycling to a different URL', async () => {
const wrapper = mountVideoPreview({
imageUrls: [
'/api/view?filename=a.mp4&type=output',
'/api/view?filename=b.mp4&type=output'
]
})
// Simulate initial video load
await wrapper.find('video').trigger('loadeddata')
await nextTick()
expect(wrapper.find('[aria-busy="true"]').exists()).toBe(false)
// Click second dot — different URL
const dots = wrapper.findAll('[aria-label^="View video"]')
await dots[1].trigger('click')
await nextTick()
// Should be in loading state since URL changed
expect(wrapper.find('[aria-busy="true"]').exists()).toBe(true)
})
})
})

View File

@@ -205,7 +205,6 @@ const handleDownload = () => {
severity: 'error',
summary: 'Error',
detail: t('g.failedToDownloadVideo'),
life: 3000,
group: 'video-preview'
})
}
@@ -217,11 +216,15 @@ const handleRemove = () => {
}
const setCurrentIndex = (index: number) => {
if (currentIndex.value === index) return
if (index >= 0 && index < props.imageUrls.length) {
const urlChanged = props.imageUrls[index] !== currentVideoUrl.value
currentIndex.value = index
actualDimensions.value = null
showLoader.value = true
videoError.value = false
if (urlChanged) {
actualDimensions.value = null
showLoader.value = true
}
}
}
@@ -243,12 +246,13 @@ const handleFocusOut = (event: FocusEvent) => {
}
}
const getNavigationDotClass = (index: number) => {
return [
const getNavigationDotClass = (index: number) =>
cn(
'w-2 h-2 rounded-full transition-all duration-200 border-0 cursor-pointer',
index === currentIndex.value ? 'bg-white' : 'bg-white/50 hover:bg-white/80'
]
}
index === currentIndex.value
? 'bg-base-foreground'
: 'bg-base-foreground/50 hover:bg-base-foreground/80'
)
const handleKeyDown = (event: KeyboardEvent) => {
if (props.imageUrls.length <= 1) return

View File

@@ -311,6 +311,37 @@ describe('ImagePreview', () => {
expect(imgElement.attributes('alt')).toBe('Node output 2')
})
describe('batch cycling with identical URLs', () => {
it('should not enter persistent loading state when cycling through identical images', async () => {
vi.useFakeTimers()
try {
const sameUrl = '/api/view?filename=test.png&type=output'
const wrapper = mountImagePreview({
imageUrls: [sameUrl, sameUrl, sameUrl]
})
// Simulate initial image load
await wrapper.find('img').trigger('load')
await nextTick()
expect(wrapper.find('[aria-busy="true"]').exists()).toBe(false)
// Click second navigation dot to cycle
const dots = wrapper.findAll('.w-2.h-2.rounded-full')
await dots[1].trigger('click')
await nextTick()
// Advance past the delayed loader timeout
await vi.advanceTimersByTimeAsync(300)
await nextTick()
// Should NOT be in loading state since URL didn't change
expect(wrapper.find('[aria-busy="true"]').exists()).toBe(false)
} finally {
vi.useRealTimers()
}
})
})
describe('URL change detection', () => {
it('should NOT reset loading state when imageUrls prop is reassigned with identical URLs', async () => {
vi.useFakeTimers()
@@ -343,30 +374,33 @@ describe('ImagePreview', () => {
})
it('should reset loading state when imageUrls prop changes to different URLs', async () => {
const urls = ['/api/view?filename=test.png&type=output']
const wrapper = mountImagePreview({ imageUrls: urls })
vi.useFakeTimers()
try {
const urls = ['/api/view?filename=test.png&type=output']
const wrapper = mountImagePreview({ imageUrls: urls })
// Simulate image load completing
const img = wrapper.find('img')
await img.trigger('load')
await nextTick()
// Simulate image load completing
const img = wrapper.find('img')
await img.trigger('load')
await nextTick()
// Verify loader is hidden
expect(wrapper.find('[aria-busy="true"]').exists()).toBe(false)
// Verify loader is hidden
expect(wrapper.find('[aria-busy="true"]').exists()).toBe(false)
// Change to different URL
await wrapper.setProps({
imageUrls: ['/api/view?filename=different.png&type=output']
})
await nextTick()
// Change to different URL
await wrapper.setProps({
imageUrls: ['/api/view?filename=different.png&type=output']
})
await nextTick()
// After 250ms timeout, loading state should be reset (aria-busy="true")
// We can check the internal state via the Skeleton appearing
// or wait for the timeout
await new Promise((resolve) => setTimeout(resolve, 300))
await nextTick()
// Advance past the 250ms delayed loader timeout
await vi.advanceTimersByTimeAsync(300)
await nextTick()
expect(wrapper.find('[aria-busy="true"]').exists()).toBe(true)
expect(wrapper.find('[aria-busy="true"]').exists()).toBe(true)
} finally {
vi.useRealTimers()
}
})
it('should handle empty to non-empty URL transitions correctly', async () => {

View File

@@ -235,7 +235,6 @@ const handleDownload = () => {
severity: 'error',
summary: 'Error',
detail: t('g.failedToDownloadImage'),
life: 3000,
group: 'image-preview'
})
}
@@ -249,9 +248,10 @@ const handleRemove = () => {
const setCurrentIndex = (index: number) => {
if (currentIndex.value === index) return
if (index >= 0 && index < props.imageUrls.length) {
const urlChanged = props.imageUrls[index] !== currentImageUrl.value
currentIndex.value = index
startDelayedLoader()
imageError.value = false
if (urlChanged) startDelayedLoader()
}
}

View File

@@ -122,46 +122,85 @@
<NodeBadges v-bind="badges" :pricing="undefined" class="mt-auto" />
</div>
</template>
<Button
variant="textonly"
<div
v-if="
(hasAnyError && showErrorsTabEnabled) ||
lgraphNode?.isSubgraphNode() ||
showAdvancedState ||
showAdvancedInputsButton
"
:class="
cn(
'w-full h-7 rounded-b-2xl py-2 -z-1 text-xs rounded-t-none',
hasAnyError && 'hover:bg-destructive-background-hover',
!isCollapsed && '-mt-5 pt-7 h-12'
'flex w-full h-7 rounded-b-2xl -z-1 text-xs rounded-t-none overflow-hidden divide-x divide-component-node-border',
!isCollapsed && '-mt-5 h-12'
)
"
as-child
>
<button
v-if="hasAnyError && showErrorsTabEnabled"
@click.stop="useRightSidePanelStore().openPanel('errors')"
>
<span>{{ t('g.error') }}</span>
<i class="icon-[lucide--info] size-4" />
</button>
<button
v-else-if="lgraphNode?.isSubgraphNode()"
<Button
v-if="lgraphNode?.isSubgraphNode()"
variant="textonly"
:class="
cn(
'flex-1 rounded-none h-full',
hasAnyError &&
showErrorsTabEnabled &&
!nodeData.color &&
'bg-node-component-header-surface',
isCollapsed ? 'py-2' : 'pt-7 pb-2'
)
"
data-testid="subgraph-enter-button"
@click.stop="handleEnterSubgraph"
>
<span>{{ t('g.enterSubgraph') }}</span>
<i class="icon-[comfy--workflow] size-4" />
</button>
<button
v-else-if="showAdvancedState || showAdvancedInputsButton"
<span class="truncate">{{
hasAnyError && showErrorsTabEnabled
? t('g.enter')
: t('g.enterSubgraph')
}}</span>
<i class="icon-[comfy--workflow] size-4 shrink-0" />
</Button>
<Button
v-if="hasAnyError && showErrorsTabEnabled"
variant="textonly"
:class="
cn(
'flex-1 rounded-none h-full bg-error hover:bg-destructive-background-hover',
isCollapsed ? 'py-2' : 'pt-7 pb-2'
)
"
@click.stop="useRightSidePanelStore().openPanel('errors')"
>
<span class="truncate">{{ t('g.error') }}</span>
<i class="icon-[lucide--info] size-4 shrink-0" />
</Button>
<!-- Advanced inputs (non-subgraph nodes only) -->
<Button
v-if="
!lgraphNode?.isSubgraphNode() &&
(showAdvancedState || showAdvancedInputsButton)
"
variant="textonly"
:class="
cn('flex-1 rounded-none h-full', isCollapsed ? 'py-2' : 'pt-7 pb-2')
"
@click.stop="showAdvancedState = !showAdvancedState"
>
<template v-if="showAdvancedState">
<span>{{ t('rightSidePanel.hideAdvancedInputsButton') }}</span>
<i class="icon-[lucide--chevron-up] size-4" />
<span class="truncate">{{
t('rightSidePanel.hideAdvancedInputsButton')
}}</span>
<i class="icon-[lucide--chevron-up] size-4 shrink-0" />
</template>
<template v-else>
<span>{{ t('rightSidePanel.showAdvancedInputsButton') }} </span>
<i class="icon-[lucide--settings-2] size-4" />
<span class="truncate">{{
t('rightSidePanel.showAdvancedInputsButton')
}}</span>
<i class="icon-[lucide--settings-2] size-4 shrink-0" />
</template>
</button>
</Button>
</Button>
</div>
<template v-if="!isCollapsed && nodeData.resizable !== false">
<div
v-for="handle in RESIZE_HANDLES"
@@ -246,7 +285,7 @@ import { useNodePreviewState } from '@/renderer/extensions/vueNodes/preview/useN
import { nonWidgetedInputs } from '@/renderer/extensions/vueNodes/utils/nodeDataUtils'
import { applyLightThemeColor } from '@/renderer/extensions/vueNodes/utils/nodeStyleUtils'
import { app } from '@/scripts/app'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import { useNodeOutputStore } from '@/stores/imagePreviewStore'
import { useRightSidePanelStore } from '@/stores/workspace/rightSidePanelStore'
import { isTransparent } from '@/utils/colorUtil'
@@ -293,9 +332,9 @@ const isSelected = computed(() => {
const nodeLocatorId = computed(() => getLocatorIdFromNodeData(nodeData))
const { executing, progress } = useNodeExecutionState(nodeLocatorId)
const executionStore = useExecutionStore()
const executionErrorStore = useExecutionErrorStore()
const hasExecutionError = computed(
() => executionStore.lastExecutionErrorNodeId === nodeData.id
() => executionErrorStore.lastExecutionErrorNodeId === nodeData.id
)
const hasAnyError = computed((): boolean => {
@@ -303,7 +342,9 @@ const hasAnyError = computed((): boolean => {
hasExecutionError.value ||
nodeData.hasErrors ||
error ||
(executionStore.lastNodeErrors?.[nodeData.id]?.errors.length ?? 0) > 0
executionErrorStore.getNodeErrors(nodeLocatorId.value) ||
(lgraphNode.value &&
executionErrorStore.isContainerWithInternalError(lgraphNode.value))
)
})

View File

@@ -101,7 +101,7 @@ import {
stripGraphPrefix,
useWidgetValueStore
} from '@/stores/widgetValueStore'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import type { SimplifiedWidget, WidgetValue } from '@/types/simplifiedWidget'
import { cn } from '@/utils/tailwindUtil'
@@ -116,7 +116,7 @@ const { nodeData } = defineProps<NodeWidgetsProps>()
const { shouldHandleNodePointerEvents, forwardEventToCanvas } =
useCanvasInteractions()
const { bringNodeToFront } = useNodeZIndex()
const executionStore = useExecutionStore()
const executionErrorStore = useExecutionErrorStore()
function handleWidgetPointerEvent(event: PointerEvent) {
if (shouldHandleNodePointerEvents.value) return
@@ -170,7 +170,7 @@ interface ProcessedWidget {
const processedWidgets = computed((): ProcessedWidget[] => {
if (!nodeData?.widgets) return []
const nodeErrors = executionStore.lastNodeErrors?.[nodeData.id ?? '']
const nodeErrors = executionErrorStore.lastNodeErrors?.[nodeData.id ?? '']
const nodeId = nodeData.id
const { widgets } = nodeData

View File

@@ -213,8 +213,7 @@ const handleDownload = () => {
toast.add({
severity: 'error',
summary: t('g.error'),
detail: t('g.failedToDownloadFile'),
life: 3000
detail: t('g.failedToDownloadFile')
})
}
}

View File

@@ -47,6 +47,7 @@ export function useTextPreviewWidget(
typeof value === 'string' ? value : String(value)
},
getMinHeight: () => options.minHeight ?? 42 + PADDING,
serialize: false,
read_only: true
},
type: inputSpec.type

View File

@@ -1180,9 +1180,16 @@ export class ComfyApi extends EventTarget {
async getGlobalSubgraphData(id: string): Promise<string> {
const resp = await api.fetchApi('/global_subgraphs/' + id)
if (resp.status !== 200) return ''
if (resp.status !== 200) {
throw new Error(
`Failed to fetch global subgraph '${id}': ${resp.status} ${resp.statusText}`
)
}
const subgraph: GlobalSubgraphData = await resp.json()
return subgraph?.data ?? ''
if (!subgraph?.data) {
throw new Error(`Global subgraph '${id}' returned empty data`)
}
return subgraph.data as string
}
async getGlobalSubgraphs(): Promise<Record<string, GlobalSubgraphData>> {
const resp = await api.fetchApi('/global_subgraphs')
@@ -1263,15 +1270,13 @@ export class ComfyApi extends EventTarget {
useToastStore().add({
severity: 'error',
summary:
'Unloading of models failed. Installed ComfyUI may be an outdated version.',
life: 5000
'Unloading of models failed. Installed ComfyUI may be an outdated version.'
})
}
} catch (error) {
useToastStore().add({
severity: 'error',
summary: 'An error occurred while trying to unload models.',
life: 5000
summary: 'An error occurred while trying to unload models.'
})
}
}

View File

@@ -9,6 +9,7 @@ import { layoutStore } from '@/renderer/core/layout/store/layoutStore'
import { flushScheduledSlotLayoutSync } from '@/renderer/extensions/vueNodes/composables/useSlotElementTracking'
import { registerProxyWidgets } from '@/core/graph/subgraph/proxyWidget'
import { st, t } from '@/i18n'
import { ChangeTracker } from '@/scripts/changeTracker'
import type { IContextMenuValue } from '@/lib/litegraph/src/interfaces'
import {
LGraph,
@@ -60,6 +61,7 @@ import { useApiKeyAuthStore } from '@/stores/apiKeyAuthStore'
import { useCommandStore } from '@/stores/commandStore'
import { useDomWidgetStore } from '@/stores/domWidgetStore'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import { useExtensionStore } from '@/stores/extensionStore'
import { useFirebaseAuthStore } from '@/stores/firebaseAuthStore'
import { useNodeOutputStore } from '@/stores/imagePreviewStore'
@@ -218,18 +220,18 @@ export class ComfyApp {
/**
* The node errors from the previous execution.
* @deprecated Use useExecutionStore().lastNodeErrors instead
* @deprecated Use app.extensionManager.lastNodeErrors instead
*/
get lastNodeErrors(): Record<NodeId, NodeError> | null {
return useExecutionStore().lastNodeErrors
return useExecutionErrorStore().lastNodeErrors
}
/**
* The error from the previous execution.
* @deprecated Use useExecutionStore().lastExecutionError instead
* @deprecated Use app.extensionManager.lastExecutionError instead
*/
get lastExecutionError(): ExecutionErrorWsMessage | null {
return useExecutionStore().lastExecutionError
return useExecutionErrorStore().lastExecutionError
}
/**
@@ -713,7 +715,7 @@ export class ComfyApp {
})
}
} else if (useSettingStore().get('Comfy.RightSidePanel.ShowErrorsTab')) {
useExecutionStore().showErrorOverlay()
useExecutionErrorStore().showErrorOverlay()
} else {
useDialogService().showExecutionErrorDialog(detail)
}
@@ -1273,47 +1275,71 @@ export class ComfyApp {
useLitegraphService().fitView()
}
}
} catch (error) {
useDialogService().showErrorDialog(error, {
title: t('errorDialog.loadWorkflowTitle'),
reportType: 'loadWorkflowError'
})
console.error(error)
return
}
forEachNode(this.rootGraph, (node) => {
const size = node.computeSize()
size[0] = Math.max(node.size[0], size[0])
size[1] = Math.max(node.size[1], size[1])
node.setSize(size)
if (node.widgets) {
// If you break something in the backend and want to patch workflows in the frontend
// This is the place to do this
for (let widget of node.widgets) {
if (node.type == 'KSampler' || node.type == 'KSamplerAdvanced') {
if (widget.name == 'sampler_name') {
if (
typeof widget.value === 'string' &&
widget.value.startsWith('sample_')
) {
widget.value = widget.value.slice(7)
ChangeTracker.isLoadingGraph = true
try {
try {
// @ts-expect-error Discrepancies between zod and litegraph - in progress
this.rootGraph.configure(graphData)
// Save original renderer version before scaling (it gets modified during scaling)
const originalMainGraphRenderer =
this.rootGraph.extra.workflowRendererVersion
// Scale main graph
ensureCorrectLayoutScale(originalMainGraphRenderer)
// Scale all subgraphs that were loaded with the workflow
// Use original main graph renderer as fallback (not the modified one)
for (const subgraph of this.rootGraph.subgraphs.values()) {
ensureCorrectLayoutScale(
subgraph.extra.workflowRendererVersion || originalMainGraphRenderer,
subgraph
)
}
if (canvasVisible) fitView()
} catch (error) {
useDialogService().showErrorDialog(error, {
title: t('errorDialog.loadWorkflowTitle'),
reportType: 'loadWorkflowError'
})
console.error(error)
return
}
forEachNode(this.rootGraph, (node) => {
const size = node.computeSize()
size[0] = Math.max(node.size[0], size[0])
size[1] = Math.max(node.size[1], size[1])
node.setSize(size)
if (node.widgets) {
// If you break something in the backend and want to patch workflows in the frontend
// This is the place to do this
for (let widget of node.widgets) {
if (node.type == 'KSampler' || node.type == 'KSamplerAdvanced') {
if (widget.name == 'sampler_name') {
if (
typeof widget.value === 'string' &&
widget.value.startsWith('sample_')
) {
widget.value = widget.value.slice(7)
}
}
}
}
if (
node.type == 'KSampler' ||
node.type == 'KSamplerAdvanced' ||
node.type == 'PrimitiveNode'
) {
if (widget.name == 'control_after_generate') {
if (widget.value === true) {
widget.value = 'randomize'
} else if (widget.value === false) {
widget.value = 'fixed'
if (
node.type == 'KSampler' ||
node.type == 'KSamplerAdvanced' ||
node.type == 'PrimitiveNode'
) {
if (widget.name == 'control_after_generate') {
if (widget.value === true) {
widget.value = 'randomize'
} else if (widget.value === false) {
widget.value = 'fixed'
}
}
}
}
if (reset_invalid_values) {
if (widget.type == 'combo') {
const values = widget.options.values as
| (string | number | boolean)[]
@@ -1321,60 +1347,73 @@ export class ComfyApp {
if (
values &&
values.length > 0 &&
!values.includes(widget.value as string | number | boolean)
(widget.value == null ||
(reset_invalid_values &&
!values.includes(
widget.value as string | number | boolean
)))
) {
widget.value = values[0]
}
}
}
}
useExtensionService().invokeExtensions('loadedGraphNode', node)
})
await useExtensionService().invokeExtensionsAsync(
'afterConfigureGraph',
missingNodeTypes
)
const telemetryPayload = {
missing_node_count: missingNodeTypes.length,
missing_node_types: missingNodeTypes.map((node) =>
typeof node === 'string' ? node : node.type
),
open_source: openSource ?? 'unknown'
}
useTelemetry()?.trackWorkflowOpened(telemetryPayload)
useTelemetry()?.trackWorkflowImported(telemetryPayload)
await useWorkflowService().afterLoadNewGraph(
workflow,
this.rootGraph.serialize() as unknown as ComfyWorkflowJSON
)
// If the canvas was not visible and we're a fresh load, resize the canvas and fit the view
// This fixes switching from app mode to a new graph mode workflow (e.g. load template)
if (!canvasVisible && (!workflow || typeof workflow === 'string')) {
this.canvas.resize()
requestAnimationFrame(() => fitView())
}
useExtensionService().invokeExtensions('loadedGraphNode', node)
})
// Store pending warnings on the workflow for deferred display
const activeWf = useWorkspaceStore().workflow.activeWorkflow
if (activeWf) {
const warnings: PendingWarnings = {}
if (missingNodeTypes.length && showMissingNodesDialog) {
warnings.missingNodeTypes = missingNodeTypes
}
if (missingModels.length && showMissingModelsDialog) {
const paths = await api.getFolderPaths()
warnings.missingModels = { missingModels: missingModels, paths }
}
if (warnings.missingNodeTypes || warnings.missingModels) {
activeWf.pendingWarnings = warnings
}
}
await useExtensionService().invokeExtensionsAsync(
'afterConfigureGraph',
missingNodeTypes
)
if (!deferWarnings) {
useWorkflowService().showPendingWarnings()
}
const telemetryPayload = {
missing_node_count: missingNodeTypes.length,
missing_node_types: missingNodeTypes.map((node) =>
typeof node === 'string' ? node : node.type
),
open_source: openSource ?? 'unknown'
requestAnimationFrame(() => {
this.canvas.setDirty(true, true)
})
} finally {
ChangeTracker.isLoadingGraph = false
}
useTelemetry()?.trackWorkflowOpened(telemetryPayload)
useTelemetry()?.trackWorkflowImported(telemetryPayload)
await useWorkflowService().afterLoadNewGraph(
workflow,
this.rootGraph.serialize() as unknown as ComfyWorkflowJSON
)
// Store pending warnings on the workflow for deferred display
const activeWf = useWorkspaceStore().workflow.activeWorkflow
if (activeWf) {
const warnings: PendingWarnings = {}
if (missingNodeTypes.length && showMissingNodesDialog) {
warnings.missingNodeTypes = missingNodeTypes
}
if (missingModels.length && showMissingModelsDialog) {
const paths = await api.getFolderPaths()
warnings.missingModels = { missingModels: missingModels, paths }
}
if (warnings.missingNodeTypes || warnings.missingModels) {
activeWf.pendingWarnings = warnings
}
}
if (!deferWarnings) {
useWorkflowService().showPendingWarnings()
}
requestAnimationFrame(() => {
this.canvas.setDirty(true, true)
})
}
async graphToPrompt(graph = this.rootGraph) {
@@ -1402,9 +1441,8 @@ export class ComfyApp {
this.processingQueue = true
const executionStore = useExecutionStore()
executionStore.lastNodeErrors = null
executionStore.lastExecutionError = null
executionStore.lastPromptError = null
const executionErrorStore = useExecutionErrorStore()
executionErrorStore.clearAllErrors()
// Get auth token for backend nodes - uses workspace token if enabled, otherwise Firebase token
const comfyOrgAuthToken = await useFirebaseAuthStore().getAuthToken()
@@ -1440,8 +1478,8 @@ export class ComfyApp {
})
delete api.authToken
delete api.apiKey
executionStore.lastNodeErrors = res.node_errors ?? null
if (executionStore.lastNodeErrors?.length) {
executionErrorStore.lastNodeErrors = res.node_errors ?? null
if (executionErrorStore.lastNodeErrors?.length) {
this.canvas.draw(true, true)
} else {
try {
@@ -1477,7 +1515,8 @@ export class ComfyApp {
console.error(error)
if (error instanceof PromptExecutionError) {
executionStore.lastNodeErrors = error.response.node_errors ?? null
executionErrorStore.lastNodeErrors =
error.response.node_errors ?? null
// Store prompt-level error separately only when no node-specific errors exist,
// because node errors already carry the full context. Prompt-level errors
@@ -1489,13 +1528,13 @@ export class ComfyApp {
if (!hasNodeErrors) {
const respError = error.response.error
if (respError && typeof respError === 'object') {
executionStore.lastPromptError = {
executionErrorStore.lastPromptError = {
type: respError.type,
message: respError.message,
details: respError.details ?? ''
}
} else if (typeof respError === 'string') {
executionStore.lastPromptError = {
executionErrorStore.lastPromptError = {
type: 'error',
message: respError,
details: ''
@@ -1504,7 +1543,7 @@ export class ComfyApp {
}
if (useSettingStore().get('Comfy.RightSidePanel.ShowErrorsTab')) {
executionStore.showErrorOverlay()
executionErrorStore.showErrorOverlay()
}
this.canvas.draw(true, true)
}
@@ -1533,7 +1572,7 @@ export class ComfyApp {
} finally {
this.processingQueue = false
}
return !executionStore.lastNodeErrors
return !executionErrorStore.lastNodeErrors
}
showErrorOnFileLoad(file: File) {
@@ -1880,10 +1919,8 @@ export class ComfyApp {
clean() {
const nodeOutputStore = useNodeOutputStore()
nodeOutputStore.resetAllOutputsAndPreviews()
const executionStore = useExecutionStore()
executionStore.lastNodeErrors = null
executionStore.lastExecutionError = null
executionStore.lastPromptError = null
const executionErrorStore = useExecutionErrorStore()
executionErrorStore.clearAllErrors()
useDomWidgetStore().clear()

View File

@@ -28,6 +28,14 @@ logger.setLevel('info')
export class ChangeTracker {
static MAX_HISTORY = 50
/**
* Guard flag to prevent checkState from running during loadGraphData.
* Between rootGraph.configure() and afterLoadNewGraph(), the rootGraph
* contains the NEW workflow's data while activeWorkflow still points to
* the OLD workflow. Any checkState call in that window would serialize
* the wrong graph into the old workflow's activeState, corrupting it.
*/
static isLoadingGraph = false
/**
* The active state of the workflow.
*/
@@ -77,6 +85,7 @@ export class ChangeTracker {
scale: app.canvas.ds.scale,
offset: [app.canvas.ds.offset[0], app.canvas.ds.offset[1]]
}
this.nodeOutputs = clone(app.nodeOutputs)
const navigation = useSubgraphNavigationStore().exportState()
// Always store the navigation state, even if empty (root level)
this.subgraphState = { navigation }
@@ -130,7 +139,7 @@ export class ChangeTracker {
}
checkState() {
if (!app.graph || this.changeCount) return
if (!app.graph || this.changeCount || ChangeTracker.isLoadingGraph) return
const currentState = clone(app.rootGraph.serialize()) as ComfyWorkflowJSON
if (!this.activeState) {
this.activeState = currentState

View File

@@ -0,0 +1,306 @@
import { defineStore } from 'pinia'
import { computed, ref, watch } from 'vue'
import { useCanvasStore } from '@/renderer/core/canvas/canvasStore'
import { useWorkflowStore } from '@/platform/workflow/management/stores/workflowStore'
import { app } from '@/scripts/app'
import type {
ExecutionErrorWsMessage,
NodeError,
PromptError
} from '@/schemas/apiSchema'
import type { NodeId } from '@/platform/workflow/validation/schemas/workflowSchema'
import type { LGraphNode } from '@/lib/litegraph/src/litegraph'
import type { NodeExecutionId, NodeLocatorId } from '@/types/nodeIdentification'
import {
executionIdToNodeLocatorId,
forEachNode,
getNodeByExecutionId,
getExecutionIdByNode
} from '@/utils/graphTraversalUtil'
/**
* Store dedicated to execution error state management.
*
* Extracted from executionStore to separate error-related concerns
* (state, computed properties, graph flag propagation, overlay UI)
* from execution flow management (progress, queuing, events).
*/
export const useExecutionErrorStore = defineStore('executionError', () => {
  const workflowStore = useWorkflowStore()
  const canvasStore = useCanvasStore()

  // Raw error state as reported by the backend:
  // - lastNodeErrors: per-node validation errors, keyed by execution ID
  //   (colon-separated path for nodes nested in subgraphs, e.g. "65:70:63")
  // - lastExecutionError: runtime error from the execution_error ws message
  // - lastPromptError: prompt-level error with no specific node attached
  const lastNodeErrors = ref<Record<NodeId, NodeError> | null>(null)
  const lastExecutionError = ref<ExecutionErrorWsMessage | null>(null)
  const lastPromptError = ref<PromptError | null>(null)

  // Visibility of the error overlay UI.
  const isErrorOverlayOpen = ref(false)
  function showErrorOverlay() {
    isErrorOverlayOpen.value = true
  }
  function dismissErrorOverlay() {
    isErrorOverlayOpen.value = false
  }

  /** Clear all error state. Called at execution start. */
  function clearAllErrors() {
    lastExecutionError.value = null
    lastPromptError.value = null
    lastNodeErrors.value = null
    isErrorOverlayOpen.value = false
  }

  /** Clear only prompt-level errors. Called during resetExecutionState. */
  function clearPromptError() {
    lastPromptError.value = null
  }

  /** Locator ID of the node named in the runtime execution error, or null. */
  const lastExecutionErrorNodeLocatorId = computed(() => {
    const err = lastExecutionError.value
    if (!err) return null
    return executionIdToNodeLocatorId(app.rootGraph, String(err.node_id))
  })

  /** Graph-local node ID of the runtime-error node, or null if unresolvable. */
  const lastExecutionErrorNodeId = computed(() => {
    const locator = lastExecutionErrorNodeLocatorId.value
    if (!locator) return null
    const localId = workflowStore.nodeLocatorIdToNodeId(locator)
    return localId != null ? String(localId) : null
  })

  /** Whether a runtime execution error is present */
  const hasExecutionError = computed(() => !!lastExecutionError.value)
  /** Whether a prompt-level error is present (e.g. invalid_prompt, prompt_no_outputs) */
  const hasPromptError = computed(() => !!lastPromptError.value)
  /** Whether any node validation errors are present */
  const hasNodeError = computed(
    () => !!lastNodeErrors.value && Object.keys(lastNodeErrors.value).length > 0
  )
  /** Whether any error (node validation, runtime execution, or prompt-level) is present */
  const hasAnyError = computed(
    () => hasExecutionError.value || hasPromptError.value || hasNodeError.value
  )

  /**
   * Execution IDs of all error nodes: every validation-error node plus the
   * runtime execution error's node (when the message names one).
   */
  const allErrorExecutionIds = computed<string[]>(() => {
    const ids: string[] = []
    if (lastNodeErrors.value) {
      ids.push(...Object.keys(lastNodeErrors.value))
    }
    if (lastExecutionError.value) {
      const nodeId = lastExecutionError.value.node_id
      if (nodeId !== null && nodeId !== undefined) {
        ids.push(String(nodeId))
      }
    }
    return ids
  })

  /** Count of prompt-level errors (0 or 1) */
  const promptErrorCount = computed(() => (lastPromptError.value ? 1 : 0))
  /** Count of all individual node validation errors */
  const nodeErrorCount = computed(() => {
    if (!lastNodeErrors.value) return 0
    let count = 0
    // Each NodeError can carry multiple individual errors; count them all.
    for (const nodeError of Object.values(lastNodeErrors.value)) {
      count += nodeError.errors.length
    }
    return count
  })
  /** Count of runtime execution errors (0 or 1) */
  const executionErrorCount = computed(() => (lastExecutionError.value ? 1 : 0))
  /** Total count of all individual errors */
  const totalErrorCount = computed(
    () =>
      promptErrorCount.value + nodeErrorCount.value + executionErrorCount.value
  )

  /** Pre-computed Set of graph node IDs (as strings) that have errors in the current graph scope. */
  const activeGraphErrorNodeIds = computed<Set<string>>(() => {
    const ids = new Set<string>()
    if (!app.rootGraph) return ids
    // Fall back to rootGraph when currentGraph hasn't been initialized yet
    const activeGraph = canvasStore.currentGraph ?? app.rootGraph
    if (lastNodeErrors.value) {
      for (const executionId of Object.keys(lastNodeErrors.value)) {
        const graphNode = getNodeByExecutionId(app.rootGraph, executionId)
        // Only surface nodes that live directly in the graph being viewed.
        if (graphNode?.graph === activeGraph) {
          ids.add(String(graphNode.id))
        }
      }
    }
    if (lastExecutionError.value) {
      // NOTE(review): unlike allErrorExecutionIds, node_id is not checked for
      // null/undefined here before String() — confirm the ws message always
      // carries a node_id when lastExecutionError is set.
      const execNodeId = String(lastExecutionError.value.node_id)
      const graphNode = getNodeByExecutionId(app.rootGraph, execNodeId)
      if (graphNode?.graph === activeGraph) {
        ids.add(String(graphNode.id))
      }
    }
    return ids
  })

  /** Map of node errors indexed by locator ID. */
  const nodeErrorsByLocatorId = computed<Record<NodeLocatorId, NodeError>>(
    () => {
      if (!lastNodeErrors.value) return {}
      const map: Record<NodeLocatorId, NodeError> = {}
      for (const [executionId, nodeError] of Object.entries(
        lastNodeErrors.value
      )) {
        const locatorId = executionIdToNodeLocatorId(app.rootGraph, executionId)
        // Errors whose execution ID cannot be resolved to a locator are
        // silently dropped from this map (they remain in lastNodeErrors).
        if (locatorId) {
          map[locatorId] = nodeError
        }
      }
      return map
    }
  )

  /** Get node errors by locator ID. */
  const getNodeErrors = (
    nodeLocatorId: NodeLocatorId
  ): NodeError | undefined => {
    return nodeErrorsByLocatorId.value[nodeLocatorId]
  }

  /** Check if a specific slot has validation errors. */
  const slotHasError = (
    nodeLocatorId: NodeLocatorId,
    slotName: string
  ): boolean => {
    const nodeError = getNodeErrors(nodeLocatorId)
    if (!nodeError) return false
    // A slot has an error when any error entry names it as the input.
    return nodeError.errors.some((e) => e.extra_info?.input_name === slotName)
  }

  /**
   * Set of all execution ID prefixes derived from active error nodes,
   * including the error nodes themselves.
   *
   * Example: error at "65:70:63" → Set { "65", "65:70", "65:70:63" }
   */
  const errorAncestorExecutionIds = computed<Set<NodeExecutionId>>(() => {
    const ids = new Set<NodeExecutionId>()
    for (const executionId of allErrorExecutionIds.value) {
      const parts = executionId.split(':')
      // Add every prefix including the full ID (error leaf node itself)
      for (let i = 1; i <= parts.length; i++) {
        ids.add(parts.slice(0, i).join(':'))
      }
    }
    return ids
  })

  /** True if the node has errors inside it at any nesting depth. */
  function isContainerWithInternalError(node: LGraphNode): boolean {
    if (!app.rootGraph) return false
    const execId = getExecutionIdByNode(app.rootGraph, node)
    if (!execId) return false
    return errorAncestorExecutionIds.value.has(execId)
  }

  /**
   * Update node and slot error flags when validation errors change.
   * Propagates errors up subgraph chains.
   *
   * NOTE(review): this mutates litegraph node objects in place
   * (node.has_errors, slot.hasErrors) rather than store state — presumably
   * the canvas reads these flags on its next redraw; confirm a redraw is
   * triggered by the code path that sets lastNodeErrors.
   */
  watch(lastNodeErrors, () => {
    if (!app.rootGraph) return
    // Clear all error flags
    forEachNode(app.rootGraph, (node) => {
      node.has_errors = false
      if (node.inputs) {
        for (const slot of node.inputs) {
          slot.hasErrors = false
        }
      }
    })
    if (!lastNodeErrors.value) return
    // Set error flags on nodes and slots
    for (const [executionId, nodeError] of Object.entries(
      lastNodeErrors.value
    )) {
      const node = getNodeByExecutionId(app.rootGraph, executionId)
      if (!node) continue
      node.has_errors = true
      // Mark input slots with errors
      if (node.inputs) {
        for (const error of nodeError.errors) {
          const slotName = error.extra_info?.input_name
          if (!slotName) continue
          const slot = node.inputs.find((s) => s.name === slotName)
          if (slot) {
            slot.hasErrors = true
          }
        }
      }
      // Propagate errors to parent subgraph nodes
      // (walk each prefix of the colon-separated execution ID).
      const parts = executionId.split(':')
      for (let i = parts.length - 1; i > 0; i--) {
        const parentExecutionId = parts.slice(0, i).join(':')
        const parentNode = getNodeByExecutionId(
          app.rootGraph,
          parentExecutionId
        )
        if (parentNode) {
          parentNode.has_errors = true
        }
      }
    }
  })

  return {
    // Raw state
    lastNodeErrors,
    lastExecutionError,
    lastPromptError,
    // Clearing
    clearAllErrors,
    clearPromptError,
    // Overlay UI
    isErrorOverlayOpen,
    showErrorOverlay,
    dismissErrorOverlay,
    // Derived state
    hasExecutionError,
    hasPromptError,
    hasNodeError,
    hasAnyError,
    allErrorExecutionIds,
    totalErrorCount,
    lastExecutionErrorNodeId,
    activeGraphErrorNodeIds,
    // Lookup helpers
    getNodeErrors,
    slotHasError,
    errorAncestorExecutionIds,
    isContainerWithInternalError
  }
})

View File

@@ -2,6 +2,8 @@ import { setActivePinia } from 'pinia'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import { app } from '@/scripts/app'
import { useExecutionStore } from '@/stores/executionStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import { executionIdToNodeLocatorId } from '@/utils/graphTraversalUtil'
// Create mock functions that will be shared
const mockNodeExecutionIdToNodeLocatorId = vi.fn()
@@ -80,20 +82,20 @@ describe('useExecutionStore - NodeLocatorId conversions', () => {
// Mock app.rootGraph.getNodeById to return the mock node
vi.mocked(app.rootGraph.getNodeById).mockReturnValue(mockNode)
const result = store.executionIdToNodeLocatorId('123:456')
const result = executionIdToNodeLocatorId(app.rootGraph, '123:456')
expect(result).toBe('a1b2c3d4-e5f6-7890-abcd-ef1234567890:456')
})
it('should convert simple node ID to NodeLocatorId', () => {
const result = store.executionIdToNodeLocatorId('123')
const result = executionIdToNodeLocatorId(app.rootGraph, '123')
// For simple node IDs, it should return the ID as-is
expect(result).toBe('123')
})
it('should handle numeric node IDs', () => {
const result = store.executionIdToNodeLocatorId(123)
const result = executionIdToNodeLocatorId(app.rootGraph, 123)
// For numeric IDs, it should convert to string and return as-is
expect(result).toBe('123')
@@ -103,7 +105,9 @@ describe('useExecutionStore - NodeLocatorId conversions', () => {
// Mock app.rootGraph.getNodeById to return null (node not found)
vi.mocked(app.rootGraph.getNodeById).mockReturnValue(null)
expect(store.executionIdToNodeLocatorId('999:456')).toBe(undefined)
expect(executionIdToNodeLocatorId(app.rootGraph, '999:456')).toBe(
undefined
)
})
})
@@ -174,13 +178,13 @@ describe('useExecutionStore - reconcileInitializingJobs', () => {
})
})
describe('useExecutionStore - Node Error Lookups', () => {
let store: ReturnType<typeof useExecutionStore>
describe('useExecutionErrorStore - Node Error Lookups', () => {
let store: ReturnType<typeof useExecutionErrorStore>
beforeEach(() => {
vi.clearAllMocks()
setActivePinia(createTestingPinia({ stubActions: false }))
store = useExecutionStore()
store = useExecutionErrorStore()
})
describe('getNodeErrors', () => {

View File

@@ -1,8 +1,7 @@
import { defineStore } from 'pinia'
import { computed, ref, watch } from 'vue'
import { computed, ref } from 'vue'
import { useNodeProgressText } from '@/composables/node/useNodeProgressText'
import type { LGraph, Subgraph } from '@/lib/litegraph/src/litegraph'
import { isCloud } from '@/platform/distribution/types'
import { useTelemetry } from '@/platform/telemetry'
import type { ComfyWorkflow } from '@/platform/workflow/management/stores/workflowStore'
@@ -20,22 +19,20 @@ import type {
ExecutionInterruptedWsMessage,
ExecutionStartWsMessage,
ExecutionSuccessWsMessage,
NodeError,
NodeProgressState,
NotificationWsMessage,
ProgressStateWsMessage,
ProgressTextWsMessage,
ProgressWsMessage,
PromptError
ProgressWsMessage
} from '@/schemas/apiSchema'
import { api } from '@/scripts/api'
import { app } from '@/scripts/app'
import { useNodeOutputStore } from '@/stores/imagePreviewStore'
import { useJobPreviewStore } from '@/stores/jobPreviewStore'
import { useExecutionErrorStore } from '@/stores/executionErrorStore'
import type { NodeLocatorId } from '@/types/nodeIdentification'
import { createNodeLocatorId } from '@/types/nodeIdentification'
import { forEachNode, getNodeByExecutionId } from '@/utils/graphTraversalUtil'
import { classifyCloudValidationError } from '@/utils/executionErrorUtil'
import { executionIdToNodeLocatorId } from '@/utils/graphTraversalUtil'
interface QueuedJob {
/**
@@ -49,73 +46,14 @@ interface QueuedJob {
workflow?: ComfyWorkflow
}
const subgraphNodeIdToSubgraph = (id: string, graph: LGraph | Subgraph) => {
const node = graph.getNodeById(id)
if (node?.isSubgraphNode()) return node.subgraph
}
/**
* Recursively get the subgraph objects for the given subgraph instance IDs
* @param currentGraph The current graph
* @param subgraphNodeIds The instance IDs
* @param subgraphs The subgraphs
* @returns The subgraphs that correspond to each of the instance IDs.
*/
function getSubgraphsFromInstanceIds(
currentGraph: LGraph | Subgraph,
subgraphNodeIds: string[],
subgraphs: Subgraph[] = []
): Subgraph[] | undefined {
// Last segment is the node portion; nothing to do.
if (subgraphNodeIds.length === 1) return subgraphs
const currentPart = subgraphNodeIds.shift()
if (currentPart === undefined) return subgraphs
const subgraph = subgraphNodeIdToSubgraph(currentPart, currentGraph)
if (!subgraph) {
console.warn(`Subgraph not found: ${currentPart}`)
return undefined
}
subgraphs.push(subgraph)
return getSubgraphsFromInstanceIds(subgraph, subgraphNodeIds, subgraphs)
}
/**
* Convert execution context node IDs to NodeLocatorIds
* @param nodeId The node ID from execution context (could be execution ID)
* @returns The NodeLocatorId
*/
function executionIdToNodeLocatorId(
nodeId: string | number
): NodeLocatorId | undefined {
const nodeIdStr = String(nodeId)
if (!nodeIdStr.includes(':')) {
// It's a top-level node ID
return nodeIdStr
}
// It's an execution node ID
const parts = nodeIdStr.split(':')
const localNodeId = parts[parts.length - 1]
const subgraphs = getSubgraphsFromInstanceIds(app.rootGraph, parts)
if (!subgraphs) return undefined
const nodeLocatorId = createNodeLocatorId(subgraphs.at(-1)!.id, localNodeId)
return nodeLocatorId
}
export const useExecutionStore = defineStore('execution', () => {
const workflowStore = useWorkflowStore()
const canvasStore = useCanvasStore()
const executionErrorStore = useExecutionErrorStore()
const clientId = ref<string | null>(null)
const activeJobId = ref<string | null>(null)
const queuedJobs = ref<Record<NodeId, QueuedJob>>({})
const lastNodeErrors = ref<Record<NodeId, NodeError> | null>(null)
const lastExecutionError = ref<ExecutionErrorWsMessage | null>(null)
const lastPromptError = ref<PromptError | null>(null)
// This is the progress of all nodes in the currently executing workflow
const nodeProgressStates = ref<Record<string, NodeProgressState>>({})
const nodeProgressStatesByJob = ref<
@@ -168,7 +106,7 @@ export const useExecutionStore = defineStore('execution', () => {
const parts = String(state.display_node_id).split(':')
for (let i = 0; i < parts.length; i++) {
const executionId = parts.slice(0, i + 1).join(':')
const locatorId = executionIdToNodeLocatorId(executionId)
const locatorId = executionIdToNodeLocatorId(app.rootGraph, executionId)
if (!locatorId) continue
result[locatorId] = mergeExecutionProgressStates(
@@ -245,19 +183,6 @@ export const useExecutionStore = defineStore('execution', () => {
return total > 0 ? done / total : 0
})
const lastExecutionErrorNodeLocatorId = computed(() => {
const err = lastExecutionError.value
if (!err) return null
return executionIdToNodeLocatorId(String(err.node_id))
})
const lastExecutionErrorNodeId = computed(() => {
const locator = lastExecutionErrorNodeLocatorId.value
if (!locator) return null
const localId = workflowStore.nodeLocatorIdToNodeId(locator)
return localId != null ? String(localId) : null
})
function bindExecutionEvents() {
api.addEventListener('notification', handleNotification)
api.addEventListener('execution_start', handleExecutionStart)
@@ -289,10 +214,7 @@ export const useExecutionStore = defineStore('execution', () => {
}
function handleExecutionStart(e: CustomEvent<ExecutionStartWsMessage>) {
lastExecutionError.value = null
lastPromptError.value = null
lastNodeErrors.value = null
isErrorOverlayOpen.value = false
executionErrorStore.clearAllErrors()
activeJobId.value = e.detail.prompt_id
queuedJobs.value[activeJobId.value] ??= { nodes: {} }
clearInitializationByJobId(activeJobId.value)
@@ -410,7 +332,7 @@ export const useExecutionStore = defineStore('execution', () => {
if (handleServiceLevelError(e.detail)) return
// OSS path / Cloud fallback (real runtime errors)
lastExecutionError.value = e.detail
executionErrorStore.lastExecutionError = e.detail
clearInitializationByJobId(e.detail.prompt_id)
resetExecutionState(e.detail.prompt_id)
}
@@ -422,7 +344,7 @@ export const useExecutionStore = defineStore('execution', () => {
clearInitializationByJobId(detail.prompt_id)
resetExecutionState(detail.prompt_id)
lastPromptError.value = {
executionErrorStore.lastPromptError = {
type: detail.exception_type ?? 'error',
message: detail.exception_type
? `${detail.exception_type}: ${detail.exception_message}`
@@ -442,9 +364,9 @@ export const useExecutionStore = defineStore('execution', () => {
resetExecutionState(detail.prompt_id)
if (result.kind === 'nodeErrors') {
lastNodeErrors.value = result.nodeErrors
executionErrorStore.lastNodeErrors = result.nodeErrors
} else {
lastPromptError.value = result.promptError
executionErrorStore.lastPromptError = result.promptError
}
return true
}
@@ -515,7 +437,7 @@ export const useExecutionStore = defineStore('execution', () => {
}
activeJobId.value = null
_executingNodeProgress.value = null
lastPromptError.value = null
executionErrorStore.clearPromptError()
}
function getNodeIdIfExecuting(nodeId: string | number) {
@@ -596,207 +518,11 @@ export const useExecutionStore = defineStore('execution', () => {
() => runningJobIds.value.length
)
/** Map of node errors indexed by locator ID. */
const nodeErrorsByLocatorId = computed<Record<NodeLocatorId, NodeError>>(
() => {
if (!lastNodeErrors.value) return {}
const map: Record<NodeLocatorId, NodeError> = {}
for (const [executionId, nodeError] of Object.entries(
lastNodeErrors.value
)) {
const locatorId = executionIdToNodeLocatorId(executionId)
if (locatorId) {
map[locatorId] = nodeError
}
}
return map
}
)
/** Get node errors by locator ID. */
const getNodeErrors = (
nodeLocatorId: NodeLocatorId
): NodeError | undefined => {
return nodeErrorsByLocatorId.value[nodeLocatorId]
}
/** Check if a specific slot has validation errors. */
const slotHasError = (
nodeLocatorId: NodeLocatorId,
slotName: string
): boolean => {
const nodeError = getNodeErrors(nodeLocatorId)
if (!nodeError) return false
return nodeError.errors.some((e) => e.extra_info?.input_name === slotName)
}
/**
* Update node and slot error flags when validation errors change.
* Propagates errors up subgraph chains.
*/
watch(lastNodeErrors, () => {
if (!app.rootGraph) return
// Clear all error flags
forEachNode(app.rootGraph, (node) => {
node.has_errors = false
if (node.inputs) {
for (const slot of node.inputs) {
slot.hasErrors = false
}
}
})
if (!lastNodeErrors.value) return
// Set error flags on nodes and slots
for (const [executionId, nodeError] of Object.entries(
lastNodeErrors.value
)) {
const node = getNodeByExecutionId(app.rootGraph, executionId)
if (!node) continue
node.has_errors = true
// Mark input slots with errors
if (node.inputs) {
for (const error of nodeError.errors) {
const slotName = error.extra_info?.input_name
if (!slotName) continue
const slot = node.inputs.find((s) => s.name === slotName)
if (slot) {
slot.hasErrors = true
}
}
}
// Propagate errors to parent subgraph nodes
const parts = executionId.split(':')
for (let i = parts.length - 1; i > 0; i--) {
const parentExecutionId = parts.slice(0, i).join(':')
const parentNode = getNodeByExecutionId(
app.rootGraph,
parentExecutionId
)
if (parentNode) {
parentNode.has_errors = true
}
}
}
})
/** Whether a runtime execution error is present */
const hasExecutionError = computed(() => !!lastExecutionError.value)
/** Whether a prompt-level error is present (e.g. invalid_prompt, prompt_no_outputs) */
const hasPromptError = computed(() => !!lastPromptError.value)
/** Whether any node validation errors are present */
const hasNodeError = computed(
() => !!lastNodeErrors.value && Object.keys(lastNodeErrors.value).length > 0
)
/** Whether any error (node validation, runtime execution, or prompt-level) is present */
const hasAnyError = computed(
() => hasExecutionError.value || hasPromptError.value || hasNodeError.value
)
const allErrorExecutionIds = computed<string[]>(() => {
const ids: string[] = []
if (lastNodeErrors.value) {
ids.push(...Object.keys(lastNodeErrors.value))
}
if (lastExecutionError.value) {
const nodeId = lastExecutionError.value.node_id
if (nodeId !== null && nodeId !== undefined) {
ids.push(String(nodeId))
}
}
return ids
})
/** Count of prompt-level errors (0 or 1) */
const promptErrorCount = computed(() => (lastPromptError.value ? 1 : 0))
/** Count of all individual node validation errors */
const nodeErrorCount = computed(() => {
if (!lastNodeErrors.value) return 0
let count = 0
for (const nodeError of Object.values(lastNodeErrors.value)) {
count += nodeError.errors.length
}
return count
})
/** Count of runtime execution errors (0 or 1) */
const executionErrorCount = computed(() => (lastExecutionError.value ? 1 : 0))
/** Total count of all individual errors */
const totalErrorCount = computed(
() =>
promptErrorCount.value + nodeErrorCount.value + executionErrorCount.value
)
/** Pre-computed Set of graph node IDs (as strings) that have errors in the current graph scope. */
const activeGraphErrorNodeIds = computed<Set<string>>(() => {
const ids = new Set<string>()
if (!app.rootGraph) return ids
// Fall back to rootGraph when currentGraph hasn't been initialized yet
const activeGraph = canvasStore.currentGraph ?? app.rootGraph
if (lastNodeErrors.value) {
for (const executionId of Object.keys(lastNodeErrors.value)) {
const graphNode = getNodeByExecutionId(app.rootGraph, executionId)
if (graphNode?.graph === activeGraph) {
ids.add(String(graphNode.id))
}
}
}
if (lastExecutionError.value) {
const execNodeId = String(lastExecutionError.value.node_id)
const graphNode = getNodeByExecutionId(app.rootGraph, execNodeId)
if (graphNode?.graph === activeGraph) {
ids.add(String(graphNode.id))
}
}
return ids
})
function hasInternalErrorForNode(nodeId: string | number): boolean {
const prefix = `${nodeId}:`
return allErrorExecutionIds.value.some((id) => id.startsWith(prefix))
}
const isErrorOverlayOpen = ref(false)
function showErrorOverlay() {
isErrorOverlayOpen.value = true
}
function dismissErrorOverlay() {
isErrorOverlayOpen.value = false
}
return {
isIdle,
clientId,
activeJobId,
queuedJobs,
lastNodeErrors,
lastExecutionError,
lastPromptError,
hasAnyError,
allErrorExecutionIds,
totalErrorCount,
lastExecutionErrorNodeId,
executingNodeId,
executingNodeIds,
activeJob,
@@ -823,16 +549,7 @@ export const useExecutionStore = defineStore('execution', () => {
// Raw executing progress data for backward compatibility in ComfyApp.
_executingNodeProgress,
// NodeLocatorId conversion helpers
executionIdToNodeLocatorId,
nodeLocatorIdToExecutionId,
jobIdToWorkflowId,
// Node error lookup helpers
getNodeErrors,
slotHasError,
hasInternalErrorForNode,
activeGraphErrorNodeIds,
isErrorOverlayOpen,
showErrorOverlay,
dismissErrorOverlay
jobIdToWorkflowId
}
})

Some files were not shown because too many files have changed in this diff Show More