Compare commits

..

7 Commits

Author SHA1 Message Date
bymyself
fefb09cd9c test: trim RoleBadge tests to user-visible label behavior
Removes assertions on the root tag name, Vue rerender mechanics, and
class-fallthrough utility forwarding — these locked down implementation
details rather than user-visible behavior. The meaningful regression
coverage for role labels lives in MemberListItem.test.ts.

Addresses review feedback:
https://github.com/Comfy-Org/ComfyUI_frontend/pull/11383#discussion_r3176431334
2026-05-04 13:34:15 -07:00
bymyself
b29ad1f9fe fix: rely on Vue class fallthrough in RoleBadge
Removes `inheritAttrs: false` and the manual `cn()` class merge so all
caller-provided attributes (aria-*, data-*, title, style, id, listeners)
fall through to the root span. Tailwind class merging is handled by
Vue's default class fallthrough.

Addresses review feedback:
https://github.com/Comfy-Org/ComfyUI_frontend/pull/11383#discussion_r3176431333
2026-05-04 13:33:27 -07:00
bymyself
2d036e1dc1 fix: update cn import after tailwindUtil shim removal
Rebased onto main where PR #11453 removed the @/utils/tailwindUtil
re-export shim. Switch RoleBadge to import cn directly from
@comfyorg/tailwind-utils.
2026-05-01 21:12:47 -07:00
bymyself
50d8b7a98a test: add MemberListItem unit tests for role badge label coverage
Covers getRoleBadgeLabel function to fix codecov/patch failure.
2026-05-01 21:10:04 -07:00
bymyself
2cbeaae36b fix: rename attr passthrough test to clarify behavioral intent
Addresses review feedback:
https://github.com/Comfy-Org/ComfyUI_frontend/pull/11383#discussion_r3106189426
2026-05-01 21:10:03 -07:00
bymyself
18d448c740 refactor: extract RoleBadge component and reuse across workspace UI
Replace 4 inline badge spans with the new RoleBadge component in
WorkspaceSwitcherPopover, MembersPanelContent, and
TeamWorkspacesDialogContent. The component accepts a label prop and
supports class merging via inheritAttrs: false + cn().

Fixes #10971
2026-05-01 21:10:03 -07:00
Christian Byrne
e831daae59 feat(website): point robots.txt at /sitemap-index.xml + AI crawler rules (#11823)
## Summary

Once
[comfy-router#22](https://github.com/Comfy-Org/comfy-router/pull/22)
ships, `comfy.org/sitemap-index.xml` will return a unified index
aggregating both the website (38 URLs) and workflow-templates sitemaps.
This PR:

1. Reverts `Sitemap:` back to `/sitemap-index.xml` (was `/sitemap-0.xml`
in #11802 as a workaround for the 404).
2. Adds explicit allow records for 21 search and AI/LLM crawlers
(GPTBot, ChatGPT-User, OAI-SearchBot, Google-Extended, ClaudeBot,
Claude-Web, anthropic-ai, PerplexityBot, Perplexity-User,
Applebot-Extended, Bytespider, Amazonbot, CCBot, Meta-ExternalAgent,
Meta-ExternalFetcher, Diffbot, etc.).
3. Adds `Disallow:` for `/_astro/`, `/_website/`, `/_vercel/` — Vercel
build artifacts that aren't useful to crawl.

## Why granular UAs

Stacked `User-agent:` records (per [RFC 9309
§2.2](https://datatracker.ietf.org/doc/html/rfc9309#section-2.2)) share
one rule block. Listing each bot explicitly:

- Signals intent to AI bots that look for their UA in robots.txt before
crawling more aggressively.
- Surfaces our crawl policy clearly to anyone inspecting the file.
- Lets us add per-bot Disallows in future without restructuring.

## Merge order

⚠️ **Do NOT merge until comfy-router#22 is deployed to production.**
Until then, `/sitemap-index.xml` returns 404 and merging this PR would
re-introduce the issue that PR #11802 patched. Verification:

```bash
curl -sI https://comfy.org/sitemap-index.xml
# expect: HTTP/2 200, x-served-by: worker-sitemap-index
```

Once that returns 200, this is safe to merge.

## Verification (after merge + deploy)

```bash
# robots.txt is served and points at the unified index
curl -s https://comfy.org/robots.txt | grep '^Sitemap:'
# → Sitemap: https://comfy.org/sitemap-index.xml

# Each AI crawler can fetch it
for ua in 'GPTBot/1.0' 'ClaudeBot/1.0' 'PerplexityBot/1.0' 'Google-Extended' 'Applebot-Extended'; do
  curl -s -o /dev/null -w "$ua → %{http_code}\n" -A "$ua" https://comfy.org/robots.txt
done

# Sitemap is reachable from robots.txt
SITEMAP=$(curl -s https://comfy.org/robots.txt | awk -F': ' '/^Sitemap:/ {print $2}')
curl -s "$SITEMAP" | xmllint --noout - && echo "valid XML"
```

## Linear / closes

- Closes FE-437 (AI crawler rules)
- Updates FE-432 — the robots.txt change in #11802 was a workaround
that's no longer needed once #22 ships

┆Issue is synchronized with this [Notion
page](https://app.notion.com/p/PR-11823-feat-website-point-robots-txt-at-sitemap-index-xml-AI-crawler-rules-3546d73d3650811dbceedd06c00db444)
by [Unito](https://www.unito.io)
2026-05-01 21:04:45 -07:00
30 changed files with 320 additions and 1549 deletions

1
.gitignore vendored
View File

@@ -16,7 +16,6 @@ yarn.lock
.eslintcache
.prettiercache
.stylelintcache
*.tsbuildinfo
node_modules
.pnpm-store

View File

@@ -90,17 +90,6 @@ const config: StorybookConfig = {
process.cwd() +
'/packages/shared-frontend-utils/src/networkUtil.ts'
},
{
find: '@/utils/linkFixer',
replacement:
process.cwd() + '/packages/workflow-validation/src/linkRepair.ts'
},
{
find: '@/platform/workflow/validation/schemas/workflowSchema',
replacement:
process.cwd() +
'/packages/workflow-validation/src/workflowSchema.ts'
},
{
find: '@',
replacement: process.cwd() + '/src'

View File

@@ -1,4 +1,33 @@
User-agent: *
Allow: /
# robots.txt for comfy.org
# Open to all crawlers — including AI/LLM bots — for maximum visibility
# in AI-powered search, chat-based answer engines, and traditional search.
# Granular UAs are listed explicitly to signal intent; rules are shared
# via stacked user-agent records (RFC 9309 §2.2).
Sitemap: https://comfy.org/sitemap-0.xml
User-agent: *
User-agent: Googlebot
User-agent: Bingbot
User-agent: DuckDuckBot
User-agent: GPTBot
User-agent: ChatGPT-User
User-agent: OAI-SearchBot
User-agent: Google-Extended
User-agent: ClaudeBot
User-agent: Claude-Web
User-agent: anthropic-ai
User-agent: PerplexityBot
User-agent: Perplexity-User
User-agent: Applebot
User-agent: Applebot-Extended
User-agent: Bytespider
User-agent: Amazonbot
User-agent: CCBot
User-agent: Meta-ExternalAgent
User-agent: Meta-ExternalFetcher
User-agent: Diffbot
Allow: /
Disallow: /_astro/
Disallow: /_website/
Disallow: /_vercel/
Sitemap: https://comfy.org/sitemap-index.xml

View File

@@ -63,7 +63,6 @@
"@comfyorg/registry-types": "workspace:*",
"@comfyorg/shared-frontend-utils": "workspace:*",
"@comfyorg/tailwind-utils": "workspace:*",
"@comfyorg/workflow-validation": "workspace:*",
"@formkit/auto-animate": "catalog:",
"@iconify/json": "catalog:",
"@primeuix/forms": "catalog:",

View File

@@ -1,41 +0,0 @@
{
"name": "@comfyorg/workflow-validation",
"version": "0.1.0",
"description": "Workflow JSON schemas, link topology validator, and link repair for ComfyUI workflows",
"homepage": "https://comfy.org",
"license": "GPL-3.0-only",
"repository": "https://github.com/Comfy-Org/ComfyUI_frontend",
"type": "module",
"main": "./src/index.ts",
"types": "./src/index.ts",
"exports": {
".": "./src/index.ts",
"./linkRepair": "./src/linkRepair.ts",
"./linkTopology": "./src/linkTopology.ts",
"./workflowSchema": "./src/workflowSchema.ts",
"./serialised": "./src/serialised.ts"
},
"publishConfig": {
"access": "public"
},
"scripts": {
"build": "vite build --config vite.config.mts && tsx ../../scripts/prepare-workflow-validation.ts",
"typecheck": "tsc --noEmit"
},
"dependencies": {
"zod": "catalog:",
"zod-validation-error": "catalog:"
},
"devDependencies": {
"typescript": "catalog:",
"vite": "catalog:",
"vite-plugin-dts": "catalog:"
},
"packageManager": "pnpm@10.17.1",
"nx": {
"tags": [
"scope:shared",
"type:validation"
]
}
}

View File

@@ -1,38 +0,0 @@
// Public API barrel for the workflow-validation package.
// Structural types for serialised workflow JSON (no litegraph coupling).
export type {
  SerialisedGraph,
  SerialisedLinkArray,
  SerialisedLinkObject,
  SerialisedNode,
  SerialisedNodeInput,
  SerialisedNodeOutput
} from './serialised'
// Pure (non-mutating) link-topology validation and error formatting.
export {
  describeTopologyError,
  toLinkContext,
  validateLinkTopology
} from './linkTopology'
export type { LinkContext, TopologyError } from './linkTopology'
// Mutating link auto-repair.
export { LinkRepairAbortedError, repairLinks } from './linkRepair'
export type { RepairResult } from './linkRepair'
// Alias — presumably kept for callers that still import `fixBadLinks`;
// verify before removing.
export { repairLinks as fixBadLinks } from './linkRepair'
// Zod schemas and validators for workflow JSON.
export {
  validateComfyWorkflow,
  zComfyWorkflow,
  zComfyWorkflow1,
  zNodeId
} from './workflowSchema'
export type {
  ComfyApiWorkflow,
  ComfyLinkObject,
  ComfyNode,
  ComfyWorkflowJSON,
  ModelFile,
  NodeId,
  Reroute,
  WorkflowId,
  WorkflowJSON04
} from './workflowSchema'

View File

@@ -1,166 +0,0 @@
import { describe, expect, it } from 'vitest'
import { LinkRepairAbortedError, repairLinks } from './linkRepair'
import type {
SerialisedGraph,
SerialisedLinkArray,
SerialisedLinkObject,
SerialisedNode,
SerialisedNodeInput,
SerialisedNodeOutput
} from './serialised'
/** Build a single input slot wired to `link` (null = disconnected). */
function input(link: number | null): SerialisedNodeInput {
  const slot: SerialisedNodeInput = { name: 'i', type: '*', link }
  return slot
}
/** Build a single output slot carrying the given outgoing link ids. */
function output(links: number[]): SerialisedNodeOutput {
  const slot: SerialisedNodeOutput = { name: 'o', type: '*', links }
  return slot
}
/** Assemble a serialised graph fixture from nodes and raw links. */
function makeGraph(
  nodes: SerialisedNode[],
  links: Array<SerialisedLinkArray | SerialisedLinkObject>
): SerialisedGraph {
  const graph: SerialisedGraph = { nodes, links }
  return graph
}
describe('repairLinks abort behaviour', () => {
  it('throws LinkRepairAbortedError carrying the topology context when the patched view diverges from the live graph', () => {
    // Links 10 and 11 both claim the same target input 2:0, so only one
    // can be wired up — repair should detect the divergence.
    const node1: SerialisedNode = {
      id: 1,
      outputs: [output([10, 11])]
    }
    const node2: SerialisedNode = {
      id: 2,
      inputs: [input(null)]
    }
    const graph = makeGraph(
      [node1, node2],
      [
        [10, 1, 0, 2, 0, '*'],
        [11, 1, 0, 2, 0, '*']
      ]
    )
    let thrown: unknown
    try {
      repairLinks(graph, { fix: true, silent: true })
    } catch (err) {
      thrown = err
    }
    // NOTE(review): these assertions only run if the error was actually
    // thrown — if repairLinks stops throwing, the test passes vacuously.
    // Consider asserting `thrown instanceof LinkRepairAbortedError`
    // unconditionally.
    if (thrown instanceof LinkRepairAbortedError) {
      expect(thrown.topologyError.link.linkId).toBeGreaterThan(0)
      expect(typeof thrown.message).toBe('string')
    }
  })
  it('LinkRepairAbortedError exposes a topologyError discriminated union', () => {
    const err = new LinkRepairAbortedError({
      kind: 'target-link-mismatch',
      link: {
        linkId: 99,
        originId: 1,
        originSlot: 0,
        targetId: 2,
        targetSlot: 0
      },
      actualLink: 5
    })
    // The message embeds the link tuple so log lines can be correlated.
    expect(err.topologyError.kind).toBe('target-link-mismatch')
    expect(err.message).toContain('[link=99 src=1:0 tgt=2:0]')
    expect(err.name).toBe('LinkRepairAbortedError')
  })
})
describe('repairLinks delete-with-missing-index path', () => {
  it('does not corrupt the link array when the deleted link disappears mid-iteration', () => {
    // Link 42 targets out-of-bounds input slot 5; link 99 is fully
    // consistent and must still be present after repair runs.
    const node1: SerialisedNode = { id: 1, outputs: [output([99])] }
    const node2: SerialisedNode = { id: 2, inputs: [input(99)] }
    const graph: SerialisedGraph = {
      nodes: [node1, node2],
      links: [
        [42, 1, 0, 2, 5, '*'],
        [99, 1, 0, 2, 0, '*']
      ]
    }
    repairLinks(graph, { fix: true, silent: true })
    // Survival check is by link id (tuple element 0), not array index,
    // since repair may compact or splice the links array.
    const surviving = graph.links.find(
      (l): l is SerialisedLinkArray =>
        Array.isArray(l) && (l as SerialisedLinkArray)[0] === 99
    )
    expect(surviving).toBeDefined()
  })
})
describe('repairLinks live-graph branch', () => {
  it('uses graph.getNodeById and treats links as a record when the live-graph hook is present', () => {
    const node1: SerialisedNode = {
      id: 1,
      outputs: [output([])]
    }
    const node2: SerialisedNode = {
      id: 2,
      inputs: [input(null)]
    }
    // Record-keyed links (live-graph shape); origin id 999 is dangling.
    const links: Record<number, SerialisedLinkObject> = {
      42: {
        id: 42,
        origin_id: 999,
        origin_slot: 0,
        target_id: 2,
        target_slot: 0,
        type: '*'
      }
    }
    const liveGraph = {
      nodes: [node1, node2],
      links: links as unknown as SerialisedGraph['links'],
      // Minimal lookup stub; loose `==` matches numeric/string ids.
      getNodeById: (id: string | number) =>
        [node1, node2].find((n) => n.id == id)
    } as SerialisedGraph & {
      getNodeById: (id: string | number) => SerialisedNode | undefined
    }
    repairLinks(liveGraph, { fix: true, silent: true })
    // The dangling link must have been deleted from the record in place.
    expect((links as Record<number, SerialisedLinkObject>)[42]).toBeUndefined()
  })
})
describe('repairLinks describeTopologyError coverage via abort', () => {
  it('produces a message tuple for every kind of LinkRepairAbortedError path', () => {
    const link = {
      linkId: 1,
      originId: 1,
      originSlot: 0,
      targetId: 2,
      targetSlot: 0
    }
    // One error instance per variant of the TopologyError union.
    const cases = [
      new LinkRepairAbortedError({ kind: 'missing-origin-node', link }),
      new LinkRepairAbortedError({ kind: 'missing-target-node', link }),
      new LinkRepairAbortedError({
        kind: 'origin-slot-out-of-bounds',
        link,
        originSlotCount: 2
      }),
      new LinkRepairAbortedError({
        kind: 'target-slot-out-of-bounds',
        link,
        targetSlotCount: 4
      }),
      new LinkRepairAbortedError({ kind: 'origin-link-not-listed', link }),
      new LinkRepairAbortedError({
        kind: 'target-link-mismatch',
        link,
        actualLink: null
      })
    ]
    // Every variant's message must carry the shared link tuple prefix.
    for (const err of cases) {
      expect(err.message).toContain('[link=1 src=1:0 tgt=2:0]')
    }
  })
})

View File

@@ -1,164 +0,0 @@
import { describe, expect, it } from 'vitest'
import { describeTopologyError, validateLinkTopology } from './linkTopology'
import type { SerialisedGraph } from './serialised'
/** Graph fixture factory: empty nodes/links, overridden by `partial`. */
function makeGraph(partial: Partial<SerialisedGraph>): SerialisedGraph {
  const base: SerialisedGraph = { nodes: [], links: [] }
  return Object.assign(base, partial)
}
describe('validateLinkTopology', () => {
  it('returns no errors for a valid graph', () => {
    // 1:0 → 2:0 with both endpoint slot lists agreeing on link 10.
    const graph = makeGraph({
      nodes: [
        { id: 1, outputs: [{ name: 'o', type: '*', links: [10] }] },
        { id: 2, inputs: [{ name: 'i', type: '*', link: 10 }] }
      ],
      links: [[10, 1, 0, 2, 0, '*']]
    })
    expect(validateLinkTopology(graph)).toEqual([])
  })
  it('reports target slot out of bounds (seedance regression)', () => {
    // Link 29 targets slot 9, but node 14 has only 5 input slots.
    const graph = makeGraph({
      nodes: [
        { id: 9, outputs: [{ name: 'o', type: 'STRING', links: [29] }] },
        {
          id: 14,
          inputs: [
            { name: 'a', type: 'STRING', link: null },
            { name: 'b', type: 'STRING', link: null },
            { name: 'c', type: 'STRING', link: null },
            { name: 'd', type: 'STRING', link: 55 },
            { name: 'e', type: 'STRING', link: null }
          ]
        }
      ],
      links: [[29, 9, 0, 14, 9, 'STRING']]
    })
    const errors = validateLinkTopology(graph)
    expect(errors).toHaveLength(1)
    expect(errors[0]).toMatchObject({
      kind: 'target-slot-out-of-bounds',
      link: { linkId: 29, targetId: 14, targetSlot: 9 },
      targetSlotCount: 5
    })
    // The formatted message must embed the link tuple.
    expect(describeTopologyError(errors[0]!)).toContain(
      '[link=29 src=9:0 tgt=14:9]'
    )
  })
  it('reports a missing origin node', () => {
    // Origin id 999 does not exist in the node list.
    const graph = makeGraph({
      nodes: [{ id: 2, inputs: [{ name: 'i', type: '*', link: 10 }] }],
      links: [[10, 999, 0, 2, 0, '*']]
    })
    const errors = validateLinkTopology(graph)
    expect(errors[0]?.kind).toBe('missing-origin-node')
  })
  it('reports a target-link mismatch', () => {
    // Input slot claims link 999 while the link table says 10.
    const graph = makeGraph({
      nodes: [
        { id: 1, outputs: [{ name: 'o', type: '*', links: [10] }] },
        { id: 2, inputs: [{ name: 'i', type: '*', link: 999 }] }
      ],
      links: [[10, 1, 0, 2, 0, '*']]
    })
    const errors = validateLinkTopology(graph)
    expect(errors[0]).toMatchObject({
      kind: 'target-link-mismatch',
      actualLink: 999
    })
  })
  it('accepts object-form links for valid graphs', () => {
    // Same topology as the valid-graph case, but schema-v1 object links.
    const graph = makeGraph({
      nodes: [
        { id: 1, outputs: [{ name: 'o', type: '*', links: [10] }] },
        { id: 2, inputs: [{ name: 'i', type: '*', link: 10 }] }
      ],
      links: [
        {
          id: 10,
          origin_id: 1,
          origin_slot: 0,
          target_id: 2,
          target_slot: 0,
          type: '*'
        }
      ]
    })
    expect(validateLinkTopology(graph)).toEqual([])
  })
  it('reports object-form links with out-of-bounds slots', () => {
    const graph = makeGraph({
      nodes: [
        { id: 1, outputs: [{ name: 'o', type: '*', links: [10] }] },
        {
          id: 2,
          inputs: [{ name: 'a', type: '*', link: null }]
        }
      ],
      links: [
        {
          id: 10,
          origin_id: 1,
          origin_slot: 0,
          target_id: 2,
          target_slot: 5,
          type: '*'
        }
      ]
    })
    const errors = validateLinkTopology(graph)
    expect(errors[0]).toMatchObject({
      kind: 'target-slot-out-of-bounds',
      link: { linkId: 10, targetId: 2, targetSlot: 5 }
    })
  })
})
describe('describeTopologyError', () => {
  it('formats every error kind with the [linkId, src, srcSlot, tgt, tgtSlot] tuple', () => {
    // Single shared link context; each variant must render the same
    // tuple prefix regardless of its extra fields.
    const link = {
      linkId: 7,
      originId: 3,
      originSlot: 1,
      targetId: 4,
      targetSlot: 2
    }
    const tuple = '[link=7 src=3:1 tgt=4:2]'
    expect(
      describeTopologyError({ kind: 'missing-origin-node', link })
    ).toContain(tuple)
    expect(
      describeTopologyError({ kind: 'missing-target-node', link })
    ).toContain(tuple)
    expect(
      describeTopologyError({
        kind: 'origin-slot-out-of-bounds',
        link,
        originSlotCount: 0
      })
    ).toContain(tuple)
    expect(
      describeTopologyError({
        kind: 'target-slot-out-of-bounds',
        link,
        targetSlotCount: 5
      })
    ).toContain(tuple)
    expect(
      describeTopologyError({ kind: 'origin-link-not-listed', link })
    ).toContain(tuple)
    expect(
      describeTopologyError({
        kind: 'target-link-mismatch',
        link,
        actualLink: null
      })
    ).toContain(tuple)
  })
})

View File

@@ -1,158 +0,0 @@
import type {
SerialisedGraph,
SerialisedLinkArray,
SerialisedLinkObject,
SerialisedNode
} from './serialised'
/**
 * Normalised view of one link's endpoints, independent of whether the
 * link was stored as a v0.4 tuple or a v1 object.
 */
export interface LinkContext {
  linkId: number
  originId: string | number
  originSlot: number
  targetId: string | number
  targetSlot: number
}
/**
 * Discriminated union of everything that can be wrong with one link.
 * `kind` is the discriminant; every variant carries the LinkContext,
 * and slot/mismatch variants add the detail needed for the message.
 */
export type TopologyError =
  | { kind: 'missing-origin-node'; link: LinkContext }
  | { kind: 'missing-target-node'; link: LinkContext }
  | {
      kind: 'origin-slot-out-of-bounds'
      link: LinkContext
      originSlotCount: number
    }
  | {
      kind: 'target-slot-out-of-bounds'
      link: LinkContext
      targetSlotCount: number
    }
  | { kind: 'origin-link-not-listed'; link: LinkContext }
  | {
      kind: 'target-link-mismatch'
      link: LinkContext
      actualLink: number | null
    }
/**
 * Render a TopologyError as a one-line human-readable message.
 * Every message starts with the same `[link=.. src=.. tgt=..]` tuple so
 * different error kinds for the same link can be correlated in logs.
 */
export function describeTopologyError(error: TopologyError): string {
  const { linkId, originId, originSlot, targetId, targetSlot } = error.link
  const tuple = `[link=${linkId} src=${originId}:${originSlot} tgt=${targetId}:${targetSlot}]`
  if (error.kind === 'missing-origin-node') {
    return `${tuple} origin node ${originId} does not exist in graph`
  }
  if (error.kind === 'missing-target-node') {
    return `${tuple} target node ${targetId} does not exist in graph`
  }
  if (error.kind === 'origin-slot-out-of-bounds') {
    return `${tuple} origin slot ${originSlot} is out of bounds; node ${originId} has ${error.originSlotCount} output slot(s)`
  }
  if (error.kind === 'target-slot-out-of-bounds') {
    return `${tuple} target slot ${targetSlot} is out of bounds; node ${targetId} has ${error.targetSlotCount} input slot(s)`
  }
  if (error.kind === 'origin-link-not-listed') {
    return `${tuple} link is not listed in node ${originId}.outputs[${originSlot}].links`
  }
  // Only 'target-link-mismatch' remains.
  return `${tuple} node ${targetId}.inputs[${targetSlot}].link is ${error.actualLink}, expected ${linkId}`
}
/** True when the link is in object form rather than the v0.4 tuple form. */
function isLinkObject(
  l: SerialisedLinkArray | SerialisedLinkObject
): l is SerialisedLinkObject {
  if (Array.isArray(l)) return false
  return typeof l === 'object'
}
/**
 * Normalise either link representation (v0.4 tuple or object form)
 * into a single LinkContext shape for validation and error reporting.
 */
export function toLinkContext(
  l: SerialisedLinkArray | SerialisedLinkObject
): LinkContext {
  if (Array.isArray(l)) {
    // v0.4 tuple: [id, originId, originSlot, targetId, targetSlot, type]
    const [linkId, originId, originSlot, targetId, targetSlot] = l
    return { linkId, originId, originSlot, targetId, targetSlot }
  }
  return {
    linkId: l.id,
    originId: l.origin_id,
    originSlot: l.origin_slot,
    targetId: l.target_id,
    targetSlot: l.target_slot
  }
}
/**
 * Linear node lookup by id. Loose equality (`==`) is deliberate:
 * serialised ids can be numbers or numeric strings, and `'1' == 1`
 * must match.
 */
function getNodeById(
  graph: SerialisedGraph,
  id: string | number
): SerialisedNode | undefined {
  for (const node of graph.nodes) {
    if (node.id == id) return node
  }
  return undefined
}
/**
 * Flatten `graph.links` into a plain array, dropping null/undefined
 * entries. Serialised graphs store links as an array; live graphs can
 * expose an id-keyed record instead, so both shapes are accepted.
 */
function iterateLinks(
  graph: SerialisedGraph
): Array<SerialisedLinkArray | SerialisedLinkObject> {
  const links = graph.links
  if (Array.isArray(links)) {
    const present: Array<SerialisedLinkArray | SerialisedLinkObject> = []
    for (const l of links) {
      if (l != null) present.push(l)
    }
    return present
  }
  // Record form: keep only truthy values, mirroring the array branch.
  return Object.values(links).filter((l): l is SerialisedLinkObject =>
    Boolean(l)
  )
}
/**
 * Pure topology check: every link must reference real nodes, in-bounds
 * slots, and consistent input/output endpoints. Does not mutate the
 * graph. Use `repairLinks` (separate module) to attempt auto-fix.
 *
 * Checks are staged per link: missing endpoints stop slot checks, and
 * out-of-bounds slots stop the endpoint-consistency checks, so one bad
 * link never reports cascading follow-on errors.
 */
export function validateLinkTopology(graph: SerialisedGraph): TopologyError[] {
  const errors: TopologyError[] = []
  for (const l of iterateLinks(graph)) {
    const link = toLinkContext(l)
    const origin = getNodeById(graph, link.originId)
    const target = getNodeById(graph, link.targetId)
    // Both missing-endpoint errors are reported independently before
    // skipping the rest of the checks for this link.
    if (!origin) errors.push({ kind: 'missing-origin-node', link })
    if (!target) errors.push({ kind: 'missing-target-node', link })
    if (!origin || !target) continue
    const outputs = origin.outputs ?? []
    const originSlotOutOfBounds =
      link.originSlot < 0 || link.originSlot >= outputs.length
    if (originSlotOutOfBounds) {
      errors.push({
        kind: 'origin-slot-out-of-bounds',
        link,
        originSlotCount: outputs.length
      })
    }
    const inputs = target.inputs ?? []
    const targetSlotOutOfBounds =
      link.targetSlot < 0 || link.targetSlot >= inputs.length
    if (targetSlotOutOfBounds) {
      errors.push({
        kind: 'target-slot-out-of-bounds',
        link,
        targetSlotCount: inputs.length
      })
    }
    // Endpoint-consistency checks only make sense for in-bounds slots.
    if (originSlotOutOfBounds || targetSlotOutOfBounds) {
      continue
    }
    // The origin output slot must list this link id among its links.
    const originLinks = outputs[link.originSlot]?.links ?? []
    if (!originLinks.includes(link.linkId)) {
      errors.push({ kind: 'origin-link-not-listed', link })
    }
    // The target input slot must point back at exactly this link id.
    const targetLink = inputs[link.targetSlot]?.link ?? null
    if (targetLink !== link.linkId) {
      errors.push({
        kind: 'target-link-mismatch',
        link,
        actualLink: targetLink
      })
    }
  }
  return errors
}

View File

@@ -1,60 +0,0 @@
/**
* Minimal structural types for serialised workflow JSON.
*
* The validation/repair code in this package operates on plain JSON
* (parsed `.json` workflow files) — it does NOT need the runtime
* `LGraph`/`LGraphNode` classes from litegraph. Defining the shapes
* locally keeps this package free of frontend/litegraph coupling so
* it can be consumed by Node.js CI scripts and a future backend
* validator.
*
* These types intentionally mirror the relevant fields used by
* `validateLinkTopology` and `repairLinks`. They are a subset of the
* `ISerialisedGraph` / `ISerialisedNode` shapes from
* `@/lib/litegraph/src/types/serialisation` and stay structurally
* compatible with them.
*/
/** Schema version 0.4 link tuple: `[id, originId, originSlot, targetId, targetSlot, type]`. */
export type SerialisedLinkArray = [
  number,
  string | number,
  number,
  string | number,
  number,
  string | string[] | number
]
/** Object form of a link (schema version 1, or after live-graph hydration). */
export interface SerialisedLinkObject {
  id: number
  origin_id: string | number
  origin_slot: number
  target_id: string | number
  target_slot: number
  type?: string | string[] | number
}
/** One input slot of a node; `link` is the incoming link id, or null when disconnected. */
export interface SerialisedNodeInput {
  name?: string
  type?: string | string[] | number
  link?: number | null
}
/** One output slot of a node; `links` lists the ids of all outgoing links. */
export interface SerialisedNodeOutput {
  name?: string
  type?: string | string[] | number
  links?: number[] | null
}
/** A node: id plus its slot arrays. Ids may be numeric or string. */
export interface SerialisedNode {
  id: string | number
  type?: string
  inputs?: SerialisedNodeInput[]
  outputs?: SerialisedNodeOutput[]
}
/** Whole-graph shape: node list plus links; individual link entries may be null. */
export interface SerialisedGraph {
  nodes: SerialisedNode[]
  links: Array<SerialisedLinkArray | SerialisedLinkObject | null>
}

View File

@@ -1,9 +0,0 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"rootDir": "src",
"outDir": "dist"
},
"include": ["src/**/*"],
"references": [{ "path": "./tsconfig.node.json" }]
}

View File

@@ -1,10 +0,0 @@
{
"extends": "../../tsconfig.json",
"compilerOptions": {
"composite": true,
"outDir": "dist/.tsnode",
"module": "ESNext",
"moduleResolution": "bundler"
},
"include": ["vite.config.mts"]
}

View File

@@ -1,26 +0,0 @@
import { resolve } from 'path'
import { defineConfig } from 'vite'
import dts from 'vite-plugin-dts'
// Library build for the workflow-validation package: a single ESM bundle
// plus generated .d.ts files. zod and zod-validation-error stay external
// so consumers resolve them from their own dependency trees.
export default defineConfig({
  build: {
    lib: {
      entry: resolve(__dirname, 'src/index.ts'),
      name: 'workflow-validation',
      formats: ['es'],
      fileName: 'index'
    },
    copyPublicDir: false,
    // Keep output readable; downstream bundlers minify in their builds.
    minify: false,
    rollupOptions: {
      external: ['zod', 'zod-validation-error']
    }
  },
  plugins: [
    dts({
      tsconfigPath: 'tsconfig.json',
      include: ['src/**/*'],
      // Tests are not part of the published typings.
      exclude: ['src/**/*.test.ts']
    })
  ]
})

104
pnpm-lock.yaml generated
View File

@@ -443,9 +443,6 @@ importers:
'@comfyorg/tailwind-utils':
specifier: workspace:*
version: link:packages/tailwind-utils
'@comfyorg/workflow-validation':
specifier: workspace:*
version: link:packages/workflow-validation
'@formkit/auto-animate':
specifier: 'catalog:'
version: 0.9.0
@@ -1065,25 +1062,6 @@ importers:
specifier: 'catalog:'
version: 5.9.3
packages/workflow-validation:
dependencies:
zod:
specifier: 'catalog:'
version: 3.25.76
zod-validation-error:
specifier: 'catalog:'
version: 3.3.0(zod@3.25.76)
devDependencies:
typescript:
specifier: 'catalog:'
version: 5.9.3
vite:
specifier: ^8.0.0
version: 8.0.0(@types/node@25.0.3)(esbuild@0.27.3)(jiti@2.6.1)(terser@5.39.2)(tsx@4.19.4)(yaml@2.8.2)
vite-plugin-dts:
specifier: 'catalog:'
version: 4.5.4(@types/node@25.0.3)(rollup@4.53.5)(typescript@5.9.3)(vite@8.0.0(@types/node@25.0.3)(esbuild@0.27.3)(jiti@2.6.1)(terser@5.39.2)(tsx@4.19.4)(yaml@2.8.2))
packages:
'@acemir/cssom@0.9.30':
@@ -12267,14 +12245,6 @@ snapshots:
transitivePeerDependencies:
- '@types/node'
'@microsoft/api-extractor-model@7.33.1(@types/node@25.0.3)':
dependencies:
'@microsoft/tsdoc': 0.16.0
'@microsoft/tsdoc-config': 0.18.0
'@rushstack/node-core-library': 5.20.1(@types/node@25.0.3)
transitivePeerDependencies:
- '@types/node'
'@microsoft/api-extractor@7.57.2(@types/node@24.10.4)':
dependencies:
'@microsoft/api-extractor-model': 7.33.1(@types/node@24.10.4)
@@ -12294,25 +12264,6 @@ snapshots:
transitivePeerDependencies:
- '@types/node'
'@microsoft/api-extractor@7.57.2(@types/node@25.0.3)':
dependencies:
'@microsoft/api-extractor-model': 7.33.1(@types/node@25.0.3)
'@microsoft/tsdoc': 0.16.0
'@microsoft/tsdoc-config': 0.18.0
'@rushstack/node-core-library': 5.20.1(@types/node@25.0.3)
'@rushstack/rig-package': 0.7.1
'@rushstack/terminal': 0.22.1(@types/node@25.0.3)
'@rushstack/ts-command-line': 5.3.1(@types/node@25.0.3)
diff: 8.0.3
lodash: 4.17.23
minimatch: 10.2.1
resolve: 1.22.11
semver: 7.5.4
source-map: 0.6.1
typescript: 5.8.2
transitivePeerDependencies:
- '@types/node'
'@microsoft/tsdoc-config@0.18.0':
dependencies:
'@microsoft/tsdoc': 0.16.0
@@ -13185,27 +13136,10 @@ snapshots:
optionalDependencies:
'@types/node': 24.10.4
'@rushstack/node-core-library@5.20.1(@types/node@25.0.3)':
dependencies:
ajv: 8.13.0
ajv-draft-04: 1.0.0(ajv@8.13.0)
ajv-formats: 3.0.1(ajv@8.13.0)
fs-extra: 11.3.2
import-lazy: 4.0.0
jju: 1.4.0
resolve: 1.22.11
semver: 7.5.4
optionalDependencies:
'@types/node': 25.0.3
'@rushstack/problem-matcher@0.2.1(@types/node@24.10.4)':
optionalDependencies:
'@types/node': 24.10.4
'@rushstack/problem-matcher@0.2.1(@types/node@25.0.3)':
optionalDependencies:
'@types/node': 25.0.3
'@rushstack/rig-package@0.7.1':
dependencies:
resolve: 1.22.11
@@ -13219,14 +13153,6 @@ snapshots:
optionalDependencies:
'@types/node': 24.10.4
'@rushstack/terminal@0.22.1(@types/node@25.0.3)':
dependencies:
'@rushstack/node-core-library': 5.20.1(@types/node@25.0.3)
'@rushstack/problem-matcher': 0.2.1(@types/node@25.0.3)
supports-color: 8.1.1
optionalDependencies:
'@types/node': 25.0.3
'@rushstack/ts-command-line@5.3.1(@types/node@24.10.4)':
dependencies:
'@rushstack/terminal': 0.22.1(@types/node@24.10.4)
@@ -13236,15 +13162,6 @@ snapshots:
transitivePeerDependencies:
- '@types/node'
'@rushstack/ts-command-line@5.3.1(@types/node@25.0.3)':
dependencies:
'@rushstack/terminal': 0.22.1(@types/node@25.0.3)
'@types/argparse': 1.0.38
argparse: 1.0.10
string-argv: 0.3.2
transitivePeerDependencies:
- '@types/node'
'@sec-ant/readable-stream@0.4.1': {}
'@sentry-internal/browser-utils@10.32.1':
@@ -14272,7 +14189,7 @@ snapshots:
sirv: 3.0.2
tinyglobby: 0.2.15
tinyrainbow: 3.0.3
vitest: 4.0.16(@opentelemetry/api@1.9.0)(@types/node@25.0.3)(@vitest/ui@4.0.16)(esbuild@0.27.3)(happy-dom@20.0.11)(jiti@2.6.1)(jsdom@27.4.0)(terser@5.39.2)(tsx@4.19.4)(yaml@2.8.2)
vitest: 4.0.16(@opentelemetry/api@1.9.0)(@types/node@24.10.4)(@vitest/ui@4.0.16)(esbuild@0.27.3)(happy-dom@20.0.11)(jiti@2.6.1)(jsdom@27.4.0)(terser@5.39.2)(tsx@4.19.4)(yaml@2.8.2)
'@vitest/utils@3.2.4':
dependencies:
@@ -20227,25 +20144,6 @@ snapshots:
- rollup
- supports-color
vite-plugin-dts@4.5.4(@types/node@25.0.3)(rollup@4.53.5)(typescript@5.9.3)(vite@8.0.0(@types/node@25.0.3)(esbuild@0.27.3)(jiti@2.6.1)(terser@5.39.2)(tsx@4.19.4)(yaml@2.8.2)):
dependencies:
'@microsoft/api-extractor': 7.57.2(@types/node@25.0.3)
'@rollup/pluginutils': 5.3.0(rollup@4.53.5)
'@volar/typescript': 2.4.28
'@vue/language-core': 2.2.0(typescript@5.9.3)
compare-versions: 6.1.1
debug: 4.4.3
kolorist: 1.8.0
local-pkg: 1.1.2
magic-string: 0.30.21
typescript: 5.9.3
optionalDependencies:
vite: 8.0.0(@types/node@25.0.3)(esbuild@0.27.3)(jiti@2.6.1)(terser@5.39.2)(tsx@4.19.4)(yaml@2.8.2)
transitivePeerDependencies:
- '@types/node'
- rollup
- supports-color
vite-plugin-html@3.2.2(vite@8.0.0(@types/node@24.10.4)(esbuild@0.27.3)(jiti@2.6.1)(terser@5.39.2)(tsx@4.19.4)(yaml@2.8.2)):
dependencies:
'@rollup/pluginutils': 4.2.1

View File

@@ -2,7 +2,10 @@ import fs from 'fs'
import path from 'path'
import { zodToJsonSchema } from 'zod-to-json-schema'
import { zComfyWorkflow, zComfyWorkflow1 } from '@comfyorg/workflow-validation'
import {
zComfyWorkflow,
zComfyWorkflow1
} from '../src/platform/workflow/validation/schemas/workflowSchema'
import { zComfyNodeDef as zComfyNodeDefV2 } from '../src/schemas/nodeDef/nodeDefSchemaV2'
import { zComfyNodeDef as zComfyNodeDefV1 } from '../src/schemas/nodeDefSchema'
@@ -54,4 +57,4 @@ fs.writeFileSync(
JSON.stringify(nodeDefV2Schema, null, 2)
)
console.warn('JSON Schemas generated successfully!')
console.log('JSON Schemas generated successfully!')

View File

@@ -1,96 +0,0 @@
import fs from 'fs'
import path from 'path'
import { fileURLToPath } from 'url'
// Generates dist/package.json for the workflow-validation package:
// copies identity fields from the source package.json and rewrites
// entry points to the built artifacts, resolving `catalog:` versions
// from pnpm-workspace.yaml to concrete specifiers for publishing.
const __dirname = path.dirname(fileURLToPath(import.meta.url))
const repoRoot = path.resolve(__dirname, '..')
const packageDir = path.join(repoRoot, 'packages', 'workflow-validation')
const distDir = path.join(packageDir, 'dist')
// Subset of package.json fields this script reads.
interface SourcePackage {
  name: string
  version: string
  description?: string
  license?: string
  repository?: string
  homepage?: string
  dependencies?: Record<string, string>
  publishConfig?: Record<string, unknown>
}
const sourcePackage = JSON.parse(
  fs.readFileSync(path.join(packageDir, 'package.json'), 'utf8')
) as SourcePackage
// Normalise CRLF and append a sentinel key so the lazy regex below
// always finds a terminating top-level line, even at end of file.
const workspaceYaml =
  fs
    .readFileSync(path.join(repoRoot, 'pnpm-workspace.yaml'), 'utf8')
    .replace(/\r\n/g, '\n') + '\n___end:'
// Body of the top-level `catalog:` section (everything up to the next
// non-indented line).
const workspaceCatalog =
  workspaceYaml.match(/^catalog:\n([\s\S]*?)\n\S/m)?.[1] ?? ''
// Resolve a dependency's concrete version: explicit versions in the
// source package win; `catalog:` entries are looked up in the workspace
// catalog section. Throws when neither yields a version.
function resolveCatalog(name: string): string {
  const sourceVersion = sourcePackage.dependencies?.[name]
  if (sourceVersion && sourceVersion !== 'catalog:') return sourceVersion
  const re = new RegExp(`^\\s+'?${name}'?:\\s*([^\\n]+)$`, 'm')
  const match = workspaceCatalog.match(re)
  if (!match) {
    throw new Error(
      `Could not resolve catalog version for ${name}. ` +
        `Expected entry under \`catalog:\` in pnpm-workspace.yaml.`
    )
  }
  return match[1]!.trim()
}
// The manifest written into dist/. NOTE(review): every subpath export's
// `import` target points at './index.js' while `types` points at the
// per-module .d.ts — confirm this single-bundle layout is intentional.
const distPackage = {
  name: sourcePackage.name,
  version: sourcePackage.version,
  description: sourcePackage.description,
  license: sourcePackage.license,
  repository: sourcePackage.repository,
  homepage: sourcePackage.homepage,
  type: 'module',
  main: './index.js',
  module: './index.js',
  types: './index.d.ts',
  exports: {
    '.': {
      types: './index.d.ts',
      import: './index.js'
    },
    './linkRepair': {
      types: './linkRepair.d.ts',
      import: './index.js'
    },
    './linkTopology': {
      types: './linkTopology.d.ts',
      import: './index.js'
    },
    './workflowSchema': {
      types: './workflowSchema.d.ts',
      import: './index.js'
    },
    './serialised': {
      types: './serialised.d.ts',
      import: './index.js'
    }
  },
  files: ['*.js', '*.d.ts'],
  publishConfig: sourcePackage.publishConfig ?? { access: 'public' },
  dependencies: {
    zod: resolveCatalog('zod'),
    'zod-validation-error': resolveCatalog('zod-validation-error')
  }
}
if (!fs.existsSync(distDir)) {
  fs.mkdirSync(distDir, { recursive: true })
}
fs.writeFileSync(
  path.join(distDir, 'package.json'),
  JSON.stringify(distPackage, null, 2) + '\n'
)
// NOTE(review): success message goes to stderr via console.warn —
// presumably to survive stdout filtering; console.log may be clearer.
console.warn(`Prepared ${distPackage.name}@${distPackage.version} in dist/`)

View File

@@ -2261,22 +2261,7 @@
"special": "Must contain at least one special character",
"match": "Passwords must match"
},
"personalDataConsentRequired": "You must agree to the processing of your personal data.",
"topology": {
"invalidLinks": "Workflow has {count} invalid link | Workflow has {count} invalid links",
"overflow": "…and {count} more (see console for full list)",
"abortedSummary": "Workflow has unrepairable invalid links",
"validationSummary": "Workflow Validation",
"linksFixedSummary": "Workflow Links Fixed",
"linksFixedDetail": "Fixed {patched} node connections and removed {deleted} invalid links.",
"tuple": "[link={linkId} src={originId}:{originSlot} tgt={targetId}:{targetSlot}]",
"missingOriginNode": "{tuple} origin node {originId} does not exist in graph",
"missingTargetNode": "{tuple} target node {targetId} does not exist in graph",
"originSlotOutOfBounds": "{tuple} origin slot {originSlot} is out of bounds; node {originId} has {count} output slot | {tuple} origin slot {originSlot} is out of bounds; node {originId} has {count} output slots",
"targetSlotOutOfBounds": "{tuple} target slot {targetSlot} is out of bounds; node {targetId} has {count} input slot | {tuple} target slot {targetSlot} is out of bounds; node {targetId} has {count} input slots",
"originLinkNotListed": "{tuple} link is not listed in node {originId}.outputs[{originSlot}].links",
"targetLinkMismatch": "{tuple} node {targetId}.inputs[{targetSlot}].link is {actualLink}, expected {linkId}"
}
"personalDataConsentRequired": "You must agree to the processing of your personal data."
},
"credits": {
"activity": "Activity",

View File

@@ -1,257 +0,0 @@
import { LinkRepairAbortedError } from '@comfyorg/workflow-validation'
import type {
ComfyWorkflowJSON,
RepairResult,
TopologyError
} from '@comfyorg/workflow-validation'
import type * as WorkflowValidationModule from '@comfyorg/workflow-validation'
import { createPinia, setActivePinia } from 'pinia'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import { useWorkflowValidation } from './useWorkflowValidation'
// Hoisted spies for the toast store so individual tests can assert on
// exactly which toasts the composable emitted.
const toastAddMock = vi.hoisted(() => vi.fn())
const toastAddAlertMock = vi.hoisted(() => vi.fn())
vi.mock('@/platform/updates/common/toastStore', () => ({
  useToastStore: () => ({
    add: toastAddMock,
    addAlert: toastAddAlertMock
  })
}))
// Stub vue-i18n's t() to echo the translation key (plus a JSON dump of any
// params), so assertions can match on stable keys instead of localized text.
vi.mock('vue-i18n', () => ({
  useI18n: () => ({
    t: (key: string, ...rest: unknown[]) => {
      const last = rest[rest.length - 1]
      // Pluralized calls pass `{ named: {...} }` as the last argument; plain
      // calls pass the params object directly. Normalize both shapes here.
      const params =
        last && typeof last === 'object' && 'named' in (last as object)
          ? (last as { named: Record<string, unknown> }).named
          : (last as Record<string, unknown> | undefined)
      if (!params) return key
      return `${key}|${JSON.stringify(params)}`
    }
  })
}))
// Replace only the validation/repair entry points; keep the rest of the real
// module (notably LinkRepairAbortedError) via importActual so instanceof
// checks in the composable still work.
const validateLinkTopologyMock = vi.hoisted(() => vi.fn())
const repairLinksMock = vi.hoisted(() => vi.fn())
const describeTopologyErrorMock = vi.hoisted(() =>
  vi.fn((e: TopologyError) => `desc:${e.kind}:${e.link.linkId}`)
)
vi.mock('@comfyorg/workflow-validation', async () => {
  const actual = await vi.importActual<typeof WorkflowValidationModule>(
    '@comfyorg/workflow-validation'
  )
  return {
    ...actual,
    validateLinkTopology: validateLinkTopologyMock,
    repairLinks: repairLinksMock,
    describeTopologyError: describeTopologyErrorMock
  }
})
// Schema validation is re-mocked per test to drive the pass/fail branches.
const validateComfyWorkflowMock = vi.hoisted(() => vi.fn())
vi.mock('@/platform/workflow/validation/schemas/workflowSchema', () => ({
  validateComfyWorkflow: validateComfyWorkflowMock
}))
// Back the clone() util with structuredClone so the composable receives a
// genuine deep copy (the "clones before repairLinks" test relies on this).
vi.mock('@/scripts/utils', () => ({
  clone: <T>(v: T): T => structuredClone(v)
}))
/**
 * Builds a minimal link descriptor with the given id, always wired from
 * node 1 (output slot 0) to node 2 (input slot 0).
 */
function makeLink(linkId: number) {
  const origin = { originId: 1, originSlot: 0 }
  const target = { targetId: 2, targetSlot: 0 }
  return { linkId, ...origin, ...target }
}
/**
 * Returns the smallest useful workflow fixture: two nodes joined by a
 * single link (id 1) from node 1's output slot 0 to node 2's input slot 0.
 */
function makeWorkflow(): ComfyWorkflowJSON {
  const originNode = { id: 1, outputs: [{ name: 'o', type: '*', links: [1] }] }
  const targetNode = { id: 2, inputs: [{ name: 'i', type: '*', link: 1 }] }
  const workflow = {
    version: 0.4,
    last_node_id: 2,
    last_link_id: 1,
    nodes: [originNode, targetNode] as unknown as ComfyWorkflowJSON['nodes'],
    links: [[1, 1, 0, 2, 0, '*']] as unknown as ComfyWorkflowJSON['links']
  }
  return workflow as ComfyWorkflowJSON
}
/**
 * Wraps a graph in a RepairResult whose fields default to the "nothing was
 * wrong, nothing was changed" state; callers override only what a test needs.
 */
function repairResult(
  graph: ComfyWorkflowJSON,
  overrides: Partial<RepairResult> = {}
): RepairResult {
  const cleanDefaults = {
    graph: graph as unknown as RepairResult['graph'],
    hasBadLinks: false,
    fixed: false,
    patched: 0,
    deleted: 0
  }
  return { ...cleanDefaults, ...overrides }
}
describe('useWorkflowValidation', () => {
  beforeEach(() => {
    // Fresh pinia per test; reset every mock so implementations and call
    // histories never leak across tests.
    setActivePinia(createPinia())
    toastAddMock.mockClear()
    toastAddAlertMock.mockClear()
    validateLinkTopologyMock.mockReset()
    repairLinksMock.mockReset()
    describeTopologyErrorMock.mockClear()
    validateComfyWorkflowMock.mockReset()
  })
  afterEach(() => vi.restoreAllMocks())
  it('returns null when schema validation fails', async () => {
    // Drive the onError callback, mirroring how validateComfyWorkflow
    // reports schema problems, and return null to signal failure.
    validateComfyWorkflowMock.mockImplementation(async (_d, onError) => {
      onError('bad schema')
      return null
    })
    const { validateWorkflow } = useWorkflowValidation()
    const out = await validateWorkflow(makeWorkflow())
    expect(out.graphData).toBeNull()
    expect(toastAddAlertMock).toHaveBeenCalledWith('bad schema')
    // Schema failure must short-circuit: no repair attempt is made.
    expect(repairLinksMock).not.toHaveBeenCalled()
  })
  it('passes through when schema validation succeeds and no topology errors exist', async () => {
    const wf = makeWorkflow()
    validateComfyWorkflowMock.mockResolvedValue(wf)
    validateLinkTopologyMock.mockReturnValue([])
    repairLinksMock.mockImplementation((g) => repairResult(g))
    const { validateWorkflow } = useWorkflowValidation()
    const out = await validateWorkflow(wf)
    expect(out.graphData).not.toBeNull()
    // A clean workflow should produce no toasts at all.
    expect(toastAddMock).not.toHaveBeenCalled()
  })
  it('emits a single warn toast summarising up to TOPOLOGY_TOAST_LIMIT errors', async () => {
    const wf = makeWorkflow()
    // Seven errors against a limit of five: two should be folded into the
    // "…and {count} more" overflow line.
    const errors: TopologyError[] = Array.from({ length: 7 }, (_v, i) => ({
      kind: 'missing-origin-node',
      link: makeLink(i + 1)
    }))
    validateComfyWorkflowMock.mockResolvedValue(wf)
    validateLinkTopologyMock.mockReturnValue(errors)
    repairLinksMock.mockImplementation((g) => repairResult(g))
    const { validateWorkflow } = useWorkflowValidation()
    await validateWorkflow(wf)
    // The mocked t() echoes translation keys, so match on the key prefix.
    const warns = toastAddMock.mock.calls.filter(([arg]) =>
      (arg as { summary: string }).summary.startsWith(
        'validation.topology.invalidLinks'
      )
    )
    expect(warns).toHaveLength(1)
    const detail = (warns[0]![0] as { detail: string }).detail
    expect(detail).toContain('validation.topology.overflow')
    // 5 per-error lines + 1 overflow line.
    expect(detail.split('\n')).toHaveLength(6)
  })
  it('shows the success toast when repair fixes links', async () => {
    const wf = makeWorkflow()
    validateComfyWorkflowMock.mockResolvedValue(wf)
    validateLinkTopologyMock.mockReturnValue([])
    // Report a successful repair so the "links fixed" toast path runs.
    repairLinksMock.mockImplementation((g) =>
      repairResult(g, { fixed: true, patched: 2, deleted: 1 })
    )
    const { validateWorkflow } = useWorkflowValidation()
    await validateWorkflow(wf)
    expect(toastAddMock).toHaveBeenCalledWith(
      expect.objectContaining({
        severity: 'success',
        summary: expect.stringContaining(
          'validation.topology.linksFixedSummary'
        )
      })
    )
  })
  it('returns null and emits an error toast on LinkRepairAbortedError', async () => {
    const wf = makeWorkflow()
    const topologyError: TopologyError = {
      kind: 'target-slot-out-of-bounds',
      link: makeLink(7),
      targetSlotCount: 5
    }
    validateComfyWorkflowMock.mockResolvedValue(wf)
    validateLinkTopologyMock.mockReturnValue([topologyError])
    // The real LinkRepairAbortedError class (via importActual) keeps the
    // composable's instanceof handling intact.
    repairLinksMock.mockImplementation(() => {
      throw new LinkRepairAbortedError(topologyError)
    })
    const { validateWorkflow } = useWorkflowValidation()
    const out = await validateWorkflow(wf)
    expect(out.graphData).toBeNull()
    const errorToast = toastAddMock.mock.calls.find(
      ([arg]) => (arg as { severity: string }).severity === 'error'
    )
    expect(errorToast).toBeDefined()
    expect((errorToast![0] as { summary: string }).summary).toContain(
      'validation.topology.abortedSummary'
    )
  })
  it('re-throws unexpected errors from repairLinks', async () => {
    const wf = makeWorkflow()
    validateComfyWorkflowMock.mockResolvedValue(wf)
    validateLinkTopologyMock.mockReturnValue([])
    // A non-LinkRepairAbortedError must propagate, not be swallowed.
    repairLinksMock.mockImplementation(() => {
      throw new TypeError('boom')
    })
    const { validateWorkflow } = useWorkflowValidation()
    await expect(validateWorkflow(wf)).rejects.toThrow(TypeError)
  })
  it('clones graphData before passing to repairLinks so the abort fallback is untouched', async () => {
    const wf = makeWorkflow()
    validateComfyWorkflowMock.mockResolvedValue(wf)
    validateLinkTopologyMock.mockReturnValue([])
    // Capture the graph repairLinks receives to prove it is a copy, not the
    // caller's object (identity check below).
    let received: ComfyWorkflowJSON | undefined
    repairLinksMock.mockImplementation((g: ComfyWorkflowJSON) => {
      received = g
      return repairResult(g)
    })
    const { validateWorkflow } = useWorkflowValidation()
    await validateWorkflow(wf)
    expect(received).not.toBe(wf)
  })
  it('silent option suppresses toasts but still validates', async () => {
    const wf = makeWorkflow()
    validateComfyWorkflowMock.mockResolvedValue(wf)
    // Even with errors present and a repair performed, silent mode must
    // produce zero toasts while still returning validated data.
    validateLinkTopologyMock.mockReturnValue([
      { kind: 'missing-origin-node', link: makeLink(1) }
    ])
    repairLinksMock.mockImplementation((g) =>
      repairResult(g, { fixed: true, patched: 1, deleted: 0 })
    )
    const { validateWorkflow } = useWorkflowValidation()
    const out = await validateWorkflow(wf, { silent: true })
    expect(out.graphData).not.toBeNull()
    expect(toastAddMock).not.toHaveBeenCalled()
    expect(toastAddAlertMock).not.toHaveBeenCalled()
  })
})

View File

@@ -1,156 +1,60 @@
import {
LinkRepairAbortedError,
describeTopologyError,
repairLinks,
validateLinkTopology
} from '@comfyorg/workflow-validation'
import type {
SerialisedGraph,
TopologyError
} from '@comfyorg/workflow-validation'
import { useI18n } from 'vue-i18n'
import type { ISerialisedGraph } from '@/lib/litegraph/src/types/serialisation'
import { useToastStore } from '@/platform/updates/common/toastStore'
import type { ComfyWorkflowJSON } from '@/platform/workflow/validation/schemas/workflowSchema'
import { validateComfyWorkflow } from '@/platform/workflow/validation/schemas/workflowSchema'
import { clone } from '@/scripts/utils'
import { fixBadLinks } from '@/utils/linkFixer'
interface ValidationResult {
graphData: ComfyWorkflowJSON | null
}
const TOPOLOGY_TOAST_LIMIT = 5
export function useWorkflowValidation() {
const toastStore = useToastStore()
const { t } = useI18n()
function linkParams(error: TopologyError): Record<string, unknown> {
return {
linkId: error.link.linkId,
originId: error.link.originId,
originSlot: error.link.originSlot,
targetId: error.link.targetId,
targetSlot: error.link.targetSlot
}
}
function localizeTopologyError(error: TopologyError): string {
const base = linkParams(error)
const tuple = t('validation.topology.tuple', base)
const params = { ...base, tuple }
switch (error.kind) {
case 'missing-origin-node':
return t('validation.topology.missingOriginNode', params)
case 'missing-target-node':
return t('validation.topology.missingTargetNode', params)
case 'origin-slot-out-of-bounds':
return t(
'validation.topology.originSlotOutOfBounds',
error.originSlotCount,
{ named: { ...params, count: error.originSlotCount } }
)
case 'target-slot-out-of-bounds':
return t(
'validation.topology.targetSlotOutOfBounds',
error.targetSlotCount,
{ named: { ...params, count: error.targetSlotCount } }
)
case 'origin-link-not-listed':
return t('validation.topology.originLinkNotListed', params)
case 'target-link-mismatch':
return t('validation.topology.targetLinkMismatch', {
...params,
actualLink: String(error.actualLink)
})
}
}
function summariseTopologyErrors(errors: TopologyError[]): string {
const lines = errors
.slice(0, TOPOLOGY_TOAST_LIMIT)
.map(localizeTopologyError)
if (errors.length > TOPOLOGY_TOAST_LIMIT) {
lines.push(
t('validation.topology.overflow', {
count: errors.length - TOPOLOGY_TOAST_LIMIT
})
)
}
return lines.join('\n')
}
function reportTopology(errors: TopologyError[], silent: boolean) {
if (silent || errors.length === 0) return
for (const e of errors) console.warn('[topology]', describeTopologyError(e))
toastStore.add({
severity: 'warn',
summary: t('validation.topology.invalidLinks', errors.length, {
named: { count: errors.length }
}),
detail: summariseTopologyErrors(errors),
life: 10_000
})
}
function tryFixLinks(
graphData: ComfyWorkflowJSON,
options: { silent?: boolean } = {}
): { graph: ComfyWorkflowJSON; aborted: boolean } {
) {
const { silent = false } = options
const topologyErrors = validateLinkTopology(graphData as SerialisedGraph)
reportTopology(topologyErrors, silent)
const repairTarget = clone(graphData)
// Collect all logs in an array
const logs: string[] = []
try {
const linkValidation = repairLinks(repairTarget as SerialisedGraph, {
fix: true,
silent,
logger: {
log: (...args: unknown[]) => logs.push(args.join(' '))
// Then validate and fix links if schema validation passed
const linkValidation = fixBadLinks(graphData as ISerialisedGraph, {
fix: true,
silent,
logger: {
log: (...args: unknown[]) => {
logs.push(args.join(' '))
}
})
if (!silent && logs.length > 0) {
toastStore.add({
severity: 'warn',
summary: t('validation.topology.validationSummary'),
detail: logs.join('\n')
})
}
if (linkValidation.fixed && !silent) {
})
if (!silent && logs.length > 0) {
toastStore.add({
severity: 'warn',
summary: 'Workflow Validation',
detail: logs.join('\n')
})
}
// If links were fixed, notify the user
if (linkValidation.fixed) {
if (!silent) {
toastStore.add({
severity: 'success',
summary: t('validation.topology.linksFixedSummary'),
detail: t('validation.topology.linksFixedDetail', {
patched: linkValidation.patched,
deleted: linkValidation.deleted
})
summary: 'Workflow Links Fixed',
detail: `Fixed ${linkValidation.patched} node connections and removed ${linkValidation.deleted} invalid links.`
})
}
return {
graph: linkValidation.graph as ComfyWorkflowJSON,
aborted: false
}
} catch (err: unknown) {
if (err instanceof LinkRepairAbortedError) {
if (!silent) {
toastStore.add({
severity: 'error',
summary: t('validation.topology.abortedSummary'),
detail: localizeTopologyError(err.topologyError),
life: 15_000
})
}
console.error('[linkFixer aborted]', err.topologyError, err)
return { graph: graphData, aborted: true }
}
console.error(err)
throw err
}
return linkValidation.graph
}
/**
* Validates a workflow, including link validation and schema validation
*/
async function validateWorkflow(
graphData: ComfyWorkflowJSON,
options: {
@@ -159,16 +63,32 @@ export function useWorkflowValidation() {
): Promise<ValidationResult> {
const { silent = false } = options
const validatedGraphData = await validateComfyWorkflow(graphData, (err) => {
if (!silent) toastStore.addAlert(err)
})
let validatedData: ComfyWorkflowJSON | null = null
if (!validatedGraphData) {
return { graphData: null }
// First do schema validation
const validatedGraphData = await validateComfyWorkflow(
graphData,
/* onError=*/ (err) => {
if (!silent) {
toastStore.addAlert(err)
}
}
)
if (validatedGraphData) {
try {
validatedData = tryFixLinks(validatedGraphData, {
silent
}) as ComfyWorkflowJSON
} catch (err) {
// Link fixer itself is throwing an error
console.error(err)
}
}
const { graph, aborted } = tryFixLinks(validatedGraphData, { silent })
return { graphData: aborted ? null : graph }
return {
graphData: validatedData
}
}
return {

View File

@@ -1,8 +1,7 @@
import { z } from 'zod'
import type { SafeParseReturnType } from 'zod'
import { fromZodError } from 'zod-validation-error'
type RendererType = 'LG' | 'Vue' | 'Vue-corrected'
import type { RendererType } from '@/lib/litegraph/src/LGraph'
const zRendererType = z.enum([
'LG',
@@ -314,16 +313,7 @@ const zExtra = z
.passthrough()
const zGraphDefinitions = z.object({
subgraphs: z.lazy(
(): z.ZodArray<
z.ZodType<
SubgraphDefinitionBase<ComfyWorkflow1BaseOutput>,
z.ZodTypeDef,
SubgraphDefinitionBase<ComfyWorkflow1BaseInput>
>,
'many'
> => z.array(zSubgraphDefinition)
)
subgraphs: z.lazy(() => z.array(zSubgraphDefinition))
})
const zBaseExportableGraph = z.object({

View File

@@ -1,37 +1,11 @@
import { render, screen } from '@testing-library/vue'
import { createI18n } from 'vue-i18n'
import { describe, expect, it } from 'vitest'
import RoleBadge from './RoleBadge.vue'
const i18n = createI18n({
legacy: false,
locale: 'en',
messages: {
en: {
workspaceSwitcher: {
roleOwner: 'Owner',
roleMember: 'Member'
}
}
}
})
function renderRoleBadge(role: 'owner' | 'member') {
return render(RoleBadge, {
props: { role },
global: { plugins: [i18n] }
})
}
describe('RoleBadge', () => {
it('renders the owner label', () => {
renderRoleBadge('owner')
expect(screen.getByText('Owner')).toBeInTheDocument()
})
it('renders the member label', () => {
renderRoleBadge('member')
expect(screen.getByText('Member')).toBeInTheDocument()
it('renders the label text', () => {
render(RoleBadge, { props: { label: 'PRO' } })
expect(screen.getByText('PRO')).toBeInTheDocument()
})
})

View File

@@ -2,23 +2,12 @@
<span
class="rounded-full bg-base-foreground px-1 py-0.5 text-2xs font-bold text-base-background uppercase"
>
{{ roleBadgeLabel }}
{{ label }}
</span>
</template>
<script setup lang="ts">
import { computed } from 'vue'
import { useI18n } from 'vue-i18n'
const { role } = defineProps<{
role: 'owner' | 'member'
defineProps<{
label: string
}>()
const { t } = useI18n()
const roleBadgeLabel = computed(() =>
role === 'owner'
? t('workspaceSwitcher.roleOwner')
: t('workspaceSwitcher.roleMember')
)
</script>

View File

@@ -46,12 +46,10 @@
: workspace.name
}}
</span>
<span
<RoleBadge
v-if="resolveTierLabel(workspace)"
class="rounded-full bg-base-foreground px-1 py-0.5 text-2xs font-bold text-base-background uppercase"
>
{{ resolveTierLabel(workspace) }}
</span>
:label="resolveTierLabel(workspace)!"
/>
</div>
<span class="text-xs text-muted-foreground">
{{ getRoleLabel(workspace.role) }}
@@ -112,6 +110,7 @@ import { storeToRefs } from 'pinia'
import { computed } from 'vue'
import { useI18n } from 'vue-i18n'
import RoleBadge from '@/platform/workspace/components/RoleBadge.vue'
import WorkspaceProfilePic from '@/platform/workspace/components/WorkspaceProfilePic.vue'
import { useBillingContext } from '@/composables/billing/useBillingContext'
import { useWorkspaceSwitch } from '@/platform/workspace/composables/useWorkspaceSwitch'

View File

@@ -60,12 +60,11 @@
>
{{ workspace.name }}
</span>
<span
<RoleBadge
v-if="tierLabels.get(workspace.id)"
class="shrink-0 rounded-full bg-base-foreground px-1 py-0.5 text-2xs font-bold text-base-background uppercase"
>
{{ tierLabels.get(workspace.id) }}
</span>
class="shrink-0"
:label="tierLabels.get(workspace.id)!"
/>
</div>
</div>
<span class="text-primary-foreground shrink-0 text-sm font-medium">
@@ -141,6 +140,7 @@ import { computed, ref } from 'vue'
import { useI18n } from 'vue-i18n'
import Button from '@/components/ui/button/Button.vue'
import RoleBadge from '@/platform/workspace/components/RoleBadge.vue'
import WorkspaceProfilePic from '@/platform/workspace/components/WorkspaceProfilePic.vue'
import { useWorkspaceSwitch } from '@/platform/workspace/composables/useWorkspaceSwitch'
import { useWorkspaceTierLabel } from '@/platform/workspace/composables/useWorkspaceTierLabel'

View File

@@ -0,0 +1,69 @@
import type { ComponentProps } from 'vue-component-type-helpers'
import { render, screen } from '@testing-library/vue'
import { describe, expect, it } from 'vitest'
import { createI18n } from 'vue-i18n'
import type { WorkspaceMember } from '@/platform/workspace/stores/teamWorkspaceStore'
import MemberListItem from './MemberListItem.vue'
// Real (non-mocked) i18n instance with only the messages these tests read;
// missing/fallback warnings are silenced to keep test output clean.
const i18n = createI18n({
  legacy: false,
  locale: 'en',
  messages: {
    en: {
      g: { you: 'you', moreOptions: 'More options' },
      workspaceSwitcher: { roleOwner: 'Owner', roleMember: 'Member' }
    }
  },
  missingWarn: false,
  fallbackWarn: false
})
// Canonical member fixture; tests override `role` (and other fields) as needed.
const baseMember: WorkspaceMember = {
  id: 'u1',
  name: 'Alice',
  email: 'alice@example.com',
  joinDate: new Date('2025-01-01'),
  role: 'owner'
}
// Default props with the role badge visible so badge assertions work out of the box.
const baseProps: ComponentProps<typeof MemberListItem> = {
  member: baseMember,
  isCurrentUser: false,
  gridCols: 'grid-cols-3',
  showRoleBadge: true
}
/**
 * Renders MemberListItem with baseProps merged under any per-test overrides.
 * UserAvatar and Button are stubbed out — these tests only assert on text.
 */
function renderItem(
  propOverrides?: Partial<ComponentProps<typeof MemberListItem>>
) {
  return render(MemberListItem, {
    props: { ...baseProps, ...propOverrides },
    global: {
      plugins: [i18n],
      stubs: {
        UserAvatar: { template: '<div />' },
        Button: { template: '<button />', props: ['variant', 'size'] }
      }
    }
  })
}
describe('MemberListItem', () => {
  // Renders the item for the given role and asserts the badge text shown.
  const expectBadgeForRole = (role: 'owner' | 'member', label: string) => {
    renderItem({ member: { ...baseMember, role } })
    expect(screen.getByText(label)).toBeInTheDocument()
  }
  it('shows translated owner badge for owner role', () => {
    expectBadgeForRole('owner', 'Owner')
  })
  it('shows translated member badge for member role', () => {
    expectBadgeForRole('member', 'Member')
  })
  it('hides role badge when showRoleBadge is false', () => {
    renderItem({ showRoleBadge: false })
    expect(screen.queryByText('Owner')).not.toBeInTheDocument()
  })
})

View File

@@ -22,7 +22,10 @@
({{ $t('g.you') }})
</span>
</span>
<RoleBadge v-if="showRoleBadge" :role="member.role" />
<RoleBadge
v-if="showRoleBadge"
:label="getRoleBadgeLabel(member.role)"
/>
</div>
<span class="text-sm text-muted-foreground">
{{ member.email }}
@@ -84,7 +87,13 @@ defineEmits<{
showMenu: [event: Event]
}>()
const { d } = useI18n()
const { d, t } = useI18n()
function getRoleBadgeLabel(role: string): string {
return role === 'owner'
? t('workspaceSwitcher.roleOwner')
: t('workspaceSwitcher.roleMember')
}
function formatDate(date: Date): string {
return d(date, { dateStyle: 'medium' })

View File

@@ -1,4 +1,3 @@
import { fixBadLinks } from '@comfyorg/workflow-validation'
import { describe, expect, it, vi } from 'vitest'
import type { SerialisedLLinkArray } from '@/lib/litegraph/src/LLink'
@@ -7,6 +6,8 @@ import type {
ISerialisedNode
} from '@/lib/litegraph/src/types/serialisation'
import { fixBadLinks } from './linkFixer'
type SerialisedInput = NonNullable<ISerialisedNode['inputs']>[number]
type SerialisedOutput = NonNullable<ISerialisedNode['outputs']>[number]

View File

@@ -24,17 +24,16 @@
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
import type { INodeOutputSlot } from '@/lib/litegraph/src/interfaces'
import type { NodeId } from '@/lib/litegraph/src/LGraphNode'
import type { SerialisedLLinkArray } from '@/lib/litegraph/src/LLink'
import type { LGraph, LGraphNode, LLink } from '@/lib/litegraph/src/litegraph'
import type {
SerialisedGraph,
SerialisedLinkArray,
SerialisedLinkObject,
SerialisedNode,
SerialisedNodeOutput
} from './serialised'
import { describeTopologyError, toLinkContext } from './linkTopology'
import type { LinkContext, TopologyError } from './linkTopology'
ISerialisedGraph,
ISerialisedNode
} from '@/lib/litegraph/src/types/serialisation'
export interface RepairResult<T = SerialisedGraph> {
interface BadLinksData<T = ISerialisedGraph | LGraph> {
hasBadLinks: boolean
fixed: boolean
graph: T
@@ -42,49 +41,22 @@ export interface RepairResult<T = SerialisedGraph> {
deleted: number
}
/**
* Thrown when the repair pass detects a divergence between its in-memory
* patched view and the live graph data typically because the workflow's
* topology cannot be reconciled (e.g. links pointing to slots that do not
* exist on the target node). The attached `TopologyError` carries the
* `[linkId, src, srcSlot, tgt, tgtSlot]` tuple so callers can report the
* precise offending link instead of a generic invariant failure.
*/
export class LinkRepairAbortedError extends Error {
public readonly topologyError: TopologyError
constructor(topologyError: TopologyError) {
super(describeTopologyError(topologyError))
this.topologyError = topologyError
this.name = 'LinkRepairAbortedError'
}
}
enum IoDirection {
INPUT,
OUTPUT
}
interface LiveGraph extends SerialisedGraph {
getNodeById(id: string | number): SerialisedNode | undefined
function getNodeById(graph: ISerialisedGraph | LGraph, id: NodeId) {
if ((graph as LGraph).getNodeById) {
return (graph as LGraph).getNodeById(id)
}
graph = graph as ISerialisedGraph
return graph.nodes.find((node: ISerialisedNode) => node.id == id)!
}
function isLiveGraph(graph: SerialisedGraph | LiveGraph): graph is LiveGraph {
return typeof (graph as LiveGraph).getNodeById === 'function'
}
function getNodeById(
graph: SerialisedGraph | LiveGraph,
id: string | number
): SerialisedNode | undefined {
if (isLiveGraph(graph)) return graph.getNodeById(id)
return graph.nodes.find((n) => n.id == id)
}
function extendLink(link: SerialisedLinkArray): SerialisedLinkObject & {
link: SerialisedLinkArray
} {
function extendLink(link: SerialisedLLinkArray) {
return {
link,
link: link,
id: link[0],
origin_id: link[1],
origin_slot: link[2],
@@ -94,26 +66,23 @@ function extendLink(link: SerialisedLinkArray): SerialisedLinkObject & {
}
}
interface RepairOptions {
fix?: boolean
silent?: boolean
logger?: { log: (...args: unknown[]) => void }
}
/**
* Best-effort repair of structurally inconsistent link data on a
* serialised or live graph. Pass `{ fix: false }` (default) for a dry
* run that only reports whether bad links exist.
* Takes a ISerialisedGraph or live LGraph and inspects the links and nodes to ensure the linking
* makes logical sense. Can apply fixes when passed the `fix` argument as true.
*
* Throws `LinkRepairAbortedError` when the graph diverges from the
* patched view in a way the algorithm cannot reconcile (e.g. links
* pointing into out-of-bounds slots). The error carries a structured
* `TopologyError` describing the offending link.
* Note that fixes are a best-effort attempt. Seems to get it correct in most cases, but there is a
* chance it correct an anomaly that results in placing an incorrect link (say, if there were two
* links in the data). Users should take care to not overwrite work until manually checking the
* result.
*/
export function repairLinks(
graph: SerialisedGraph,
options: RepairOptions = {}
): RepairResult {
export function fixBadLinks(
graph: ISerialisedGraph | LGraph,
options: {
fix?: boolean
silent?: boolean
logger?: { log: (...args: unknown[]) => void }
} = {}
): BadLinksData {
const { fix = false, silent = false, logger: _logger = console } = options
const logger = {
log: (...args: unknown[]) => {
@@ -136,15 +105,18 @@ export function repairLinks(
} = {}
const data: {
patchedNodes: SerialisedNode[]
patchedNodes: Array<ISerialisedNode | LGraphNode>
deletedLinks: number[]
} = {
patchedNodes: [],
deletedLinks: []
}
/**
* Internal patch node. We keep track of changes in patchedNodeSlots in case we're in a dry run.
*/
function patchNodeSlot(
node: SerialisedNode,
node: ISerialisedNode | LGraphNode,
ioDir: IoDirection,
slot: number,
linkId: number,
@@ -154,9 +126,12 @@ export function repairLinks(
const patchedNode = patchedNodeSlots[node.id]!
if (ioDir == IoDirection.INPUT) {
patchedNode['inputs'] = patchedNode['inputs'] || {}
// We can set to null (delete), so undefined means we haven't set it at all.
if (patchedNode['inputs']![slot] !== undefined) {
logger.log(
` > Already set ${node.id}.inputs[${slot}] to ${patchedNode['inputs']![slot]!} Skipping.`
` > Already set ${node.id}.inputs[${slot}] to ${patchedNode[
'inputs'
]![slot]!} Skipping.`
)
return false
}
@@ -200,7 +175,8 @@ export function repairLinks(
if (fix) {
node.outputs = node.outputs || []
node.outputs[slot] =
node.outputs[slot] || ({} as SerialisedNodeOutput)
node.outputs[slot] ||
({} satisfies Partial<INodeOutputSlot> as INodeOutputSlot)
node.outputs[slot]!.links = node.outputs[slot]!.links || []
node.outputs[slot]!.links!.push(linkId)
}
@@ -223,48 +199,25 @@ export function repairLinks(
return true
}
function buildLinkContext(
node: SerialisedNode,
ioDir: IoDirection,
slot: number,
linkId: number
): LinkContext {
if (ioDir === IoDirection.INPUT) {
return {
linkId,
originId: '?',
originSlot: -1,
targetId: node.id,
targetSlot: slot
}
}
return {
linkId,
originId: node.id,
originSlot: slot,
targetId: '?',
targetSlot: -1
}
}
/**
* Internal to check if a node (or patched data) has a linkId.
*/
function nodeHasLinkId(
node: SerialisedNode,
node: ISerialisedNode | LGraphNode,
ioDir: IoDirection,
slot: number,
linkId: number
) {
// Patched data should be canonical. We can double check if fixing too.
let has = false
if (ioDir === IoDirection.INPUT) {
const nodeHasIt = node.inputs?.[slot]?.link === linkId
if (patchedNodeSlots[node.id]?.['inputs']) {
const patchedHasIt =
patchedNodeSlots[node.id]!['inputs']![slot] === linkId
// If we're fixing, double check that node matches.
if (fix && nodeHasIt !== patchedHasIt) {
throw new LinkRepairAbortedError({
kind: 'target-link-mismatch',
link: buildLinkContext(node, ioDir, slot, linkId),
actualLink: node.inputs?.[slot]?.link ?? null
})
throw Error('Error. Expected node to match patched data.')
}
has = patchedHasIt
} else {
@@ -275,11 +228,9 @@ export function repairLinks(
if (patchedNodeSlots[node.id]?.['outputs']?.[slot]?.['changes'][linkId]) {
const patchedHasIt =
patchedNodeSlots[node.id]!['outputs']![slot]?.links.includes(linkId)
// If we're fixing, double check that node matches.
if (fix && nodeHasIt !== patchedHasIt) {
throw new LinkRepairAbortedError({
kind: 'origin-link-not-listed',
link: buildLinkContext(node, ioDir, slot, linkId)
})
throw Error('Error. Expected node to match patched data.')
}
has = !!patchedHasIt
} else {
@@ -289,23 +240,24 @@ export function repairLinks(
return has
}
/**
* Internal to check if a node (or patched data) has a linkId.
*/
function nodeHasAnyLink(
node: SerialisedNode,
node: ISerialisedNode | LGraphNode,
ioDir: IoDirection,
slot: number
) {
// Patched data should be canonical. We can double check if fixing too.
let hasAny = false
if (ioDir === IoDirection.INPUT) {
const nodeHasAny = node.inputs?.[slot]?.link != null
if (patchedNodeSlots[node.id]?.['inputs']) {
const patchedHasAny =
patchedNodeSlots[node.id]!['inputs']![slot] != null
// If we're fixing, double check that node matches.
if (fix && nodeHasAny !== patchedHasAny) {
throw new LinkRepairAbortedError({
kind: 'target-slot-out-of-bounds',
link: buildLinkContext(node, ioDir, slot, -1),
targetSlotCount: node.inputs?.length ?? 0
})
throw Error('Error. Expected node to match patched data.')
}
hasAny = patchedHasAny
} else {
@@ -316,12 +268,9 @@ export function repairLinks(
if (patchedNodeSlots[node.id]?.['outputs']?.[slot]?.['changes']) {
const patchedHasAny =
patchedNodeSlots[node.id]!['outputs']![slot]?.links.length
// If we're fixing, double check that node matches.
if (fix && nodeHasAny !== patchedHasAny) {
throw new LinkRepairAbortedError({
kind: 'origin-slot-out-of-bounds',
link: buildLinkContext(node, ioDir, slot, -1),
originSlotCount: node.outputs?.length ?? 0
})
throw Error('Error. Expected node to match patched data.')
}
hasAny = !!patchedHasAny
} else {
@@ -331,57 +280,52 @@ export function repairLinks(
return hasAny
}
let links: Array<SerialisedLinkArray | SerialisedLinkObject> = []
let links: Array<SerialisedLLinkArray | LLink> = []
if (!Array.isArray(graph.links)) {
links = Object.values(graph.links).reduce(
(acc: Array<SerialisedLinkArray | SerialisedLinkObject>, v: unknown) => {
const link = v as SerialisedLinkObject
acc[link.id] = link
return acc
},
links
)
links = Object.values(graph.links).reduce((acc, v) => {
acc[v.id] = v
return acc
}, links)
} else {
links = graph.links.filter(
(l): l is SerialisedLinkArray | SerialisedLinkObject => l != null
)
links = graph.links
}
const linksReverse = [...links]
linksReverse.reverse()
for (const l of linksReverse) {
if (!l) continue
const linkObj =
(l as SerialisedLinkObject).origin_slot != null
? (l as SerialisedLinkObject)
: extendLink(l as SerialisedLinkArray)
const link =
(l as LLink).origin_slot != null
? (l as LLink)
: extendLink(l as SerialisedLLinkArray)
const ctx = toLinkContext(l)
const originNode = getNodeById(graph, ctx.originId)
const originNode = getNodeById(graph, link.origin_id)
const originHasLink = () =>
nodeHasLinkId(originNode!, IoDirection.OUTPUT, ctx.originSlot, ctx.linkId)
const patchOrigin = (op: 'ADD' | 'REMOVE', id = ctx.linkId) =>
patchNodeSlot(originNode!, IoDirection.OUTPUT, ctx.originSlot, id, op)
nodeHasLinkId(originNode!, IoDirection.OUTPUT, link.origin_slot, link.id)
const patchOrigin = (op: 'ADD' | 'REMOVE', id = link.id) =>
patchNodeSlot(originNode!, IoDirection.OUTPUT, link.origin_slot, id, op)
const targetNode = getNodeById(graph, ctx.targetId)
const targetNode = getNodeById(graph, link.target_id)
const targetHasLink = () =>
nodeHasLinkId(targetNode!, IoDirection.INPUT, ctx.targetSlot, ctx.linkId)
nodeHasLinkId(targetNode!, IoDirection.INPUT, link.target_slot, link.id)
const targetHasAnyLink = () =>
nodeHasAnyLink(targetNode!, IoDirection.INPUT, ctx.targetSlot)
const patchTarget = (op: 'ADD' | 'REMOVE', id = ctx.linkId) =>
patchNodeSlot(targetNode!, IoDirection.INPUT, ctx.targetSlot, id, op)
nodeHasAnyLink(targetNode!, IoDirection.INPUT, link.target_slot)
const patchTarget = (op: 'ADD' | 'REMOVE', id = link.id) =>
patchNodeSlot(targetNode!, IoDirection.INPUT, link.target_slot, id, op)
const originLog = `origin(${ctx.originId}).outputs[${ctx.originSlot}].links`
const targetLog = `target(${ctx.targetId}).inputs[${ctx.targetSlot}].link`
const originLog = `origin(${link.origin_id}).outputs[${link.origin_slot}].links`
const targetLog = `target(${link.target_id}).inputs[${link.target_slot}].link`
if (!originNode || !targetNode) {
if (!originNode && !targetNode) {
logger.log(
`Link ${ctx.linkId} is invalid, both origin ${ctx.originId} and target ${ctx.targetId} do not exist`
`Link ${link.id} is invalid, ` +
`both origin ${link.origin_id} and target ${link.target_id} do not exist`
)
} else if (!originNode) {
logger.log(
`Link ${ctx.linkId} is funky... origin ${ctx.originId} does not exist, but target ${ctx.targetId} does.`
`Link ${link.id} is funky... ` +
`origin ${link.origin_id} does not exist, but target ${link.target_id} does.`
)
if (targetHasLink()) {
logger.log(
@@ -389,13 +333,14 @@ export function repairLinks(
)
patchTarget('REMOVE', -1)
}
} else {
} else if (!targetNode) {
logger.log(
`Link ${ctx.linkId} is funky... target ${ctx.targetId} does not exist, but origin ${ctx.originId} does.`
`Link ${link.id} is funky... ` +
`target ${link.target_id} does not exist, but origin ${link.origin_id} does.`
)
if (originHasLink()) {
logger.log(
` > [PATCH] Origin's links' has ${ctx.linkId}; will remove the link first.`
` > [PATCH] Origin's links' has ${link.id}; will remove the link first.`
)
patchOrigin('REMOVE')
}
@@ -406,101 +351,105 @@ export function repairLinks(
if (targetHasLink() || originHasLink()) {
if (!originHasLink()) {
logger.log(
`${ctx.linkId} is funky... ${originLog} does NOT contain it, but ${targetLog} does.`
`${link.id} is funky... ${originLog} does NOT contain it, but ${targetLog} does.`
)
logger.log(
` > [PATCH] Attempt a fix by adding this ${ctx.linkId} to ${originLog}.`
` > [PATCH] Attempt a fix by adding this ${link.id} to ${originLog}.`
)
patchOrigin('ADD')
} else if (!targetHasLink()) {
logger.log(
`${ctx.linkId} is funky... ${targetLog} is NOT correct (is ${
targetNode.inputs?.[ctx.targetSlot]?.link
`${link.id} is funky... ${targetLog} is NOT correct (is ${
targetNode.inputs?.[link.target_slot]?.link
}), but ${originLog} contains it`
)
if (!targetHasAnyLink()) {
logger.log(
` > [PATCH] ${targetLog} is not defined, will set to ${ctx.linkId}.`
` > [PATCH] ${targetLog} is not defined, will set to ${link.id}.`
)
let patched = patchTarget('ADD')
if (!patched) {
logger.log(
` > [PATCH] Nvm, ${targetLog} already patched. Removing ${ctx.linkId} from ${originLog}.`
` > [PATCH] Nvm, ${targetLog} already patched. Removing ${link.id} from ${originLog}.`
)
patched = patchOrigin('REMOVE')
}
} else {
logger.log(
` > [PATCH] ${targetLog} is defined, removing ${ctx.linkId} from ${originLog}.`
` > [PATCH] ${targetLog} is defined, removing ${link.id} from ${originLog}.`
)
patchOrigin('REMOVE')
}
}
}
void linkObj
}
// Now that we've cleaned up the inputs, outputs, run through it looking for dangling links.,
for (const l of linksReverse) {
if (!l) continue
const ctx = toLinkContext(l)
const originNode = getNodeById(graph, ctx.originId)
const targetNode = getNodeById(graph, ctx.targetId)
const link =
(l as LLink).origin_slot != null
? (l as LLink)
: extendLink(l as SerialisedLLinkArray)
const originNode = getNodeById(graph, link.origin_id)
const targetNode = getNodeById(graph, link.target_id)
// Now that we've manipulated the linking, check again if they both exist.
if (
(!originNode ||
!nodeHasLinkId(
originNode,
IoDirection.OUTPUT,
ctx.originSlot,
ctx.linkId
link.origin_slot,
link.id
)) &&
(!targetNode ||
!nodeHasLinkId(
targetNode,
IoDirection.INPUT,
ctx.targetSlot,
ctx.linkId
link.target_slot,
link.id
))
) {
logger.log(
`${ctx.linkId} is def invalid; BOTH origin node ${ctx.originId} ${
!originNode ? 'is removed' : `doesn't have ${ctx.linkId}`
} and ${ctx.originId} target node ${
!targetNode ? 'is removed' : `doesn't have ${ctx.linkId}`
`${link.id} is def invalid; BOTH origin node ${link.origin_id} ${
!originNode ? 'is removed' : `doesn't have ${link.id}`
} and ${link.origin_id} target node ${
!targetNode ? 'is removed' : `doesn't have ${link.id}`
}.`
)
data.deletedLinks.push(ctx.linkId)
data.deletedLinks.push(link.id)
continue
}
}
// If we're fixing, then we've been patching along the way. Now go through and actually delete
// the zombie links from `app.graph.links`
if (fix) {
for (let i = data.deletedLinks.length - 1; i >= 0; i--) {
logger.log(`Deleting link #${data.deletedLinks[i]}.`)
if (isLiveGraph(graph)) {
delete (graph.links as Record<number, unknown>)[data.deletedLinks[i]!]
if ((graph as LGraph).getNodeById) {
delete graph.links[data.deletedLinks[i]!]
} else {
const idx = (
graph.links as Array<
SerialisedLinkArray | SerialisedLinkObject | null
>
).findIndex(
graph = graph as ISerialisedGraph
// Sometimes we got objects for links if passed after ComfyUI's loadGraphData modifies the
// data. We make a copy now, but can handle the bastardized objects just in case.
const idx = graph.links.findIndex(
(l) =>
l &&
((l as SerialisedLinkArray)[0] === data.deletedLinks[i] ||
(l[0] === data.deletedLinks[i] ||
('id' in l && l.id === data.deletedLinks[i]))
)
if (idx === -1) {
logger.log(`INDEX NOT FOUND for #${data.deletedLinks[i]}`)
continue
}
logger.log(`splicing ${idx} from links`)
;(graph.links as Array<unknown>).splice(idx, 1)
graph.links.splice(idx, 1)
}
}
if (!isLiveGraph(graph)) {
graph.links = (
graph.links as Array<SerialisedLinkArray | SerialisedLinkObject | null>
).filter((l): l is SerialisedLinkArray | SerialisedLinkObject => !!l)
// If we're a serialized graph, we can filter out the links because it's just an array.
if (!(graph as LGraph).getNodeById) {
graph.links = (graph as ISerialisedGraph).links.filter((l) => !!l)
}
}
if (!data.patchedNodes.length && !data.deletedLinks.length) {
@@ -521,8 +470,9 @@ export function repairLinks(
const hasChanges = !!(data.patchedNodes.length || data.deletedLinks.length)
let hasBadLinks: boolean = hasChanges
// If we're fixing, then let's run it again to see if there are no more bad links.
if (fix) {
const rerun = repairLinks(graph, { fix: false, silent: true })
const rerun = fixBadLinks(graph, { fix: false, silent: true })
hasBadLinks = rerun.hasBadLinks
}

View File

@@ -27,10 +27,6 @@
],
"@/utils/networkUtil": [
"./packages/shared-frontend-utils/src/networkUtil.ts"
],
"@/utils/linkFixer": ["./packages/workflow-validation/src/linkRepair.ts"],
"@/platform/workflow/validation/schemas/workflowSchema": [
"./packages/workflow-validation/src/workflowSchema.ts"
]
},
"typeRoots": ["src/types", "node_modules/@types", "./node_modules"],

View File

@@ -634,9 +634,6 @@ export default defineConfig({
'@/utils/formatUtil': '/packages/shared-frontend-utils/src/formatUtil.ts',
'@/utils/networkUtil':
'/packages/shared-frontend-utils/src/networkUtil.ts',
'@/utils/linkFixer': '/packages/workflow-validation/src/linkRepair.ts',
'@/platform/workflow/validation/schemas/workflowSchema':
'/packages/workflow-validation/src/workflowSchema.ts',
'@': '/src'
}
},