mirror of
https://github.com/Comfy-Org/ComfyUI_frontend.git
synced 2026-05-06 22:21:51 +00:00
Compare commits
2 Commits
test/cover
...
pysssss/fi
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
48907bdcb1 | ||
|
|
c29dd37de4 |
23
.github/actions/ashby-pull/action.yaml
vendored
23
.github/actions/ashby-pull/action.yaml
vendored
@@ -1,23 +0,0 @@
|
||||
name: Ashby Pull
|
||||
description: 'Refresh the apps/website Ashby roles snapshot from the Ashby job board API'
|
||||
inputs:
|
||||
api_key:
|
||||
description: 'Ashby API key (WEBSITE_ASHBY_API_KEY).'
|
||||
required: true
|
||||
job_board_name:
|
||||
description: 'Ashby job board name (WEBSITE_ASHBY_JOB_BOARD_NAME).'
|
||||
required: true
|
||||
runs:
|
||||
using: 'composite'
|
||||
steps:
|
||||
# Note: this action assumes the frontend repo is checked out at the workspace root.
|
||||
|
||||
- name: Setup frontend
|
||||
uses: ./.github/actions/setup-frontend
|
||||
|
||||
- name: Refresh Ashby snapshot
|
||||
shell: bash
|
||||
env:
|
||||
WEBSITE_ASHBY_API_KEY: ${{ inputs.api_key }}
|
||||
WEBSITE_ASHBY_JOB_BOARD_NAME: ${{ inputs.job_board_name }}
|
||||
run: pnpm --filter @comfyorg/website ashby:refresh-snapshot
|
||||
87
.github/actions/changes-filter/action.yaml
vendored
87
.github/actions/changes-filter/action.yaml
vendored
@@ -1,87 +0,0 @@
|
||||
# Outputs default to 'true' for non-pull_request events (push, merge_group):
|
||||
# granular path filtering is a PR-only optimization. This avoids the silent
|
||||
# skip footgun where a job gated on e.g. `app-website-changes == 'true'`
|
||||
# would never run on push.
|
||||
#
|
||||
# Shared dependency files (root package.json, pnpm-lock.yaml,
|
||||
# pnpm-workspace.yaml) are folded into every app-* and packages-changes
|
||||
# output so a lockfile bump correctly invalidates each granular gate. They
|
||||
# are NOT folded into docs-changes.
|
||||
#
|
||||
# Two paths-filter steps are needed because predicate-quantifier=every is
|
||||
# required for the negated globs in `should-run` but breaks multi-pattern
|
||||
# OR filters like `docs:` and `deps:`.
|
||||
#
|
||||
# Requires the caller to have checked out the repository.
|
||||
|
||||
name: 'Detect Path Changes'
|
||||
description: >
|
||||
Computes typed *-changes outputs and a back-compat should-run for
|
||||
path-gated CI jobs.
|
||||
|
||||
outputs:
|
||||
should-run:
|
||||
description: 'Any file outside `apps/`, `docs/`, `.storybook/`, or `**/*.md` changed.'
|
||||
value: ${{ github.event_name != 'pull_request' || steps.relevant.outputs.relevant == 'true' }}
|
||||
app-website-changes:
|
||||
description: 'Shared deps or `apps/website/**` changed.'
|
||||
value: ${{ github.event_name != 'pull_request' || steps.filter.outputs.deps == 'true' || steps.filter.outputs.app_website == 'true' }}
|
||||
app-desktop-changes:
|
||||
description: 'Shared deps or `apps/desktop-ui/**` changed.'
|
||||
value: ${{ github.event_name != 'pull_request' || steps.filter.outputs.deps == 'true' || steps.filter.outputs.app_desktop == 'true' }}
|
||||
app-frontend-changes:
|
||||
description: 'Shared deps or `src/**` changed.'
|
||||
value: ${{ github.event_name != 'pull_request' || steps.filter.outputs.deps == 'true' || steps.filter.outputs.app_frontend == 'true' }}
|
||||
packages-changes:
|
||||
description: 'Shared deps or `packages/**` changed.'
|
||||
value: ${{ github.event_name != 'pull_request' || steps.filter.outputs.deps == 'true' || steps.filter.outputs.packages == 'true' }}
|
||||
storybook-changes:
|
||||
description: 'Shared deps or `.storybook/**` changed.'
|
||||
value: ${{ github.event_name != 'pull_request' || steps.filter.outputs.deps == 'true' || steps.filter.outputs.storybook == 'true' }}
|
||||
docs-changes:
|
||||
description: '`docs/**` or any `**/*.md` changed (deps NOT folded in).'
|
||||
value: ${{ github.event_name != 'pull_request' || steps.filter.outputs.docs == 'true' }}
|
||||
dependency-changes:
|
||||
description: 'Root `package.json`, `pnpm-lock.yaml`, or `pnpm-workspace.yaml` changed.'
|
||||
value: ${{ github.event_name != 'pull_request' || steps.filter.outputs.deps == 'true' }}
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Filter typed changes
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
id: filter
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
filters: |
|
||||
app_website:
|
||||
- 'apps/website/**'
|
||||
app_desktop:
|
||||
- 'apps/desktop-ui/**'
|
||||
app_frontend:
|
||||
- 'src/**'
|
||||
packages:
|
||||
- 'packages/**'
|
||||
storybook:
|
||||
- '.storybook/**'
|
||||
docs:
|
||||
- 'docs/**'
|
||||
- '**/*.md'
|
||||
deps:
|
||||
- 'package.json'
|
||||
- 'pnpm-lock.yaml'
|
||||
- 'pnpm-workspace.yaml'
|
||||
|
||||
- name: Filter relevant changes
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
id: relevant
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
predicate-quantifier: 'every'
|
||||
filters: |
|
||||
relevant:
|
||||
- '**'
|
||||
- '!apps/**'
|
||||
- '!docs/**'
|
||||
- '!.storybook/**'
|
||||
- '!**/*.md'
|
||||
17
.github/workflows/ci-dist-telemetry-scan.yaml
vendored
17
.github/workflows/ci-dist-telemetry-scan.yaml
vendored
@@ -12,30 +12,17 @@ permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should-run: ${{ steps.changes.outputs.should-run }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- id: changes
|
||||
uses: ./.github/actions/changes-filter
|
||||
|
||||
scan:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.should-run == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v4.4.0
|
||||
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
cache: 'pnpm'
|
||||
|
||||
23
.github/workflows/ci-oss-assets-validation.yaml
vendored
23
.github/workflows/ci-oss-assets-validation.yaml
vendored
@@ -14,29 +14,16 @@ permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should-run: ${{ steps.changes.outputs.should-run }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- id: changes
|
||||
uses: ./.github/actions/changes-filter
|
||||
|
||||
validate-fonts:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.should-run == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v4.4.0
|
||||
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
cache: 'pnpm'
|
||||
@@ -81,17 +68,15 @@ jobs:
|
||||
echo '✅ No proprietary fonts found in dist'
|
||||
|
||||
validate-licenses:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.should-run == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
|
||||
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v4.4.0
|
||||
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v6
|
||||
uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # v6.2.0
|
||||
with:
|
||||
node-version-file: '.nvmrc'
|
||||
cache: 'pnpm'
|
||||
|
||||
16
.github/workflows/ci-perf-report.yaml
vendored
16
.github/workflows/ci-perf-report.yaml
vendored
@@ -3,8 +3,10 @@ name: 'CI: Performance Report'
|
||||
on:
|
||||
push:
|
||||
branches: [main, core/*]
|
||||
paths-ignore: ['**/*.md']
|
||||
pull_request:
|
||||
branches-ignore: [wip/*, draft/*, temp/*]
|
||||
paths-ignore: ['**/*.md']
|
||||
|
||||
concurrency:
|
||||
group: perf-${{ github.ref }}
|
||||
@@ -14,20 +16,8 @@ permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should-run: ${{ steps.changes.outputs.should-run }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- id: changes
|
||||
uses: ./.github/actions/changes-filter
|
||||
|
||||
perf-tests:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.should-run == 'true' && github.repository == 'Comfy-Org/ComfyUI_frontend' }}
|
||||
if: github.repository == 'Comfy-Org/ComfyUI_frontend'
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 30
|
||||
container:
|
||||
|
||||
15
.github/workflows/ci-size-data.yaml
vendored
15
.github/workflows/ci-size-data.yaml
vendored
@@ -16,21 +16,8 @@ permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
if: github.repository == 'Comfy-Org/ComfyUI_frontend'
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should-run: ${{ steps.changes.outputs.should-run }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- id: changes
|
||||
uses: ./.github/actions/changes-filter
|
||||
|
||||
collect:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.should-run == 'true' }}
|
||||
if: github.repository == 'Comfy-Org/ComfyUI_frontend'
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
|
||||
35
.github/workflows/ci-tests-e2e.yaml
vendored
35
.github/workflows/ci-tests-e2e.yaml
vendored
@@ -4,6 +4,7 @@ name: 'CI: Tests E2E'
|
||||
on:
|
||||
push:
|
||||
branches: [main, master, core/*, desktop/*]
|
||||
paths-ignore: ['**/*.md']
|
||||
pull_request:
|
||||
branches-ignore: [wip/*, draft/*, temp/*]
|
||||
merge_group:
|
||||
@@ -14,20 +15,36 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
# Detect whether e2e-relevant files changed. Required checks see "skipped"
|
||||
# (which counts as passing) when only docs/apps/storybook files are touched,
|
||||
# avoiding the stall that paths-ignore would cause.
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should-run: ${{ steps.changes.outputs.should-run }}
|
||||
should_run: ${{ github.event_name != 'pull_request' || steps.filter.outputs.e2e }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- id: changes
|
||||
uses: ./.github/actions/changes-filter
|
||||
- name: Checkout repository
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
uses: actions/checkout@v6
|
||||
- name: Check for e2e-relevant changes
|
||||
if: ${{ github.event_name == 'pull_request' }}
|
||||
id: filter
|
||||
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3.0.2
|
||||
with:
|
||||
predicate-quantifier: 'every'
|
||||
filters: |
|
||||
e2e:
|
||||
- '**'
|
||||
- '!apps/**'
|
||||
- '!docs/**'
|
||||
- '!.storybook/**'
|
||||
- '!**/*.md'
|
||||
|
||||
setup:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.should-run == 'true' }}
|
||||
if: ${{ needs.changes.outputs.should_run == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
@@ -177,7 +194,7 @@ jobs:
|
||||
merge-reports:
|
||||
needs: [changes, playwright-tests-chromium-sharded]
|
||||
runs-on: ubuntu-latest
|
||||
if: ${{ !cancelled() && needs.changes.outputs.should-run == 'true' }}
|
||||
if: ${{ !cancelled() && needs.changes.outputs.should_run == 'true' }}
|
||||
steps:
|
||||
- name: Install pnpm
|
||||
uses: pnpm/action-setup@fc06bc1257f339d1d5d8b3a19a8cae5388b55320 # v4.4.0
|
||||
@@ -216,7 +233,7 @@ jobs:
|
||||
steps:
|
||||
- name: Check E2E results
|
||||
env:
|
||||
SHOULD_RUN: ${{ needs.changes.outputs.should-run }}
|
||||
SHOULD_RUN: ${{ needs.changes.outputs.should_run }}
|
||||
SHARDED: ${{ needs.playwright-tests-chromium-sharded.result }}
|
||||
BROWSERS: ${{ needs.playwright-tests.result }}
|
||||
run: |
|
||||
@@ -234,7 +251,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
if: >-
|
||||
${{
|
||||
needs.changes.outputs.should-run == 'true' &&
|
||||
needs.changes.outputs.should_run == 'true' &&
|
||||
github.event_name == 'pull_request' &&
|
||||
github.event.pull_request.head.repo.fork == false
|
||||
}}
|
||||
@@ -261,7 +278,7 @@ jobs:
|
||||
if: >-
|
||||
${{
|
||||
always() &&
|
||||
needs.changes.outputs.should-run == 'true' &&
|
||||
needs.changes.outputs.should_run == 'true' &&
|
||||
github.event_name == 'pull_request' &&
|
||||
github.event.pull_request.head.repo.fork == false
|
||||
}}
|
||||
|
||||
47
.github/workflows/ci-tests-storybook.yaml
vendored
47
.github/workflows/ci-tests-storybook.yaml
vendored
@@ -8,29 +8,10 @@ on:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
storybook-changes: ${{ steps.changes.outputs.storybook-changes }}
|
||||
app-frontend-changes: ${{ steps.changes.outputs.app-frontend-changes }}
|
||||
packages-changes: ${{ steps.changes.outputs.packages-changes }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- id: changes
|
||||
uses: ./.github/actions/changes-filter
|
||||
|
||||
# Post starting comment for non-forked PRs
|
||||
comment-on-pr-start:
|
||||
needs: changes
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
github.event_name == 'pull_request'
|
||||
&& github.event.pull_request.head.repo.fork == false
|
||||
&& (needs.changes.outputs.storybook-changes == 'true'
|
||||
|| needs.changes.outputs.app-frontend-changes == 'true'
|
||||
|| needs.changes.outputs.packages-changes == 'true')
|
||||
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == false
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
@@ -49,13 +30,8 @@ jobs:
|
||||
|
||||
# Build Storybook for all PRs (free Cloudflare deployment)
|
||||
storybook-build:
|
||||
needs: changes
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
github.event_name == 'pull_request'
|
||||
&& (needs.changes.outputs.storybook-changes == 'true'
|
||||
|| needs.changes.outputs.app-frontend-changes == 'true'
|
||||
|| needs.changes.outputs.packages-changes == 'true')
|
||||
if: github.event_name == 'pull_request'
|
||||
outputs:
|
||||
conclusion: ${{ steps.job-status.outputs.conclusion }}
|
||||
workflow-url: ${{ steps.workflow-url.outputs.url }}
|
||||
@@ -91,15 +67,8 @@ jobs:
|
||||
|
||||
# Chromatic deployment only for version-bump-* branches or manual triggers
|
||||
chromatic-deployment:
|
||||
needs: changes
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
github.event_name == 'workflow_dispatch'
|
||||
|| (github.event_name == 'pull_request'
|
||||
&& startsWith(github.head_ref, 'version-bump-')
|
||||
&& (needs.changes.outputs.storybook-changes == 'true'
|
||||
|| needs.changes.outputs.app-frontend-changes == 'true'
|
||||
|| needs.changes.outputs.packages-changes == 'true'))
|
||||
if: github.event_name == 'workflow_dispatch' || (github.event_name == 'pull_request' && startsWith(github.head_ref, 'version-bump-'))
|
||||
outputs:
|
||||
conclusion: ${{ steps.job-status.outputs.conclusion }}
|
||||
workflow-url: ${{ steps.workflow-url.outputs.url }}
|
||||
@@ -138,15 +107,9 @@ jobs:
|
||||
|
||||
# Deploy and comment for non-forked PRs only
|
||||
deploy-and-comment:
|
||||
needs: [changes, storybook-build]
|
||||
needs: [storybook-build]
|
||||
runs-on: ubuntu-latest
|
||||
if: |
|
||||
always()
|
||||
&& github.event_name == 'pull_request'
|
||||
&& github.event.pull_request.head.repo.fork == false
|
||||
&& (needs.changes.outputs.storybook-changes == 'true'
|
||||
|| needs.changes.outputs.app-frontend-changes == 'true'
|
||||
|| needs.changes.outputs.packages-changes == 'true')
|
||||
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == false && always()
|
||||
permissions:
|
||||
pull-requests: write
|
||||
contents: read
|
||||
|
||||
15
.github/workflows/ci-tests-unit.yaml
vendored
15
.github/workflows/ci-tests-unit.yaml
vendored
@@ -4,8 +4,10 @@ name: 'CI: Tests Unit'
|
||||
on:
|
||||
push:
|
||||
branches: [main, master, dev*, core/*, desktop/*]
|
||||
paths-ignore: ['**/*.md']
|
||||
pull_request:
|
||||
branches-ignore: [wip/*, draft/*, temp/*]
|
||||
paths-ignore: ['**/*.md']
|
||||
merge_group:
|
||||
|
||||
concurrency:
|
||||
@@ -13,20 +15,7 @@ concurrency:
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
should-run: ${{ steps.changes.outputs.should-run }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- id: changes
|
||||
uses: ./.github/actions/changes-filter
|
||||
|
||||
test:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.should-run == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
|
||||
@@ -52,9 +52,6 @@ jobs:
|
||||
run: vercel pull --yes --environment=preview
|
||||
|
||||
- name: Build project artifacts
|
||||
env:
|
||||
WEBSITE_ASHBY_API_KEY: ${{ secrets.WEBSITE_ASHBY_API_KEY }}
|
||||
WEBSITE_ASHBY_JOB_BOARD_NAME: ${{ secrets.WEBSITE_ASHBY_JOB_BOARD_NAME }}
|
||||
run: vercel build
|
||||
|
||||
- name: Fetch head commit metadata
|
||||
@@ -149,9 +146,6 @@ jobs:
|
||||
run: vercel pull --yes --environment=production
|
||||
|
||||
- name: Build project artifacts
|
||||
env:
|
||||
WEBSITE_ASHBY_API_KEY: ${{ secrets.WEBSITE_ASHBY_API_KEY }}
|
||||
WEBSITE_ASHBY_JOB_BOARD_NAME: ${{ secrets.WEBSITE_ASHBY_JOB_BOARD_NAME }}
|
||||
run: vercel build --prod
|
||||
|
||||
- name: Deploy project artifacts to Vercel
|
||||
|
||||
25
.github/workflows/ci-website-build.yaml
vendored
25
.github/workflows/ci-website-build.yaml
vendored
@@ -4,29 +4,23 @@ name: 'CI: Website Build'
|
||||
on:
|
||||
push:
|
||||
branches: [main, master, website/*]
|
||||
paths:
|
||||
- 'apps/website/**'
|
||||
- 'packages/design-system/**'
|
||||
- 'pnpm-lock.yaml'
|
||||
pull_request:
|
||||
branches-ignore: [wip/*, draft/*, temp/*]
|
||||
paths:
|
||||
- 'apps/website/**'
|
||||
- 'packages/design-system/**'
|
||||
- 'pnpm-lock.yaml'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
app-website-changes: ${{ steps.changes.outputs.app-website-changes }}
|
||||
packages-changes: ${{ steps.changes.outputs.packages-changes }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- id: changes
|
||||
uses: ./.github/actions/changes-filter
|
||||
|
||||
build:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.app-website-changes == 'true' || needs.changes.outputs.packages-changes == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
@@ -36,7 +30,4 @@ jobs:
|
||||
uses: ./.github/actions/setup-frontend
|
||||
|
||||
- name: Build website
|
||||
env:
|
||||
WEBSITE_ASHBY_API_KEY: ${{ secrets.WEBSITE_ASHBY_API_KEY }}
|
||||
WEBSITE_ASHBY_JOB_BOARD_NAME: ${{ secrets.WEBSITE_ASHBY_JOB_BOARD_NAME }}
|
||||
run: pnpm --filter @comfyorg/website build
|
||||
|
||||
30
.github/workflows/ci-website-e2e.yaml
vendored
30
.github/workflows/ci-website-e2e.yaml
vendored
@@ -3,29 +3,25 @@ name: 'CI: Website E2E'
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
paths:
|
||||
- 'apps/website/**'
|
||||
- 'packages/design-system/**'
|
||||
- 'packages/tailwind-utils/**'
|
||||
- 'pnpm-lock.yaml'
|
||||
pull_request:
|
||||
branches-ignore: [wip/*, draft/*, temp/*]
|
||||
paths:
|
||||
- 'apps/website/**'
|
||||
- 'packages/design-system/**'
|
||||
- 'packages/tailwind-utils/**'
|
||||
- 'pnpm-lock.yaml'
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.repository }}-${{ github.head_ref || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
changes:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
outputs:
|
||||
app-website-changes: ${{ steps.changes.outputs.app-website-changes }}
|
||||
packages-changes: ${{ steps.changes.outputs.packages-changes }}
|
||||
steps:
|
||||
- uses: actions/checkout@v6
|
||||
- id: changes
|
||||
uses: ./.github/actions/changes-filter
|
||||
|
||||
website-e2e:
|
||||
needs: changes
|
||||
if: ${{ needs.changes.outputs.app-website-changes == 'true' || needs.changes.outputs.packages-changes == 'true' }}
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: mcr.microsoft.com/playwright:v1.58.1-noble
|
||||
@@ -167,11 +163,7 @@ jobs:
|
||||
post-starting-comment:
|
||||
# Safe to comment from pull_request trigger: fork PRs are excluded by the guard below.
|
||||
# This avoids a ci-*/pr-* workflow_run split for a comment that must appear immediately.
|
||||
needs: changes
|
||||
if: |
|
||||
github.event_name == 'pull_request'
|
||||
&& github.event.pull_request.head.repo.fork == false
|
||||
&& (needs.changes.outputs.app-website-changes == 'true' || needs.changes.outputs.packages-changes == 'true')
|
||||
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.fork == false
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
pull-requests: write
|
||||
|
||||
59
.github/workflows/release-website.yaml
vendored
59
.github/workflows/release-website.yaml
vendored
@@ -1,59 +0,0 @@
|
||||
# Description: Manual workflow to refresh the apps/website Ashby roles snapshot
|
||||
# and open a PR. Merging the PR triggers the existing Vercel website production
|
||||
# deploy via ci-vercel-website-preview.yaml.
|
||||
name: 'Release: Website'
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: release-website
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
refresh-snapshot:
|
||||
if: github.repository == 'Comfy-Org/ComfyUI_frontend'
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v6
|
||||
with:
|
||||
ref: main
|
||||
persist-credentials: false
|
||||
|
||||
- name: Refresh Ashby snapshot
|
||||
uses: ./.github/actions/ashby-pull
|
||||
with:
|
||||
api_key: ${{ secrets.WEBSITE_ASHBY_API_KEY }}
|
||||
job_board_name: ${{ secrets.WEBSITE_ASHBY_JOB_BOARD_NAME }}
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8.1.0
|
||||
with:
|
||||
token: ${{ secrets.PR_GH_TOKEN }}
|
||||
commit-message: 'chore(website): refresh Ashby roles snapshot'
|
||||
title: 'chore(website): refresh Ashby roles snapshot'
|
||||
body: |
|
||||
Automated refresh of `apps/website/src/data/ashby-roles.snapshot.json`
|
||||
from the Ashby job board API.
|
||||
|
||||
**Flow:**
|
||||
1. `Release: Website` workflow ran (manual trigger).
|
||||
2. This PR opens with the regenerated snapshot.
|
||||
3. `CI: Vercel Website Preview` deploys a preview for review.
|
||||
4. Merging to `main` triggers the production Vercel deploy.
|
||||
|
||||
The snapshot fallback in `apps/website/src/utils/ashby.ts` remains
|
||||
intact: builds without `WEBSITE_ASHBY_API_KEY` continue to use the
|
||||
committed snapshot.
|
||||
|
||||
Triggered by workflow run `${{ github.run_id }}`.
|
||||
branch: chore/refresh-ashby-snapshot-${{ github.run_id }}
|
||||
base: main
|
||||
labels: |
|
||||
Release:Website
|
||||
delete-branch: true
|
||||
@@ -1,58 +0,0 @@
|
||||
# Comfy
|
||||
|
||||
> Comfy is the AI creation engine for visual professionals who demand control over every model, every parameter, and every output. Built around ComfyUI — the open-source node-graph runtime with 60,000+ community nodes and thousands of shared workflows — Comfy ships as a free local app, a managed cloud, an API, and an enterprise platform.
|
||||
|
||||
The Comfy ecosystem spans four surfaces:
|
||||
|
||||
- **ComfyUI (local)** — the open-source node-graph runtime that runs models on your own hardware.
|
||||
- **Comfy Cloud** — managed ComfyUI in the browser, with hosted models and storage.
|
||||
- **Comfy API** — a REST API for triggering workflows from your own apps and pipelines.
|
||||
- **Comfy Enterprise** — single-tenant deployments, BYO keys, data ownership, and orchestration for teams.
|
||||
|
||||
Studios building with Comfy include Series Entertainment, Moment Factory, Open Story Movement, and Ubisoft (La Forge). Use cases concentrate in VFX & animation, advertising & creative studios, gaming, and eCommerce/fashion.
|
||||
|
||||
## Product
|
||||
|
||||
- [Homepage](https://comfy.org/): Overview of Comfy and the four product surfaces (Local, Cloud, API, Enterprise).
|
||||
- [Download Comfy (Local)](https://comfy.org/download/): Free desktop app for macOS, Windows, and Linux — runs ComfyUI on your own GPU.
|
||||
- [Comfy Cloud](https://comfy.org/cloud/): Managed ComfyUI in the browser with hosted models and storage; no local install required.
|
||||
- [Comfy Cloud Pricing](https://comfy.org/cloud/pricing/): Plans and per-credit pricing for individuals and teams using Comfy Cloud.
|
||||
- [Comfy API](https://comfy.org/api/): REST API for triggering ComfyUI workflows programmatically from external apps.
|
||||
- [Comfy Enterprise](https://comfy.org/cloud/enterprise/): Single-tenant ComfyUI deployments with BYO keys, orchestration, and data-ownership guarantees.
|
||||
|
||||
## Workflows and Gallery
|
||||
|
||||
- [Workflow Gallery](https://comfy.org/gallery/): Curated showcase of ComfyUI outputs — images, video, and 3D — produced by the community.
|
||||
- [Community Workflows](https://www.comfy.org/workflows/): Browseable library of community-shared ComfyUI workflows you can load and remix.
|
||||
|
||||
## Customers and Case Studies
|
||||
|
||||
- [Customer Stories](https://comfy.org/customers/): Index of named customers and how they use ComfyUI in production.
|
||||
- [Series Entertainment](https://comfy.org/customers/series-entertainment/): How Series Entertainment rebuilt game and video production around ComfyUI.
|
||||
- [Moment Factory](https://comfy.org/customers/moment-factory/): Architectural-scale 3D projection mapping reimagined with ComfyUI at Moment Factory.
|
||||
- [Ubisoft — Chord](https://comfy.org/customers/ubisoft-chord/): Ubisoft La Forge open-sourcing the Chord model and its ComfyUI integration.
|
||||
- [Open Story Movement](https://comfy.org/customers/open-story-movement/): How an open-source movement around AI storytelling builds on ComfyUI.
|
||||
|
||||
## Developers and Documentation
|
||||
|
||||
- [ComfyUI Docs](https://docs.comfy.org/): Official documentation for installing, configuring, and extending ComfyUI.
|
||||
- [ComfyUI on GitHub](https://github.com/comfyanonymous/ComfyUI): Source repository for the open-source ComfyUI runtime.
|
||||
- [Comfy-Org on GitHub](https://github.com/Comfy-Org): Organization-wide repositories — frontend, registry, manager, docs, and tooling.
|
||||
- [Comfy Registry](https://registry.comfy.org/): Public registry of ComfyUI custom nodes and extensions, with versioning and search.
|
||||
|
||||
## Company
|
||||
|
||||
- [About Comfy](https://comfy.org/about/): Company background, mission, and the team behind ComfyUI.
|
||||
- [Careers](https://comfy.org/careers/): Open roles across engineering, design, product, and go-to-market.
|
||||
- [Contact](https://comfy.org/contact/): Sales, partnership, and general contact form.
|
||||
- [Blog](https://blog.comfy.org/): Product announcements, technical deep-dives, and customer stories.
|
||||
- [Privacy Policy](https://comfy.org/privacy-policy/): How Comfy collects, uses, and protects personal information.
|
||||
- [Terms of Service](https://comfy.org/terms-of-service/): Terms governing use of ComfyUI and related Comfy services.
|
||||
|
||||
## Optional
|
||||
|
||||
- [简体中文 / Chinese homepage](https://comfy.org/zh-CN/): Simplified Chinese localization of the main site.
|
||||
- [Series Entertainment — long-form case study](https://comfy.org/cloud/enterprise-case-studies/how-series-entertainment-rebuilt-game-and-video-production-with-comfyui): Extended write-up of the Series Entertainment deployment.
|
||||
- [Moment Factory — long-form case study](https://comfy.org/cloud/enterprise-case-studies/comfyui-at-architectural-scale-how-moment-factory-reimagined-3d-projection-mapping): Extended write-up of Moment Factory's projection-mapping pipeline.
|
||||
- [Ubisoft Chord announcement (blog)](https://blog.comfy.org/p/ubisoft-open-sources-the-chord-model): Original blog post announcing Ubisoft's open-source Chord model.
|
||||
- [Open-source storytelling (blog)](https://blog.comfy.org/p/how-open-source-is-fueling-the-open): Blog post on how open source is fueling the Open Story Movement.
|
||||
@@ -1,33 +1,4 @@
|
||||
# robots.txt for comfy.org
|
||||
# Open to all crawlers — including AI/LLM bots — for maximum visibility
|
||||
# in AI-powered search, chat-based answer engines, and traditional search.
|
||||
# Granular UAs are listed explicitly to signal intent; rules are shared
|
||||
# via stacked user-agent records (RFC 9309 §2.2).
|
||||
|
||||
User-agent: *
|
||||
User-agent: Googlebot
|
||||
User-agent: Bingbot
|
||||
User-agent: DuckDuckBot
|
||||
User-agent: GPTBot
|
||||
User-agent: ChatGPT-User
|
||||
User-agent: OAI-SearchBot
|
||||
User-agent: Google-Extended
|
||||
User-agent: ClaudeBot
|
||||
User-agent: Claude-Web
|
||||
User-agent: anthropic-ai
|
||||
User-agent: PerplexityBot
|
||||
User-agent: Perplexity-User
|
||||
User-agent: Applebot
|
||||
User-agent: Applebot-Extended
|
||||
User-agent: Bytespider
|
||||
User-agent: Amazonbot
|
||||
User-agent: CCBot
|
||||
User-agent: Meta-ExternalAgent
|
||||
User-agent: Meta-ExternalFetcher
|
||||
User-agent: Diffbot
|
||||
Allow: /
|
||||
Disallow: /_astro/
|
||||
Disallow: /_website/
|
||||
Disallow: /_vercel/
|
||||
|
||||
Sitemap: https://comfy.org/sitemap-index.xml
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
<script setup lang="ts">
|
||||
import type { Locale } from '../../i18n/translations'
|
||||
import { externalLinks } from '../../config/routes'
|
||||
import { t } from '../../i18n/translations'
|
||||
import BrandButton from '../common/BrandButton.vue'
|
||||
|
||||
const { locale = 'en' } = defineProps<{ locale?: Locale }>()
|
||||
</script>
|
||||
@@ -34,15 +32,6 @@ const { locale = 'en' } = defineProps<{ locale?: Locale }>()
|
||||
>
|
||||
{{ t('hero.subtitle', locale) }}
|
||||
</p>
|
||||
|
||||
<BrandButton
|
||||
:href="externalLinks.workflows"
|
||||
variant="outline"
|
||||
size="lg"
|
||||
class="mt-8 w-full p-4 uppercase lg:w-auto lg:min-w-60"
|
||||
>
|
||||
{{ t('hero.runFirstWorkflow', locale) }}
|
||||
</BrandButton>
|
||||
</div>
|
||||
</section>
|
||||
</template>
|
||||
|
||||
@@ -52,15 +52,6 @@ export const customerStories: CustomerStory[] = [
|
||||
detailPrefix: 'customers.detail.ubisoft-chord',
|
||||
readMoreHref:
|
||||
'https://blog.comfy.org/p/ubisoft-open-sources-the-chord-model'
|
||||
},
|
||||
{
|
||||
slug: 'groove-jones',
|
||||
image:
|
||||
'https://media.comfy.org/website/customers/groove-jones/crocs-nfl-dicks-sporting-goods-fooh.webp',
|
||||
category: 'customers.story.groove-jones.category',
|
||||
title: 'customers.story.groove-jones.title',
|
||||
body: 'customers.story.groove-jones.body',
|
||||
detailPrefix: 'customers.detail.groove-jones'
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
@@ -11,10 +11,6 @@ const translations = {
|
||||
'zh-CN':
|
||||
'Comfy 是面向专业视觉人士的 AI 创作引擎。您可以精确掌控每个模型、每个参数和每个输出。'
|
||||
},
|
||||
'hero.runFirstWorkflow': {
|
||||
en: 'Run your first workflow',
|
||||
'zh-CN': '运行你的第一个工作流'
|
||||
},
|
||||
|
||||
// ProductShowcaseSection
|
||||
'showcase.subtitle1': {
|
||||
@@ -2247,20 +2243,6 @@ const translations = {
|
||||
'zh-CN':
|
||||
'育碧 La Forge 开源了 CHORD PBR 材质估算模型及 ComfyUI 自定义节点,为 AAA 游戏制作实现了端到端的纹理生成工作流。'
|
||||
},
|
||||
'customers.story.groove-jones.category': {
|
||||
en: 'CASE STUDY',
|
||||
'zh-CN': '案例研究'
|
||||
},
|
||||
'customers.story.groove-jones.title': {
|
||||
en: "How Groove Jones Delivered a Holiday FOOH Campaign for Dick's Sporting Goods with Comfy",
|
||||
'zh-CN':
|
||||
"Groove Jones 如何借助 Comfy 为 Dick's Sporting Goods 打造节日 FOOH 营销"
|
||||
},
|
||||
'customers.story.groove-jones.body': {
|
||||
en: 'Groove Jones, a Dallas-based creative studio, used Comfy to deliver a hyper-realistic FOOH holiday campaign for the Crocs x NFL collection on a fast-approaching deadline.',
|
||||
'zh-CN':
|
||||
'达拉斯创意工作室 Groove Jones 借助 Comfy,在紧迫的节日档期内为 Crocs x NFL 联名系列交付了超写实的 FOOH 营销内容。'
|
||||
},
|
||||
'customers.story.readMore': {
|
||||
en: 'READ MORE ON THIS TOPIC',
|
||||
'zh-CN': '阅读更多相关内容'
|
||||
@@ -3294,227 +3276,6 @@ const translations = {
|
||||
'zh-CN': 'ComfyUI 博客'
|
||||
},
|
||||
|
||||
// Customer Detail: Groove Jones
|
||||
// Topic 1: Intro
|
||||
'customers.detail.groove-jones.topic-1.label': {
|
||||
en: 'INTRO',
|
||||
'zh-CN': '简介'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-1.block.0': {
|
||||
en: 'Groove Jones, a Dallas-based creative studio, builds AI-driven campaigns and immersive experiences for major brands where photoreal polish, creative ambition, and social-ready speed all have to land together. As their work expanded across AI Video, AR, VR, and WebGL for clients like Crocs, the NFL, and Dick\u2019s Sporting Goods, they faced a recurring challenge: delivering feature-film-quality VFX on commercial timelines and budgets.',
|
||||
'zh-CN':
|
||||
'位于达拉斯的创意工作室 Groove Jones,为众多大牌客户打造由 AI 驱动的营销活动和沉浸式体验,需要同时兼顾照片级的精细度、创意野心,以及适配社交媒体的交付速度。随着他们为 Crocs、NFL 和 Dick\u2019s Sporting Goods 等客户的工作扩展到 AI 视频、AR、VR 和 WebGL,他们反复遇到同一个挑战:用商业项目的工期和预算,交付电影级的 VFX 质量。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-1.block.1': {
|
||||
en: 'For the Crocs x NFL collection holiday launch, that challenge came to a head. The brief called for hyper-realistic video of giant NFL-licensed Crocs parachuting into real Dick\u2019s Sporting Goods parking lots, across multiple locations, delivered on a fast-approaching holiday deadline. A live-action shoot plus a traditional CG pipeline was off the table.',
|
||||
'zh-CN':
|
||||
'在 Crocs x NFL 联名系列的节日上市项目中,这个挑战被推到了极致。Brief 要求制作超写实视频:巨型 NFL 授权 Crocs 鞋款跳伞落入多个真实的 Dick\u2019s Sporting Goods 停车场,并要在紧迫的节日档期前交付。实地拍摄加传统 CG 流水线的方案,已经完全行不通。'
|
||||
},
|
||||
// Topic 2: The Output
|
||||
'customers.detail.groove-jones.topic-2.label': {
|
||||
en: 'THE OUTPUT',
|
||||
'zh-CN': '交付成果'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-2.title': {
|
||||
en: 'The Output Groove Jones Achieved Using Comfy',
|
||||
'zh-CN': 'Groove Jones 借助 Comfy 实现的交付成果'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-2.block.0': {
|
||||
en: 'A full FOOH (faux out-of-home) social campaign delivered on a tight holiday deadline\nHyper-realistic videos of giant NFL-licensed Crocs parachuting onto Dick\u2019s Sporting Goods parking lots\nVertical 9:16 deliverables at 2K for Instagram Reels, TikTok, and YouTube Shorts\nSame-day iteration on client notes instead of week-long asset updates\nWinner, Aaron Awards 2024: Best AI Workflow for Production',
|
||||
'zh-CN':
|
||||
'在紧迫的节日档期内交付完整的 FOOH(虚构户外广告)社媒营销活动\n超写实视频:巨型 NFL 授权 Crocs 鞋款跳伞落入 Dick\u2019s Sporting Goods 停车场\n面向 Instagram Reels、TikTok、YouTube Shorts 的 9:16 竖屏 2K 交付物\n客户反馈当天迭代,不再需要数周的资产更新周期\n荣获 2024 年 Aaron Awards:最佳 AI 制作工作流奖'
|
||||
},
|
||||
// Topic 3: The Problem
|
||||
'customers.detail.groove-jones.topic-3.label': {
|
||||
en: 'THE PROBLEM',
|
||||
'zh-CN': '挑战'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-3.title': {
|
||||
en: 'The Problem Groove Jones Was Trying to Solve',
|
||||
'zh-CN': 'Groove Jones 试图解决的问题'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-3.block.0': {
|
||||
en: 'A traditional pipeline for this creative meant a live-action shoot at multiple store locations plus a full CG build: high-res modeling of every team\u2019s clog, look development, lighting, rendering, compositing, and a new render every time the client wanted a variation. It also meant a large crew (modelers, texture artists, lighting artists, compositors) and a schedule measured in months. Neither the budget nor the holiday window supported that path.',
|
||||
'zh-CN':
|
||||
'按照传统流水线做这个创意,意味着要在多家门店实地拍摄,加上完整的 CG 制作:每支球队鞋款的高精建模、look development、灯光、渲染、合成,客户每次想要新变体都要重新渲染。这也意味着庞大的团队(建模师、纹理师、灯光师、合成师),以及以"月"为单位的工期。无论是预算还是节日档期,都无法支撑这条路径。'
|
||||
},
|
||||
// Topic 4: How Comfy Solved the Problem
|
||||
'customers.detail.groove-jones.topic-4.label': {
|
||||
en: 'HOW COMFY SOLVED THE PROBLEM',
|
||||
'zh-CN': 'Comfy 如何解决问题'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-4.title': {
|
||||
en: 'How Groove Jones Used Comfy to Solve the Problem',
|
||||
'zh-CN': 'Groove Jones 如何用 Comfy 解决问题'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-4.block.0': {
|
||||
en: 'Groove Jones\u2019s Senior Creative Technologist, Doug Hogan, rebuilt the production process around Comfy\u2019s node-based workflow system, using their proprietary GrooveTech GenVFX pipeline. Custom LoRAs handled brand accuracy, a single Comfy graph orchestrated multiple generative models, and Nuke handled final polish. For a team with feature-film and commercial roots, the environment was immediately familiar.',
|
||||
'zh-CN':
|
||||
'Groove Jones 的高级创意技术总监 Doug Hogan 围绕 Comfy 的节点式工作流系统重新搭建了制作流程,并基于他们自研的 GrooveTech GenVFX 流水线展开。自定义 LoRA 负责保证品牌一致性,一张 Comfy 图编排多个生成模型,Nuke 负责最终精修。对于有电影和广告制作背景的团队,这套环境上手没有任何门槛。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-4.block.1.text': {
|
||||
en: 'Comfy felt very similar to working inside a traditional CG and compositing pipeline. Node-based logic, clear data flow, modular builds. It felt natural to our artists already.',
|
||||
'zh-CN':
|
||||
'Comfy 用起来非常像传统 CG 和合成流水线:节点逻辑、清晰的数据流、模块化构建。我们的艺术家用起来毫无违和感。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-4.block.1.name': {
|
||||
en: 'Doug Hogan | Senior Creative Technologist @ Groove Jones',
|
||||
'zh-CN': 'Doug Hogan | Groove Jones 高级创意技术总监'
|
||||
},
|
||||
// Topic 5: Brand-Trained LoRAs
|
||||
'customers.detail.groove-jones.topic-5.label': {
|
||||
en: 'BRAND-TRAINED LORAS',
|
||||
'zh-CN': '品牌定制 LORA'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-5.title': {
|
||||
en: 'Brand-Trained LoRAs for Hero Assets',
|
||||
'zh-CN': '为主视觉资产定制的品牌 LoRA'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-5.block.0': {
|
||||
en: 'Groove Jones trained custom LoRAs on the Crocs NFL Team Clogs and on Dick\u2019s Sporting Goods storefronts, so every generation came out anchored in brand-accurate references. Real team colorways, real product silhouettes, and real store exteriors stayed consistent across shots without per-frame correction, replacing what would normally take weeks of manual look development.',
|
||||
'zh-CN':
|
||||
'Groove Jones 基于 Crocs NFL 球队联名鞋款和 Dick\u2019s Sporting Goods 门店外景训练了定制 LoRA,让每一次生成都能锚定品牌精准的参考素材。真实的球队配色、产品轮廓和门店外观在不同镜头之间保持一致,不需要逐帧修正——而这通常意味着数周的 look development 工作量。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-5.block.1.src': {
|
||||
en: 'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-team-lineup.webp',
|
||||
'zh-CN':
|
||||
'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-team-lineup.webp'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-5.block.1.alt': {
|
||||
en: 'Grid of brand-accurate NFL team Crocs generated via custom LoRAs',
|
||||
'zh-CN': '通过定制 LoRA 生成的多支 NFL 球队联名 Crocs 网格'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-5.block.1.caption': {
|
||||
en: 'Brand-accurate NFL team colorways generated through custom LoRAs.',
|
||||
'zh-CN': '通过定制 LoRA 生成的、与品牌精准一致的 NFL 球队配色。'
|
||||
},
|
||||
// Topic 6: Multi-Model Orchestration
|
||||
'customers.detail.groove-jones.topic-6.label': {
|
||||
en: 'MULTI-MODEL ORCHESTRATION',
|
||||
'zh-CN': '多模型编排'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-6.title': {
|
||||
en: 'Multi-Model Orchestration in a Single Graph',
|
||||
'zh-CN': '单张图内的多模型编排'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-6.block.0': {
|
||||
en: 'The creative required different generative models at different stages: Flux for key-frame still development, Gemini Flash 2.5 (Nano Banana) for fast ideation and variants, and Veo 3.1 plus Moonvalley\u2019s Marey for final video generation. Comfy routed between all four inside one graph, so outputs from one model fed directly into the next without ever leaving the environment.',
|
||||
'zh-CN':
|
||||
'这个创意在不同阶段需要不同的生成模型:Flux 用于关键帧静帧开发,Gemini Flash 2.5(Nano Banana)用于快速构思和变体生成,Veo 3.1 加上 Moonvalley 的 Marey 用于最终的视频生成。Comfy 在一张图里就把这四个模型串起来,前一个模型的输出直接喂给下一个模型,全程无需切换环境。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-6.block.1.text': {
|
||||
en: 'The Comfy community develops at an almost exponential curve, and we were able to leverage their existing nodes and tools to solve very specific production challenges instead of reinventing the wheel ourselves.',
|
||||
'zh-CN':
|
||||
'Comfy 社区几乎是指数级增长的,我们可以直接利用社区已有的节点和工具去解决非常具体的制作问题,而不必自己重新造轮子。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-6.block.1.name': {
|
||||
en: 'Dale Carman | Co-founder @ Groove Jones',
|
||||
'zh-CN': 'Dale Carman | Groove Jones 联合创始人'
|
||||
},
|
||||
// Topic 7: The Pipeline
|
||||
'customers.detail.groove-jones.topic-7.label': {
|
||||
en: 'THE PIPELINE',
|
||||
'zh-CN': '流水线'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.title': {
|
||||
en: 'Storyboards to Previz to Final Shot in One Pipeline',
|
||||
'zh-CN': '从故事板到 Previz 再到成片,全部在一条流水线内'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.0': {
|
||||
en: 'The workflow opened with traditional storyboards for narrative approval, then moved into CGI blocking to lock composition, camera framing, and story beats. Comfy drove generation from there: the shoe drop, the parking lot reactions, the crowd coverage, and the environmental conversions that turned static summer storefronts into snow-covered holiday scenes, all inside the same graph.',
|
||||
'zh-CN':
|
||||
'工作流从传统故事板开始用于叙事确认,再进入 CGI blocking,锁定构图、镜头取景和叙事节奏。从这里开始 Comfy 接管生成:鞋款空投、停车场反应镜头、人群覆盖、把夏季静态门店外景转换成被雪覆盖的节日场景——全部在同一张图里完成。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.1.src': {
|
||||
en: 'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-dicks-storyboards.webp',
|
||||
'zh-CN':
|
||||
'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-dicks-storyboards.webp'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.1.alt': {
|
||||
en: 'Storyboard grid for the Crocs x NFL holiday campaign',
|
||||
'zh-CN': 'Crocs x NFL 节日营销的故事板网格'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.1.caption': {
|
||||
en: 'Grayscale storyboards used to lock narrative beats before generation.',
|
||||
'zh-CN': '在生成之前用于锁定叙事节奏的灰度故事板。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.2.src': {
|
||||
en: 'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-fooh-sequence.webp',
|
||||
'zh-CN':
|
||||
'https://media.comfy.org/website/customers/groove-jones/nfl-crocs-fooh-sequence.webp'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.2.alt': {
|
||||
en: 'Composition progression from blocking to mid-render to final shot',
|
||||
'zh-CN': '从 blocking 到中间渲染再到最终镜头的构图演进'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-7.block.2.caption': {
|
||||
en: 'Composition progression: wireframe blocking, mid-render, and final shot.',
|
||||
'zh-CN': '构图演进:线框 blocking、中间渲染、最终成片。'
|
||||
},
|
||||
// Topic 8: Version Control
|
||||
'customers.detail.groove-jones.topic-8.label': {
|
||||
en: 'VERSION CONTROL',
|
||||
'zh-CN': '版本管理'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-8.title': {
|
||||
en: 'Workflow Files as Version Control',
|
||||
'zh-CN': '把工作流文件当作版本管理'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-8.block.0': {
|
||||
en: 'Every variant of every shot lived as a Comfy workflow file, which doubled as version control. When notes came in requesting a different team colorway, store exterior, or time of day, the team duplicated a branch instead of rebuilding, which made same-day iteration possible. GPU usage and API credit burn were trackable inside the same environment as the work itself, giving Production real-time visibility into compute cost per iteration.',
|
||||
'zh-CN':
|
||||
'每个镜头的每个变体都以 Comfy 工作流文件的形式存在,文件本身就是版本管理。当客户反馈要求换一支球队配色、换一个门店外景或者换一个时间段时,团队只需复制一个分支,而不是重建——这才让"当天迭代"成为可能。GPU 使用量和 API 额度消耗也都能在同一个环境里追踪到,让制作部门实时看到每次迭代的算力成本。'
|
||||
},
|
||||
// Topic 9: Finishing in Nuke
|
||||
'customers.detail.groove-jones.topic-9.label': {
|
||||
en: 'FINISHING IN NUKE',
|
||||
'zh-CN': 'Nuke 终修'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-9.title': {
|
||||
en: 'Finishing in Nuke',
|
||||
'zh-CN': '在 Nuke 中完成终修'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-9.block.0': {
|
||||
en: 'Generated shots moved into Nuke for final compositing: falling snow, camera shake, crowd ambience, holiday audio, and 2K mastering in 9:16 for Instagram Reels, TikTok, and YouTube Shorts. Because Comfy handled generation cleanly, Nuke focused on polish and motion enhancement rather than patching generative artifacts.',
|
||||
'zh-CN':
|
||||
'生成的镜头进入 Nuke 完成最终合成:飘雪、镜头抖动、人群环境音、节日氛围音效,以及面向 Instagram Reels、TikTok、YouTube Shorts 的 9:16 2K 母带。由于 Comfy 把生成环节处理得很干净,Nuke 可以专注于精修和动态增强,而不是去修补生成模型留下的瑕疵。'
|
||||
},
|
||||
// Topic 10: The Takeaway
|
||||
'customers.detail.groove-jones.topic-10.label': {
|
||||
en: 'THE TAKEAWAY',
|
||||
'zh-CN': '总结'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.title': {
|
||||
en: 'Conclusion',
|
||||
'zh-CN': '结语'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.0': {
|
||||
en: 'By building the FOOH pipeline inside Comfy, Groove Jones turned a brief that would have required an expensive live-action shoot plus months of CG into a fast, iterative, single-environment workflow the client could direct in real time. The project recently won the Aaron Award for Best AI Workflow for Production.',
|
||||
'zh-CN':
|
||||
'通过在 Comfy 中搭建整套 FOOH 流水线,Groove Jones 把一个原本需要昂贵实地拍摄加数月 CG 制作的项目,变成了一套高速迭代、单一环境、客户可以实时指挥的工作流。该项目近期还荣获 Aaron Award 的"最佳 AI 制作工作流"奖。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.1.text': {
|
||||
en: 'At Groove Jones, we care deeply about delivering work that makes people say WOW! But we also care about delivering on time and on budget. VFX projects used to operate at razor thin margins. Comfy solved that for us.',
|
||||
'zh-CN':
|
||||
'在 Groove Jones,我们非常在意交付让人说"WOW!"的作品,但我们同样在意按时按预算交付。VFX 项目以前的利润率薄得像刀刃,Comfy 帮我们彻底解决了这个问题。'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.1.name': {
|
||||
en: 'Dale Carman | Co-founder @ Groove Jones',
|
||||
'zh-CN': 'Dale Carman | Groove Jones 联合创始人'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.2.label': {
|
||||
en: 'GROOVE JONES CONTRIBUTORS',
|
||||
'zh-CN': 'GROOVE JONES 贡献者'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.2.name': {
|
||||
en: 'TBD',
|
||||
'zh-CN': '待补充'
|
||||
},
|
||||
'customers.detail.groove-jones.topic-10.block.2.role': {
|
||||
en: 'TBD',
|
||||
'zh-CN': '待补充'
|
||||
},
|
||||
|
||||
// Contact – FormSection
|
||||
'contact.form.badge': {
|
||||
en: 'CONTACT SALES',
|
||||
|
||||
@@ -7,15 +7,6 @@
|
||||
"github": {
|
||||
"enabled": false
|
||||
},
|
||||
"headers": [
|
||||
{
|
||||
"source": "/(.*)",
|
||||
"has": [
|
||||
{ "type": "host", "value": "website-frontend-comfyui.vercel.app" }
|
||||
],
|
||||
"headers": [{ "key": "X-Robots-Tag", "value": "index, follow" }]
|
||||
}
|
||||
],
|
||||
"redirects": [
|
||||
{
|
||||
"source": "/pricing",
|
||||
|
||||
@@ -1,27 +0,0 @@
|
||||
{
|
||||
"last_node_id": 1,
|
||||
"last_link_id": 0,
|
||||
"nodes": [
|
||||
{
|
||||
"id": 1,
|
||||
"type": "Preview3D",
|
||||
"pos": [50, 50],
|
||||
"size": [450, 600],
|
||||
"flags": {},
|
||||
"order": 0,
|
||||
"mode": 0,
|
||||
"inputs": [],
|
||||
"outputs": [],
|
||||
"properties": {
|
||||
"Node name for S&R": "Preview3D",
|
||||
"Last Time Model File": "nonexistent_model.glb"
|
||||
},
|
||||
"widgets_values": ["nonexistent_model.glb"]
|
||||
}
|
||||
],
|
||||
"links": [],
|
||||
"groups": [],
|
||||
"config": {},
|
||||
"extra": { "ds": { "offset": [0, 0], "scale": 1 } },
|
||||
"version": 0.4
|
||||
}
|
||||
@@ -119,15 +119,7 @@
|
||||
{ "name": "CLIP", "type": "CLIP", "links": [3, 5], "slot_index": 1 },
|
||||
{ "name": "VAE", "type": "VAE", "links": [8], "slot_index": 2 }
|
||||
],
|
||||
"properties": {
|
||||
"models": [
|
||||
{
|
||||
"name": "v1-5-pruned-emaonly-fp16.safetensors",
|
||||
"url": "https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors",
|
||||
"directory": "checkpoints"
|
||||
}
|
||||
]
|
||||
},
|
||||
"properties": {},
|
||||
"widgets_values": ["v1-5-pruned-emaonly-fp16.safetensors"]
|
||||
}
|
||||
],
|
||||
|
||||
@@ -211,8 +211,7 @@ export const TestIds = {
|
||||
queue: {
|
||||
overlayToggle: 'queue-overlay-toggle',
|
||||
clearHistoryAction: 'clear-history-action',
|
||||
jobAssetsList: 'job-assets-list',
|
||||
notificationBanner: 'queue-notification-banner'
|
||||
jobAssetsList: 'job-assets-list'
|
||||
},
|
||||
errors: {
|
||||
imageLoadError: 'error-loading-image',
|
||||
|
||||
@@ -282,57 +282,6 @@ test.describe('Load3D', () => {
|
||||
})
|
||||
})
|
||||
|
||||
test.describe('Load3D silent 404 on missing output model', () => {
|
||||
test('Does not show an error toast when the output model file is missing (404)', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
// Intercept model fetch and return 404 to simulate a missing output file
|
||||
// (e.g. shared workflow opened on a machine that never ran it)
|
||||
await comfyPage.page.route('**/view?**', (route) =>
|
||||
route.fulfill({ status: 404, body: 'Not Found' })
|
||||
)
|
||||
|
||||
// This workflow has a Preview3D node with Last Time Model File set,
|
||||
// triggering the loadFolder: 'output' + silentOnNotFound: true path.
|
||||
await comfyPage.settings.setSetting('Comfy.VueNodes.Enabled', true)
|
||||
|
||||
// Wait for the 404 response before asserting — gives the load attempt time
|
||||
// to complete without using waitForTimeout
|
||||
const responsePromise = comfyPage.page.waitForResponse('**/view?**')
|
||||
await comfyPage.workflow.loadWorkflow('3d/load3d_missing_model')
|
||||
await responsePromise
|
||||
|
||||
await expect(
|
||||
comfyPage.toast.visibleToasts.filter({ hasText: 'Error loading model' })
|
||||
).toHaveCount(0)
|
||||
})
|
||||
|
||||
test('Shows an error toast when a non-404 error occurs loading the output model', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
// Intercept with a 500 to simulate a real server error (not 404) — toast must appear
|
||||
await comfyPage.page.route('**/view?**', (route) =>
|
||||
route.fulfill({ status: 500, body: 'Internal Server Error' })
|
||||
)
|
||||
|
||||
await comfyPage.settings.setSetting('Comfy.VueNodes.Enabled', true)
|
||||
|
||||
const responsePromise = comfyPage.page.waitForResponse('**/view?**')
|
||||
await comfyPage.workflow.loadWorkflow('3d/load3d_missing_model')
|
||||
await responsePromise
|
||||
|
||||
await expect
|
||||
.poll(
|
||||
() =>
|
||||
comfyPage.toast.visibleToasts
|
||||
.filter({ hasText: 'Error loading model' })
|
||||
.count(),
|
||||
{ timeout: 10000 }
|
||||
)
|
||||
.toBeGreaterThan(0)
|
||||
})
|
||||
})
|
||||
|
||||
test.describe('Load3D initialization failure', () => {
|
||||
test('Surfaces a toast when the THREE.WebGLRenderer cannot be created', async ({
|
||||
comfyPage
|
||||
|
||||
@@ -1,164 +0,0 @@
|
||||
import type { Page } from '@playwright/test'
|
||||
import { expect } from '@playwright/test'
|
||||
|
||||
import { comfyPageFixture as test } from '@e2e/fixtures/ComfyPage'
|
||||
import { TestIds } from '@e2e/fixtures/selectors'
|
||||
|
||||
// Mirrors BANNER_DISMISS_DELAY_MS in src/composables/queue/useQueueNotificationBanners.ts.
|
||||
// Duplicated here to avoid pulling production source (and its litegraph
|
||||
// transitive deps) into the Playwright TS loader.
|
||||
const BANNER_DISMISS_DELAY_MS = 4000
|
||||
const BANNER_ASSERT_TIMEOUT_MS = BANNER_DISMISS_DELAY_MS + 2000
|
||||
|
||||
const REQUEST_ID_PRIMARY = 1
|
||||
const REQUEST_ID_SECONDARY = 2
|
||||
const REQUEST_ID_MISMATCH = 999
|
||||
|
||||
let nextRequestId = 1000
|
||||
const newRequestId = () => nextRequestId++
|
||||
|
||||
function bannerLocator(page: Page) {
|
||||
return page.getByTestId(TestIds.queue.notificationBanner)
|
||||
}
|
||||
|
||||
type DispatchOpts = { batchCount?: number; requestId?: number }
|
||||
|
||||
function dispatchPromptQueueing(page: Page, opts: DispatchOpts = {}) {
|
||||
return page.evaluate(
|
||||
([batchCount, requestId]) => {
|
||||
window.app!.api.dispatchCustomEvent('promptQueueing', {
|
||||
batchCount,
|
||||
requestId
|
||||
})
|
||||
},
|
||||
[opts.batchCount ?? 1, opts.requestId ?? newRequestId()]
|
||||
)
|
||||
}
|
||||
|
||||
function dispatchPromptQueued(page: Page, opts: DispatchOpts = {}) {
|
||||
return page.evaluate(
|
||||
([batchCount, requestId]) => {
|
||||
window.app!.api.dispatchCustomEvent('promptQueued', {
|
||||
number: 0,
|
||||
batchCount,
|
||||
requestId
|
||||
})
|
||||
},
|
||||
[opts.batchCount ?? 1, opts.requestId ?? newRequestId()]
|
||||
)
|
||||
}
|
||||
|
||||
test.describe('Queue notification banners', { tag: ['@ui'] }, () => {
|
||||
test.describe('Queuing lifecycle', () => {
|
||||
test('promptQueueing event shows a queueing banner', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueueing(comfyPage.page)
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toBeVisible()
|
||||
await expect(banner).toContainText('queuing')
|
||||
})
|
||||
|
||||
test('promptQueued upgrades a pending banner to queued', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueueing(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_PRIMARY
|
||||
})
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toContainText('queuing')
|
||||
|
||||
await dispatchPromptQueued(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_PRIMARY
|
||||
})
|
||||
|
||||
await expect(banner).toContainText('queued')
|
||||
})
|
||||
|
||||
test('promptQueued with batch count > 1 shows plural text', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueued(comfyPage.page, { batchCount: 3 })
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toBeVisible()
|
||||
await expect(banner).toContainText('3')
|
||||
await expect(banner).toContainText('jobs added to queue')
|
||||
})
|
||||
|
||||
test('promptQueued with mismatched requestId enqueues a separate queued banner', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueueing(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_PRIMARY
|
||||
})
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toContainText('queuing')
|
||||
|
||||
await dispatchPromptQueued(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_MISMATCH
|
||||
})
|
||||
|
||||
// Pending banner is not upgraded — still shows "queuing".
|
||||
await expect(banner).toContainText('queuing')
|
||||
|
||||
// After the pending banner auto-dismisses, the queued banner appears.
|
||||
await expect(banner).toContainText('queued', {
|
||||
timeout: BANNER_ASSERT_TIMEOUT_MS
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test.describe('Auto-dismiss', () => {
|
||||
test('Banner auto-dismisses after timeout', async ({ comfyPage }) => {
|
||||
await dispatchPromptQueued(comfyPage.page)
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toBeVisible()
|
||||
await expect(banner).toBeHidden({ timeout: BANNER_ASSERT_TIMEOUT_MS })
|
||||
})
|
||||
})
|
||||
|
||||
test.describe('Notification queue (FIFO)', () => {
|
||||
test('Second notification shows after first auto-dismisses', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueued(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_PRIMARY
|
||||
})
|
||||
await dispatchPromptQueued(comfyPage.page, {
|
||||
batchCount: 2,
|
||||
requestId: REQUEST_ID_SECONDARY
|
||||
})
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toContainText('Job queued')
|
||||
await expect(banner).toContainText('2 jobs added to queue', {
|
||||
timeout: BANNER_ASSERT_TIMEOUT_MS
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
test.describe('Direct queued event (no pending predecessor)', () => {
|
||||
test('promptQueued without prior queueing shows queued banner directly', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await dispatchPromptQueued(comfyPage.page, {
|
||||
batchCount: 1,
|
||||
requestId: REQUEST_ID_PRIMARY
|
||||
})
|
||||
|
||||
const banner = bannerLocator(comfyPage.page)
|
||||
await expect(banner).toBeVisible()
|
||||
await expect(banner).toContainText('queued')
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,5 +1,7 @@
|
||||
import type { ConsoleMessage } from '@playwright/test'
|
||||
import { expect } from '@playwright/test'
|
||||
|
||||
import type { ComfyPage } from '@e2e/fixtures/ComfyPage'
|
||||
import { comfyPageFixture as test } from '@e2e/fixtures/ComfyPage'
|
||||
import { TestIds } from '@e2e/fixtures/selectors'
|
||||
import { getPseudoPreviewWidgets } from '@e2e/fixtures/utils/promotedWidgets'
|
||||
@@ -90,4 +92,173 @@ test.describe('Subgraph Lifecycle', { tag: ['@subgraph'] }, () => {
|
||||
await expect(comfyPage.page.locator(domPreviewSelector)).toHaveCount(0)
|
||||
})
|
||||
})
|
||||
|
||||
test.describe('Detach Race Repro', { tag: ['@vue-nodes'] }, () => {
|
||||
const SUBGRAPH_NODE_TITLE = 'New Subgraph'
|
||||
|
||||
// Capture-and-defer the legacy onNodeRemoved/onSelectionChange handlers
|
||||
// so the test can drive unpack to completion before they run. Widens
|
||||
// the race window so a guard regression deterministically surfaces; on
|
||||
// fast environments the legacy cleanup runs in time and masks the bug.
|
||||
const DEFERRED_HANDLERS_KEY = '__deferredHandlers'
|
||||
|
||||
async function deferLegacyHandlers(comfyPage: ComfyPage) {
|
||||
await comfyPage.page.evaluate((key) => {
|
||||
const w = window as unknown as Record<string, unknown>
|
||||
const graph = window.app!.graph!
|
||||
const canvas = window.app!.canvas!
|
||||
const queue: Array<() => void> = []
|
||||
const originalNodeRemoved = graph.onNodeRemoved
|
||||
const originalSelectionChange = canvas.onSelectionChange
|
||||
w[key] = { queue, originalNodeRemoved, originalSelectionChange }
|
||||
graph.onNodeRemoved = function (node) {
|
||||
queue.push(() => originalNodeRemoved?.call(this, node))
|
||||
}
|
||||
canvas.onSelectionChange = function (selected) {
|
||||
queue.push(() => originalSelectionChange?.call(this, selected))
|
||||
}
|
||||
}, DEFERRED_HANDLERS_KEY)
|
||||
}
|
||||
|
||||
async function runDeferredHandlers(comfyPage: ComfyPage) {
|
||||
await comfyPage.page.evaluate((key) => {
|
||||
const stash = (window as unknown as Record<string, unknown>)[key] as
|
||||
| { queue: Array<() => void> }
|
||||
| undefined
|
||||
if (!stash) return
|
||||
for (const fn of stash.queue.splice(0)) fn()
|
||||
}, DEFERRED_HANDLERS_KEY)
|
||||
}
|
||||
|
||||
test.afterEach(async ({ comfyPage }) => {
|
||||
await comfyPage.page.evaluate((key) => {
|
||||
const w = window as unknown as Record<string, unknown>
|
||||
const graph = window.app?.graph
|
||||
const canvas = window.app?.canvas
|
||||
const stash = w[key] as
|
||||
| {
|
||||
originalNodeRemoved?: NonNullable<typeof graph>['onNodeRemoved']
|
||||
originalSelectionChange?: NonNullable<
|
||||
typeof canvas
|
||||
>['onSelectionChange']
|
||||
}
|
||||
| undefined
|
||||
if (stash) {
|
||||
if (graph) graph.onNodeRemoved = stash.originalNodeRemoved
|
||||
if (canvas) canvas.onSelectionChange = stash.originalSelectionChange
|
||||
}
|
||||
delete w[key]
|
||||
}, DEFERRED_HANDLERS_KEY)
|
||||
})
|
||||
|
||||
function isNullGraphErrorText(text: string): boolean {
|
||||
return text.includes('NullGraphError') || /has no graph/.test(text)
|
||||
}
|
||||
|
||||
// Vue's default errorHandler routes render throws to console.error,
|
||||
// not pageerror - listen to both.
|
||||
function captureNullGraphErrors(comfyPage: ComfyPage) {
|
||||
const captured: string[] = []
|
||||
const onPageError = (err: Error) => {
|
||||
if (
|
||||
err.name === 'NullGraphError' ||
|
||||
isNullGraphErrorText(err.message ?? '')
|
||||
) {
|
||||
captured.push(`pageerror ${err.name}: ${err.message}`)
|
||||
}
|
||||
}
|
||||
const onConsoleMessage = (msg: ConsoleMessage) => {
|
||||
if (msg.type() !== 'error') return
|
||||
const text = msg.text()
|
||||
if (isNullGraphErrorText(text)) {
|
||||
captured.push(`console.error: ${text}`)
|
||||
}
|
||||
}
|
||||
comfyPage.page.on('pageerror', onPageError)
|
||||
comfyPage.page.on('console', onConsoleMessage)
|
||||
return {
|
||||
getErrors: () => [...captured],
|
||||
stop: () => {
|
||||
comfyPage.page.off('pageerror', onPageError)
|
||||
comfyPage.page.off('console', onConsoleMessage)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function unpackViaContextMenu(comfyPage: ComfyPage, title: string) {
|
||||
const fixture = await comfyPage.vueNodes.getFixtureByTitle(title)
|
||||
await comfyPage.contextMenu.openForVueNode(fixture.header)
|
||||
await comfyPage.contextMenu.clickMenuItemExact('Unpack Subgraph')
|
||||
}
|
||||
|
||||
async function unpackAndCaptureErrors(
|
||||
comfyPage: ComfyPage
|
||||
): Promise<string[]> {
|
||||
const subgraphNode =
|
||||
comfyPage.vueNodes.getNodeByTitle(SUBGRAPH_NODE_TITLE)
|
||||
const errors = captureNullGraphErrors(comfyPage)
|
||||
try {
|
||||
await deferLegacyHandlers(comfyPage)
|
||||
await unpackViaContextMenu(comfyPage, SUBGRAPH_NODE_TITLE)
|
||||
await expect(subgraphNode).toHaveCount(0)
|
||||
await runDeferredHandlers(comfyPage)
|
||||
// Let drained-handler reactive flushes settle before stop().
|
||||
await comfyPage.nextFrame()
|
||||
return errors.getErrors()
|
||||
} finally {
|
||||
errors.stop()
|
||||
}
|
||||
}
|
||||
|
||||
test.beforeEach(async ({ comfyPage }) => {
|
||||
await comfyPage.settings.setSetting('Comfy.RightSidePanel.IsOpen', true)
|
||||
await comfyPage.workflow.loadWorkflow(
|
||||
'subgraphs/subgraph-with-promoted-text-widget'
|
||||
)
|
||||
const subgraphNode =
|
||||
comfyPage.vueNodes.getNodeByTitle(SUBGRAPH_NODE_TITLE)
|
||||
await expect(subgraphNode).toBeVisible()
|
||||
|
||||
const fixture =
|
||||
await comfyPage.vueNodes.getFixtureByTitle(SUBGRAPH_NODE_TITLE)
|
||||
await fixture.header.click()
|
||||
await expect(
|
||||
comfyPage.page.getByTestId(TestIds.propertiesPanel.root)
|
||||
).toBeVisible()
|
||||
await comfyPage.nextFrame()
|
||||
})
|
||||
|
||||
test('unpack does not surface NullGraphError on the LGraphNode render path', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
const nullGraphErrors = await unpackAndCaptureErrors(comfyPage)
|
||||
expect(
|
||||
nullGraphErrors,
|
||||
'LGraphNode render path: detach race must not surface NullGraphError'
|
||||
).toEqual([])
|
||||
})
|
||||
|
||||
test('unpack does not surface NullGraphError from the TabSubgraphInputs panel', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
const nullGraphErrors = await unpackAndCaptureErrors(comfyPage)
|
||||
expect(
|
||||
nullGraphErrors,
|
||||
'TabSubgraphInputs panel: detach race must not surface NullGraphError'
|
||||
).toEqual([])
|
||||
})
|
||||
|
||||
test('unpack with subgraph editor open does not surface NullGraphError from the SubgraphEditor panel', async ({
|
||||
comfyPage
|
||||
}) => {
|
||||
await comfyPage.page.getByTestId(TestIds.subgraphEditor.toggle).click()
|
||||
await comfyPage.nextFrame()
|
||||
|
||||
const nullGraphErrors = await unpackAndCaptureErrors(comfyPage)
|
||||
expect(
|
||||
nullGraphErrors,
|
||||
'SubgraphEditor panel: detach race must not surface NullGraphError'
|
||||
).toEqual([])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -83,7 +83,6 @@
|
||||
"@tiptap/extension-table-row": "catalog:",
|
||||
"@tiptap/pm": "catalog:",
|
||||
"@tiptap/starter-kit": "catalog:",
|
||||
"@vee-validate/zod": "catalog:",
|
||||
"@vueuse/core": "catalog:",
|
||||
"@vueuse/integrations": "catalog:",
|
||||
"@vueuse/router": "^14.2.0",
|
||||
@@ -114,7 +113,6 @@
|
||||
"three": "^0.170.0",
|
||||
"tiptap-markdown": "^0.8.10",
|
||||
"typegpu": "catalog:",
|
||||
"vee-validate": "catalog:",
|
||||
"vue": "catalog:",
|
||||
"vue-i18n": "catalog:",
|
||||
"vue-router": "catalog:",
|
||||
|
||||
38
pnpm-lock.yaml
generated
38
pnpm-lock.yaml
generated
@@ -162,9 +162,6 @@ catalogs:
|
||||
'@types/three':
|
||||
specifier: ^0.169.0
|
||||
version: 0.169.0
|
||||
'@vee-validate/zod':
|
||||
specifier: ^4.15.1
|
||||
version: 4.15.1
|
||||
'@vercel/analytics':
|
||||
specifier: ^2.0.1
|
||||
version: 2.0.1
|
||||
@@ -363,9 +360,6 @@ catalogs:
|
||||
unplugin-vue-components:
|
||||
specifier: ^30.0.0
|
||||
version: 30.0.0
|
||||
vee-validate:
|
||||
specifier: ^4.15.1
|
||||
version: 4.15.1
|
||||
vite-plugin-dts:
|
||||
specifier: ^4.5.4
|
||||
version: 4.5.4
|
||||
@@ -503,9 +497,6 @@ importers:
|
||||
'@tiptap/starter-kit':
|
||||
specifier: 'catalog:'
|
||||
version: 2.27.2
|
||||
'@vee-validate/zod':
|
||||
specifier: 'catalog:'
|
||||
version: 4.15.1(vue@3.5.13(typescript@5.9.3))(zod@3.25.76)
|
||||
'@vueuse/core':
|
||||
specifier: 'catalog:'
|
||||
version: 14.2.0(vue@3.5.13(typescript@5.9.3))
|
||||
@@ -596,9 +587,6 @@ importers:
|
||||
typegpu:
|
||||
specifier: 'catalog:'
|
||||
version: 0.8.2
|
||||
vee-validate:
|
||||
specifier: 'catalog:'
|
||||
version: 4.15.1(vue@3.5.13(typescript@5.9.3))
|
||||
vue:
|
||||
specifier: 'catalog:'
|
||||
version: 3.5.13(typescript@5.9.3)
|
||||
@@ -4736,11 +4724,6 @@ packages:
|
||||
peerDependencies:
|
||||
valibot: ^1.2.0
|
||||
|
||||
'@vee-validate/zod@4.15.1':
|
||||
resolution: {integrity: sha512-329Z4TDBE5Vx0FdbA8S4eR9iGCFFUNGbxjpQ20ff5b5wGueScjocUIx9JHPa79LTG06RnlUR4XogQsjN4tecKA==}
|
||||
peerDependencies:
|
||||
zod: ^3.24.0
|
||||
|
||||
'@vercel/analytics@2.0.1':
|
||||
resolution: {integrity: sha512-MTQG6V9qQrt1tsDeF+2Uoo5aPjqbVPys1xvnIftXSJYG2SrwXRHnqEvVoYID7BTruDz4lCd2Z7rM1BdkUehk2g==}
|
||||
peerDependencies:
|
||||
@@ -9613,11 +9596,6 @@ packages:
|
||||
typescript:
|
||||
optional: true
|
||||
|
||||
vee-validate@4.15.1:
|
||||
resolution: {integrity: sha512-DkFsiTwEKau8VIxyZBGdO6tOudD+QoUBPuHj3e6QFqmbfCRj1ArmYWue9lEp6jLSWBIw4XPlDLjFIZNLdRAMSg==}
|
||||
peerDependencies:
|
||||
vue: ^3.4.26
|
||||
|
||||
vfile-location@5.0.3:
|
||||
resolution: {integrity: sha512-5yXvWDEgqeiYiBe1lbxYF7UMAIm/IcopxMHrMQDq3nvKcjPKIhZklUKL+AE7J7uApI4kwe2snsK+eI6UTj9EHg==}
|
||||
|
||||
@@ -14063,14 +14041,6 @@ snapshots:
|
||||
dependencies:
|
||||
valibot: 1.2.0(typescript@5.9.3)
|
||||
|
||||
'@vee-validate/zod@4.15.1(vue@3.5.13(typescript@5.9.3))(zod@3.25.76)':
|
||||
dependencies:
|
||||
type-fest: 4.41.0
|
||||
vee-validate: 4.15.1(vue@3.5.13(typescript@5.9.3))
|
||||
zod: 3.25.76
|
||||
transitivePeerDependencies:
|
||||
- vue
|
||||
|
||||
'@vercel/analytics@2.0.1(react@19.2.4)(vue-router@4.4.3(vue@3.5.13(typescript@5.9.3)))(vue@3.5.13(typescript@5.9.3))':
|
||||
optionalDependencies:
|
||||
react: 19.2.4
|
||||
@@ -14189,7 +14159,7 @@ snapshots:
|
||||
sirv: 3.0.2
|
||||
tinyglobby: 0.2.15
|
||||
tinyrainbow: 3.0.3
|
||||
vitest: 4.0.16(@opentelemetry/api@1.9.0)(@types/node@24.10.4)(@vitest/ui@4.0.16)(esbuild@0.27.3)(happy-dom@20.0.11)(jiti@2.6.1)(jsdom@27.4.0)(terser@5.39.2)(tsx@4.19.4)(yaml@2.8.2)
|
||||
vitest: 4.0.16(@opentelemetry/api@1.9.0)(@types/node@25.0.3)(@vitest/ui@4.0.16)(esbuild@0.27.3)(happy-dom@20.0.11)(jiti@2.6.1)(jsdom@27.4.0)(terser@5.39.2)(tsx@4.19.4)(yaml@2.8.2)
|
||||
|
||||
'@vitest/utils@3.2.4':
|
||||
dependencies:
|
||||
@@ -20084,12 +20054,6 @@ snapshots:
|
||||
optionalDependencies:
|
||||
typescript: 5.9.3
|
||||
|
||||
vee-validate@4.15.1(vue@3.5.13(typescript@5.9.3)):
|
||||
dependencies:
|
||||
'@vue/devtools-api': 7.7.9
|
||||
type-fest: 4.41.0
|
||||
vue: 3.5.13(typescript@5.9.3)
|
||||
|
||||
vfile-location@5.0.3:
|
||||
dependencies:
|
||||
'@types/unist': 3.0.3
|
||||
|
||||
@@ -55,7 +55,6 @@ catalog:
|
||||
'@types/node': ^24.1.0
|
||||
'@types/semver': ^7.7.0
|
||||
'@types/three': ^0.169.0
|
||||
'@vee-validate/zod': ^4.15.1
|
||||
'@vercel/analytics': ^2.0.1
|
||||
'@vitejs/plugin-vue': ^6.0.0
|
||||
'@vitest/coverage-v8': ^4.0.16
|
||||
@@ -122,7 +121,6 @@ catalog:
|
||||
unplugin-icons: ^22.5.0
|
||||
unplugin-typegpu: 0.8.0
|
||||
unplugin-vue-components: ^30.0.0
|
||||
vee-validate: ^4.15.1
|
||||
vite: ^8.0.0
|
||||
vite-plugin-dts: ^4.5.4
|
||||
vite-plugin-html: ^3.2.2
|
||||
|
||||
@@ -1,177 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Generate test fixture files for metadata parser tests.
|
||||
|
||||
Each fixture embeds the same workflow and prompt JSON, matching the
|
||||
format the ComfyUI backend uses to write metadata.
|
||||
|
||||
Prerequisites:
|
||||
source ~/ComfyUI/.venv/bin/activate
|
||||
python3 scripts/generate-embedded-metadata-test-files.py
|
||||
|
||||
Output: src/scripts/metadata/__fixtures__/
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import struct
|
||||
import subprocess
|
||||
|
||||
import av
|
||||
from PIL import Image
|
||||
|
||||
REPO_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
FIXTURES_DIR = os.path.join(REPO_ROOT, 'src', 'scripts', 'metadata', '__fixtures__')
|
||||
|
||||
WORKFLOW = {
|
||||
'nodes': [
|
||||
{
|
||||
'id': 1,
|
||||
'type': 'KSampler',
|
||||
'pos': [100, 100],
|
||||
'size': [200, 200],
|
||||
}
|
||||
]
|
||||
}
|
||||
PROMPT = {'1': {'class_type': 'KSampler', 'inputs': {}}}
|
||||
|
||||
WORKFLOW_JSON = json.dumps(WORKFLOW, separators=(',', ':'))
|
||||
PROMPT_JSON = json.dumps(PROMPT, separators=(',', ':'))
|
||||
|
||||
|
||||
def out(name: str) -> str:
|
||||
return os.path.join(FIXTURES_DIR, name)
|
||||
|
||||
|
||||
def report(name: str):
|
||||
size = os.path.getsize(out(name))
|
||||
print(f' {name} ({size} bytes)')
|
||||
|
||||
|
||||
def make_1x1_image() -> Image.Image:
|
||||
return Image.new('RGB', (1, 1), (255, 0, 0))
|
||||
|
||||
|
||||
def build_exif_bytes() -> bytes:
|
||||
"""Build EXIF bytes matching the backend's tag assignments.
|
||||
|
||||
Backend: 0x010F (Make) = "workflow:<json>", 0x0110 (Model) = "prompt:<json>"
|
||||
"""
|
||||
img = make_1x1_image()
|
||||
exif = img.getexif()
|
||||
exif[0x010F] = f'workflow:{WORKFLOW_JSON}'
|
||||
exif[0x0110] = f'prompt:{PROMPT_JSON}'
|
||||
return exif.tobytes()
|
||||
|
||||
|
||||
def inject_exif_prefix_in_webp(path: str):
|
||||
"""Prepend Exif\\0\\0 to the EXIF chunk in a WEBP file.
|
||||
|
||||
PIL always strips this prefix, so we re-inject it to test that code path.
|
||||
"""
|
||||
data = bytearray(open(path, 'rb').read())
|
||||
off = 12
|
||||
while off < len(data):
|
||||
chunk_type = data[off:off + 4]
|
||||
chunk_len = struct.unpack_from('<I', data, off + 4)[0]
|
||||
if chunk_type == b'EXIF':
|
||||
prefix = b'Exif\x00\x00'
|
||||
data[off + 8:off + 8] = prefix
|
||||
struct.pack_into('<I', data, off + 4, chunk_len + len(prefix))
|
||||
riff_size = struct.unpack_from('<I', data, 4)[0]
|
||||
struct.pack_into('<I', data, 4, riff_size + len(prefix))
|
||||
break
|
||||
off += 8 + chunk_len + (chunk_len % 2)
|
||||
with open(path, 'wb') as f:
|
||||
f.write(data)
|
||||
|
||||
|
||||
def generate_av_fixture(
|
||||
name: str,
|
||||
fmt: str,
|
||||
codec: str,
|
||||
rate: int = 44100,
|
||||
options: dict | None = None,
|
||||
):
|
||||
"""Generate an audio fixture via PyAV container.metadata[], matching the backend."""
|
||||
path = out(name)
|
||||
container = av.open(path, mode='w', format=fmt, options=options or {})
|
||||
stream = container.add_stream(codec, rate=rate)
|
||||
stream.layout = 'mono'
|
||||
|
||||
container.metadata['prompt'] = PROMPT_JSON
|
||||
container.metadata['workflow'] = WORKFLOW_JSON
|
||||
|
||||
sample_fmt = stream.codec_context.codec.audio_formats[0].name
|
||||
samples = stream.codec_context.frame_size or 1024
|
||||
frame = av.AudioFrame(format=sample_fmt, layout='mono', samples=samples)
|
||||
frame.rate = rate
|
||||
frame.pts = 0
|
||||
for packet in stream.encode(frame):
|
||||
container.mux(packet)
|
||||
for packet in stream.encode():
|
||||
container.mux(packet)
|
||||
container.close()
|
||||
report(name)
|
||||
|
||||
|
||||
def generate_webp():
|
||||
img = make_1x1_image()
|
||||
exif = build_exif_bytes()
|
||||
|
||||
img.save(out('with_metadata.webp'), 'WEBP', exif=exif)
|
||||
report('with_metadata.webp')
|
||||
|
||||
img.save(out('with_metadata_exif_prefix.webp'), 'WEBP', exif=exif)
|
||||
inject_exif_prefix_in_webp(out('with_metadata_exif_prefix.webp'))
|
||||
report('with_metadata_exif_prefix.webp')
|
||||
|
||||
|
||||
def generate_avif():
|
||||
img = make_1x1_image()
|
||||
exif = build_exif_bytes()
|
||||
img.save(out('with_metadata.avif'), 'AVIF', exif=exif)
|
||||
report('with_metadata.avif')
|
||||
|
||||
|
||||
def generate_flac():
|
||||
generate_av_fixture('with_metadata.flac', 'flac', 'flac')
|
||||
|
||||
|
||||
def generate_opus():
|
||||
generate_av_fixture('with_metadata.opus', 'opus', 'libopus', rate=48000)
|
||||
|
||||
|
||||
def generate_mp3():
|
||||
generate_av_fixture('with_metadata.mp3', 'mp3', 'libmp3lame')
|
||||
|
||||
|
||||
def generate_mp4():
|
||||
"""Generate MP4 via ffmpeg CLI with QuickTime keys/ilst metadata."""
|
||||
path = out('with_metadata.mp4')
|
||||
subprocess.run([
|
||||
'ffmpeg', '-y', '-loglevel', 'error',
|
||||
'-f', 'lavfi', '-i', 'anullsrc=r=44100:cl=mono',
|
||||
'-t', '0.01', '-c:a', 'aac', '-b:a', '32k',
|
||||
'-movflags', 'use_metadata_tags',
|
||||
'-metadata', f'prompt={PROMPT_JSON}',
|
||||
'-metadata', f'workflow={WORKFLOW_JSON}',
|
||||
path,
|
||||
], check=True)
|
||||
report('with_metadata.mp4')
|
||||
|
||||
|
||||
def generate_webm():
|
||||
generate_av_fixture('with_metadata.webm', 'webm', 'libvorbis')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
print('Generating fixtures...')
|
||||
generate_webp()
|
||||
generate_avif()
|
||||
generate_flac()
|
||||
generate_opus()
|
||||
generate_mp3()
|
||||
generate_mp4()
|
||||
generate_webm()
|
||||
print('Done.')
|
||||
@@ -5,7 +5,6 @@
|
||||
role="status"
|
||||
aria-live="polite"
|
||||
aria-atomic="true"
|
||||
data-testid="queue-notification-banner"
|
||||
>
|
||||
<QueueNotificationBanner :notification="currentNotification" />
|
||||
</div>
|
||||
|
||||
@@ -872,3 +872,55 @@ describe('reconcileNodeErrorFlags (via lastNodeErrors watcher)', () => {
|
||||
expect(subgraphNode.has_errors).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Pre-remove vueNodeData drain', () => {
|
||||
beforeEach(() => {
|
||||
setActivePinia(createTestingPinia({ stubActions: false }))
|
||||
})
|
||||
|
||||
it('drops vueNodeData entry before node.onRemoved fires', () => {
|
||||
const graph = new LGraph()
|
||||
const node = new LGraphNode('test')
|
||||
graph.add(node)
|
||||
const { vueNodeData } = useGraphNodeManager(graph)
|
||||
|
||||
expect(vueNodeData.has(String(node.id))).toBe(true)
|
||||
|
||||
let dataPresentInOnRemoved: boolean | undefined
|
||||
node.onRemoved = () => {
|
||||
dataPresentInOnRemoved = vueNodeData.has(String(node.id))
|
||||
}
|
||||
|
||||
graph.remove(node)
|
||||
|
||||
expect(
|
||||
dataPresentInOnRemoved,
|
||||
'vueNodeData entry must be cleared before node.onRemoved fires so reactive consumers cannot observe the detached node'
|
||||
).toBe(false)
|
||||
})
|
||||
|
||||
it('clears vueNodeData via the onNodeRemoved fallback when LGraph.clear() bypasses node:before-removed', () => {
|
||||
const graph = new LGraph()
|
||||
const nodeA = new LGraphNode('a')
|
||||
const nodeB = new LGraphNode('b')
|
||||
graph.add(nodeA)
|
||||
graph.add(nodeB)
|
||||
const { vueNodeData } = useGraphNodeManager(graph)
|
||||
|
||||
expect(vueNodeData.size).toBe(2)
|
||||
|
||||
const beforeRemovedSpy = vi.fn()
|
||||
graph.events.addEventListener('node:before-removed', beforeRemovedSpy)
|
||||
|
||||
graph.clear()
|
||||
|
||||
expect(
|
||||
beforeRemovedSpy,
|
||||
'clear() does not dispatch node:before-removed - cleanup comes from the onNodeRemoved fallback'
|
||||
).not.toHaveBeenCalled()
|
||||
expect(
|
||||
vueNodeData.size,
|
||||
'onNodeRemoved fallback must clear vueNodeData when the event path is bypassed'
|
||||
).toBe(0)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -635,27 +635,24 @@ export function useGraphNodeManager(graph: LGraph): GraphNodeManager {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Handles node removal from the graph - cleans up all references
|
||||
*/
|
||||
// Drop refs while node is still attached, before reactive store writes
|
||||
// in node.onRemoved can invalidate computeds holding the node.
|
||||
const handleBeforeNodeRemoved = (node: LGraphNode) => {
|
||||
const id = String(node.id)
|
||||
nodeRefs.delete(id)
|
||||
vueNodeData.delete(id)
|
||||
}
|
||||
|
||||
const handleNodeRemoved = (
|
||||
node: LGraphNode,
|
||||
originalCallback?: (node: LGraphNode) => void
|
||||
) => {
|
||||
// Ensure refs are cleared if node:before-removed didn't fire
|
||||
handleBeforeNodeRemoved(node)
|
||||
const id = String(node.id)
|
||||
|
||||
// Remove node from layout store
|
||||
setSource(LayoutSource.Canvas)
|
||||
void deleteNode(id)
|
||||
|
||||
// Clean up all tracking references
|
||||
nodeRefs.delete(id)
|
||||
vueNodeData.delete(id)
|
||||
|
||||
// Call original callback if provided
|
||||
if (originalCallback) {
|
||||
originalCallback(node)
|
||||
}
|
||||
originalCallback?.(node)
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -678,9 +675,6 @@ export function useGraphNodeManager(graph: LGraph): GraphNodeManager {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets up event listeners - now simplified with extracted handlers
|
||||
*/
|
||||
const setupEventListeners = (): (() => void) => {
|
||||
// Store original callbacks
|
||||
const originalOnNodeAdded = graph.onNodeAdded
|
||||
@@ -696,6 +690,16 @@ export function useGraphNodeManager(graph: LGraph): GraphNodeManager {
|
||||
handleNodeRemoved(node, originalOnNodeRemoved)
|
||||
}
|
||||
|
||||
const beforeNodeRemovedListener = (
|
||||
e: CustomEvent<{ node: LGraphNode }>
|
||||
) => {
|
||||
handleBeforeNodeRemoved(e.detail.node)
|
||||
}
|
||||
graph.events.addEventListener(
|
||||
'node:before-removed',
|
||||
beforeNodeRemovedListener
|
||||
)
|
||||
|
||||
const triggerHandlers: {
|
||||
[K in LGraphTriggerAction]: (event: LGraphTriggerParam<K>) => void
|
||||
} = {
|
||||
@@ -838,12 +842,19 @@ export function useGraphNodeManager(graph: LGraph): GraphNodeManager {
|
||||
// Initialize state
|
||||
syncWithGraph()
|
||||
|
||||
// Return cleanup function
|
||||
return createCleanupFunction(
|
||||
const cleanup = createCleanupFunction(
|
||||
originalOnNodeAdded || undefined,
|
||||
originalOnNodeRemoved || undefined,
|
||||
originalOnTrigger || undefined
|
||||
)
|
||||
|
||||
return () => {
|
||||
graph.events.removeEventListener(
|
||||
'node:before-removed',
|
||||
beforeNodeRemovedListener
|
||||
)
|
||||
cleanup()
|
||||
}
|
||||
}
|
||||
|
||||
// Set up event listeners immediately
|
||||
|
||||
@@ -106,6 +106,18 @@ describe(usePromotedPreviews, () => {
|
||||
expect(promotedPreviews.value).toEqual([])
|
||||
})
|
||||
|
||||
it('returns empty array (does not throw) when SubgraphNode is detached', () => {
|
||||
const setup = createSetup()
|
||||
const parentGraph = setup.subgraphNode.graph!
|
||||
parentGraph.add(setup.subgraphNode)
|
||||
parentGraph.remove(setup.subgraphNode)
|
||||
|
||||
expect(setup.subgraphNode.graph).toBeNull()
|
||||
const { promotedPreviews } = usePromotedPreviews(() => setup.subgraphNode)
|
||||
expect(() => promotedPreviews.value).not.toThrow()
|
||||
expect(promotedPreviews.value).toEqual([])
|
||||
})
|
||||
|
||||
it('returns empty array when no $$ promotions exist', () => {
|
||||
const setup = createSetup()
|
||||
addInteriorNode(setup, { id: 10 })
|
||||
|
||||
@@ -28,6 +28,7 @@ export function usePromotedPreviews(
|
||||
const promotedPreviews = computed((): PromotedPreview[] => {
|
||||
const node = toValue(lgraphNode)
|
||||
if (!(node instanceof SubgraphNode)) return []
|
||||
if (!node.graph) return []
|
||||
|
||||
const entries = promotionStore.getPromotions(node.rootGraph.id, node.id)
|
||||
const pseudoEntries = entries.filter((e) =>
|
||||
|
||||
@@ -5,7 +5,6 @@ import { ref } from 'vue'
|
||||
import { useCoreCommands } from '@/composables/useCoreCommands'
|
||||
import { useExternalLink } from '@/composables/useExternalLink'
|
||||
import type { LGraphNode } from '@/lib/litegraph/src/litegraph'
|
||||
import { LiteGraph } from '@/lib/litegraph/src/litegraph'
|
||||
import { useSettingStore } from '@/platform/settings/settingStore'
|
||||
import { api } from '@/scripts/api'
|
||||
import { app } from '@/scripts/app'
|
||||
@@ -36,34 +35,17 @@ vi.mock('@/scripts/app', () => {
|
||||
const mockCanvas = {
|
||||
subgraph: undefined,
|
||||
selectedItems: new Set(),
|
||||
selected_nodes: null as Record<string, unknown> | null,
|
||||
copyToClipboard: vi.fn(),
|
||||
pasteFromClipboard: vi.fn(),
|
||||
selectItems: vi.fn(),
|
||||
ds: mockDs,
|
||||
deleteSelected: vi.fn(),
|
||||
setDirty: vi.fn(),
|
||||
fitViewToSelectionAnimated: vi.fn(),
|
||||
empty: false,
|
||||
state: {
|
||||
readOnly: false,
|
||||
selectionChanged: false
|
||||
},
|
||||
graph: {
|
||||
add: vi.fn(),
|
||||
convertToSubgraph: vi.fn(),
|
||||
rootGraph: {}
|
||||
},
|
||||
select: vi.fn(),
|
||||
canvas: {
|
||||
dispatchEvent: vi.fn()
|
||||
},
|
||||
setGraph: vi.fn()
|
||||
setDirty: vi.fn()
|
||||
}
|
||||
|
||||
return {
|
||||
app: {
|
||||
clean: vi.fn(() => {
|
||||
// Simulate app.clean() calling graph.clear() only when not in subgraph
|
||||
if (!mockCanvas.subgraph) {
|
||||
mockGraphClear()
|
||||
}
|
||||
@@ -72,11 +54,8 @@ vi.mock('@/scripts/app', () => {
|
||||
refreshComboInNodes: vi.fn().mockResolvedValue(undefined),
|
||||
canvas: mockCanvas,
|
||||
rootGraph: {
|
||||
clear: mockGraphClear,
|
||||
_nodes: []
|
||||
},
|
||||
queuePrompt: vi.fn(),
|
||||
ui: { loadFile: vi.fn() }
|
||||
clear: mockGraphClear
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -84,9 +63,7 @@ vi.mock('@/scripts/app', () => {
|
||||
vi.mock('@/scripts/api', () => ({
|
||||
api: {
|
||||
dispatchCustomEvent: vi.fn(),
|
||||
apiURL: vi.fn(() => 'http://localhost:8188'),
|
||||
interrupt: vi.fn(),
|
||||
freeMemory: vi.fn()
|
||||
apiURL: vi.fn(() => 'http://localhost:8188')
|
||||
}
|
||||
}))
|
||||
|
||||
@@ -124,15 +101,11 @@ vi.mock('@/services/litegraphService', () => ({
|
||||
}))
|
||||
}))
|
||||
|
||||
const mockTelemetry = vi.hoisted(() => ({
|
||||
trackWorkflowCreated: vi.fn(),
|
||||
trackRunButton: vi.fn(),
|
||||
trackWorkflowExecution: vi.fn(),
|
||||
trackHelpResourceClicked: vi.fn(),
|
||||
trackEnterLinear: vi.fn()
|
||||
}))
|
||||
const mockTrackHelpResourceClicked = vi.hoisted(() => vi.fn())
|
||||
vi.mock('@/platform/telemetry', () => ({
|
||||
useTelemetry: vi.fn(() => mockTelemetry)
|
||||
useTelemetry: vi.fn(() => ({
|
||||
trackHelpResourceClicked: mockTrackHelpResourceClicked
|
||||
}))
|
||||
}))
|
||||
|
||||
const mockShowAbout = vi.hoisted(() => vi.fn())
|
||||
@@ -148,18 +121,12 @@ vi.mock('@/stores/executionStore', () => ({
|
||||
useExecutionStore: vi.fn(() => ({}))
|
||||
}))
|
||||
|
||||
const mockToastStore = vi.hoisted(() => ({
|
||||
add: vi.fn()
|
||||
}))
|
||||
vi.mock('@/platform/updates/common/toastStore', () => ({
|
||||
useToastStore: vi.fn(() => mockToastStore)
|
||||
vi.mock('@/stores/toastStore', () => ({
|
||||
useToastStore: vi.fn(() => ({}))
|
||||
}))
|
||||
|
||||
const mockChangeTracker = vi.hoisted(() => ({
|
||||
captureCanvasState: vi.fn(),
|
||||
checkState: vi.fn(),
|
||||
undo: vi.fn(),
|
||||
redo: vi.fn()
|
||||
captureCanvasState: vi.fn()
|
||||
}))
|
||||
const mockWorkflowStore = vi.hoisted(() => ({
|
||||
activeWorkflow: {
|
||||
@@ -174,29 +141,22 @@ vi.mock('@/stores/subgraphStore', () => ({
|
||||
useSubgraphStore: vi.fn(() => ({}))
|
||||
}))
|
||||
|
||||
const mockCanvasStore = vi.hoisted(() => ({
|
||||
getCanvas: vi.fn(),
|
||||
canvas: null as unknown,
|
||||
linearMode: false,
|
||||
updateSelectedItems: vi.fn()
|
||||
}))
|
||||
vi.mock('@/renderer/core/canvas/canvasStore', () => ({
|
||||
useCanvasStore: vi.fn(() => mockCanvasStore),
|
||||
useCanvasStore: vi.fn(() => ({
|
||||
getCanvas: () => app.canvas,
|
||||
canvas: app.canvas
|
||||
})),
|
||||
useTitleEditorStore: vi.fn(() => ({
|
||||
titleEditorTarget: null
|
||||
}))
|
||||
}))
|
||||
|
||||
vi.mock('@/stores/workspace/colorPaletteStore', () => ({
|
||||
useColorPaletteStore: vi.fn(() => ({
|
||||
completedActivePalette: { id: 'dark-default', light_theme: false }
|
||||
}))
|
||||
useColorPaletteStore: vi.fn(() => ({}))
|
||||
}))
|
||||
|
||||
vi.mock('@/composables/auth/useAuthActions', () => ({
|
||||
useAuthActions: vi.fn(() => ({
|
||||
logout: vi.fn()
|
||||
}))
|
||||
useAuthActions: vi.fn(() => ({}))
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/cloud/subscription/composables/useSubscription', () => ({
|
||||
@@ -206,73 +166,13 @@ vi.mock('@/platform/cloud/subscription/composables/useSubscription', () => ({
|
||||
}))
|
||||
}))
|
||||
|
||||
const mockIsActiveSubscription = vi.hoisted(() => ({ value: true }))
|
||||
const mockShowSubscriptionDialog = vi.hoisted(() => vi.fn())
|
||||
vi.mock('@/composables/billing/useBillingContext', () => ({
|
||||
useBillingContext: vi.fn(() => ({
|
||||
isActiveSubscription: mockIsActiveSubscription,
|
||||
showSubscriptionDialog: mockShowSubscriptionDialog
|
||||
isActiveSubscription: { value: true },
|
||||
showSubscriptionDialog: vi.fn()
|
||||
}))
|
||||
}))
|
||||
|
||||
vi.mock('@/composables/auth/useCurrentUser', () => ({
|
||||
useCurrentUser: vi.fn(() => ({
|
||||
userEmail: ref(''),
|
||||
resolvedUserInfo: ref(null)
|
||||
}))
|
||||
}))
|
||||
|
||||
const mockSelectedItems = vi.hoisted(() => ({
|
||||
getSelectedNodes: vi.fn((): unknown[] => []),
|
||||
toggleSelectedNodesMode: vi.fn()
|
||||
}))
|
||||
vi.mock('@/composables/canvas/useSelectedLiteGraphItems', () => ({
|
||||
useSelectedLiteGraphItems: vi.fn(() => mockSelectedItems)
|
||||
}))
|
||||
|
||||
vi.mock('@/composables/graph/useSubgraphOperations', () => ({
|
||||
useSubgraphOperations: vi.fn(() => ({
|
||||
unpackSubgraph: vi.fn()
|
||||
}))
|
||||
}))
|
||||
|
||||
vi.mock('@/composables/useExternalLink', () => ({
|
||||
useExternalLink: vi.fn(() => ({
|
||||
staticUrls: {
|
||||
githubIssues: 'https://github.com/issues',
|
||||
discord: 'https://discord.gg/test',
|
||||
forum: 'https://forum.test.com'
|
||||
},
|
||||
buildDocsUrl: vi.fn(() => 'https://docs.test.com')
|
||||
}))
|
||||
}))
|
||||
|
||||
vi.mock('@/composables/useModelSelectorDialog', () => ({
|
||||
useModelSelectorDialog: vi.fn(() => ({
|
||||
show: vi.fn()
|
||||
}))
|
||||
}))
|
||||
|
||||
vi.mock('@/composables/useWorkflowTemplateSelectorDialog', () => ({
|
||||
useWorkflowTemplateSelectorDialog: vi.fn(() => ({
|
||||
show: vi.fn()
|
||||
}))
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/assets/composables/useAssetBrowserDialog', () => ({
|
||||
useAssetBrowserDialog: vi.fn(() => ({
|
||||
browse: vi.fn()
|
||||
}))
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/assets/utils/createModelNodeFromAsset', () => ({
|
||||
createModelNodeFromAsset: vi.fn()
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/support/config', () => ({
|
||||
buildSupportUrl: vi.fn(() => 'https://support.test.com')
|
||||
}))
|
||||
|
||||
describe('useCoreCommands', () => {
|
||||
const createMockNode = (id: number, comfyClass: string): LGraphNode => {
|
||||
const baseNode = createMockLGraphNode({ id })
|
||||
@@ -286,9 +186,13 @@ describe('useCoreCommands', () => {
|
||||
|
||||
const createMockSubgraph = () => {
|
||||
const mockNodes = [
|
||||
// Mock input node
|
||||
createMockNode(1, 'SubgraphInputNode'),
|
||||
// Mock output node
|
||||
createMockNode(2, 'SubgraphOutputNode'),
|
||||
// Mock user node
|
||||
createMockNode(3, 'SomeUserNode'),
|
||||
// Another mock user node
|
||||
createMockNode(4, 'AnotherUserNode')
|
||||
]
|
||||
|
||||
@@ -357,38 +261,31 @@ describe('useCoreCommands', () => {
|
||||
} satisfies ReturnType<typeof useSettingStore>
|
||||
}
|
||||
|
||||
function findCommand(id: string) {
|
||||
const cmd = useCoreCommands().find((c) => c.id === id)
|
||||
if (!cmd) throw new Error(`Command '${id}' not found`)
|
||||
return cmd
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
|
||||
// Set up Pinia
|
||||
setActivePinia(createPinia())
|
||||
|
||||
// Reset app state
|
||||
app.canvas.subgraph = undefined
|
||||
app.canvas.selectedItems = new Set()
|
||||
app.canvas.state.readOnly = false
|
||||
app.canvas.state.selectionChanged = false
|
||||
Object.defineProperty(app.canvas, 'empty', { value: false, writable: true })
|
||||
mockCanvasStore.linearMode = false
|
||||
mockCanvasStore.getCanvas.mockReturnValue(app.canvas)
|
||||
mockIsActiveSubscription.value = true
|
||||
|
||||
// Mock settings store
|
||||
vi.mocked(useSettingStore).mockReturnValue(createMockSettingStore(false))
|
||||
|
||||
vi.stubGlobal('confirm', vi.fn().mockReturnValue(true))
|
||||
vi.stubGlobal(
|
||||
'open',
|
||||
vi.fn().mockReturnValue({ focus: vi.fn(), closed: false })
|
||||
)
|
||||
// Mock global confirm
|
||||
global.confirm = vi.fn().mockReturnValue(true)
|
||||
})
|
||||
|
||||
describe('ClearWorkflow command', () => {
|
||||
it('should clear main graph when not in subgraph', async () => {
|
||||
await findCommand('Comfy.ClearWorkflow').function()
|
||||
const commands = useCoreCommands()
|
||||
const clearCommand = commands.find(
|
||||
(cmd) => cmd.id === 'Comfy.ClearWorkflow'
|
||||
)!
|
||||
|
||||
// Execute the command
|
||||
await clearCommand.function()
|
||||
|
||||
expect(app.clean).toHaveBeenCalled()
|
||||
expect(app.rootGraph.clear).toHaveBeenCalled()
|
||||
@@ -396,29 +293,46 @@ describe('useCoreCommands', () => {
|
||||
})
|
||||
|
||||
it('should preserve input/output nodes when clearing subgraph', async () => {
|
||||
// Set up subgraph context
|
||||
app.canvas.subgraph = mockSubgraph
|
||||
|
||||
await findCommand('Comfy.ClearWorkflow').function()
|
||||
const commands = useCoreCommands()
|
||||
const clearCommand = commands.find(
|
||||
(cmd) => cmd.id === 'Comfy.ClearWorkflow'
|
||||
)!
|
||||
|
||||
// Execute the command
|
||||
await clearCommand.function()
|
||||
|
||||
expect(app.clean).toHaveBeenCalled()
|
||||
expect(app.rootGraph.clear).not.toHaveBeenCalled()
|
||||
|
||||
// Should only remove user nodes, not input/output nodes
|
||||
const subgraph = app.canvas.subgraph!
|
||||
expect(subgraph.remove).toHaveBeenCalledTimes(2)
|
||||
expect(subgraph.remove).toHaveBeenCalledWith(subgraph.nodes[2])
|
||||
expect(subgraph.remove).toHaveBeenCalledWith(subgraph.nodes[3])
|
||||
expect(subgraph.remove).not.toHaveBeenCalledWith(subgraph.nodes[0])
|
||||
expect(subgraph.remove).not.toHaveBeenCalledWith(subgraph.nodes[1])
|
||||
expect(subgraph.remove).toHaveBeenCalledWith(subgraph.nodes[2]) // user1
|
||||
expect(subgraph.remove).toHaveBeenCalledWith(subgraph.nodes[3]) // user2
|
||||
expect(subgraph.remove).not.toHaveBeenCalledWith(subgraph.nodes[0]) // input1
|
||||
expect(subgraph.remove).not.toHaveBeenCalledWith(subgraph.nodes[1]) // output1
|
||||
|
||||
expect(api.dispatchCustomEvent).toHaveBeenCalledWith('graphCleared')
|
||||
})
|
||||
|
||||
it('should respect confirmation setting', async () => {
|
||||
// Mock confirmation required
|
||||
vi.mocked(useSettingStore).mockReturnValue(createMockSettingStore(true))
|
||||
vi.stubGlobal('confirm', vi.fn().mockReturnValue(false))
|
||||
|
||||
await findCommand('Comfy.ClearWorkflow').function()
|
||||
global.confirm = vi.fn().mockReturnValue(false) // User cancels
|
||||
|
||||
const commands = useCoreCommands()
|
||||
const clearCommand = commands.find(
|
||||
(cmd) => cmd.id === 'Comfy.ClearWorkflow'
|
||||
)!
|
||||
|
||||
// Execute the command
|
||||
await clearCommand.function()
|
||||
|
||||
// Should not clear anything when user cancels
|
||||
expect(app.clean).not.toHaveBeenCalled()
|
||||
expect(app.rootGraph.clear).not.toHaveBeenCalled()
|
||||
expect(api.dispatchCustomEvent).not.toHaveBeenCalled()
|
||||
@@ -426,6 +340,17 @@ describe('useCoreCommands', () => {
|
||||
})
|
||||
|
||||
describe('Canvas clipboard commands', () => {
|
||||
function findCommand(id: string) {
|
||||
return useCoreCommands().find((cmd) => cmd.id === id)!
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
app.canvas.selectedItems = new Set()
|
||||
vi.mocked(app.canvas.copyToClipboard).mockClear()
|
||||
vi.mocked(app.canvas.pasteFromClipboard).mockClear()
|
||||
vi.mocked(app.canvas.selectItems).mockClear()
|
||||
})
|
||||
|
||||
it('should copy selected items when selection exists', async () => {
|
||||
app.canvas.selectedItems = new Set([
|
||||
{}
|
||||
@@ -448,541 +373,14 @@ describe('useCoreCommands', () => {
|
||||
expect(app.canvas.pasteFromClipboard).toHaveBeenCalledWith()
|
||||
})
|
||||
|
||||
it('should paste with connect option', async () => {
|
||||
await findCommand('Comfy.Canvas.PasteFromClipboardWithConnect').function()
|
||||
|
||||
expect(app.canvas.pasteFromClipboard).toHaveBeenCalledWith({
|
||||
connectInputs: true
|
||||
})
|
||||
})
|
||||
|
||||
it('should select all items', async () => {
|
||||
await findCommand('Comfy.Canvas.SelectAll').function()
|
||||
|
||||
// No arguments means "select all items on canvas"
|
||||
expect(app.canvas.selectItems).toHaveBeenCalledWith()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Undo/Redo commands', () => {
|
||||
it('Undo should call changeTracker.undo', async () => {
|
||||
await findCommand('Comfy.Undo').function()
|
||||
|
||||
expect(mockChangeTracker.undo).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('Redo should call changeTracker.redo', async () => {
|
||||
await findCommand('Comfy.Redo').function()
|
||||
|
||||
expect(mockChangeTracker.redo).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Zoom commands', () => {
|
||||
it('ZoomIn should increase scale and mark dirty', async () => {
|
||||
await findCommand('Comfy.Canvas.ZoomIn').function()
|
||||
|
||||
expect(app.canvas.ds.changeScale).toHaveBeenCalled()
|
||||
expect(app.canvas.setDirty).toHaveBeenCalledWith(true, true)
|
||||
})
|
||||
|
||||
it('ZoomOut should decrease scale and mark dirty', async () => {
|
||||
await findCommand('Comfy.Canvas.ZoomOut').function()
|
||||
|
||||
expect(app.canvas.ds.changeScale).toHaveBeenCalled()
|
||||
expect(app.canvas.setDirty).toHaveBeenCalledWith(true, true)
|
||||
})
|
||||
|
||||
it('ToggleLock should toggle readOnly state', async () => {
|
||||
app.canvas.state.readOnly = false
|
||||
|
||||
await findCommand('Comfy.Canvas.ToggleLock').function()
|
||||
expect(app.canvas.state.readOnly).toBe(true)
|
||||
|
||||
await findCommand('Comfy.Canvas.ToggleLock').function()
|
||||
expect(app.canvas.state.readOnly).toBe(false)
|
||||
})
|
||||
|
||||
it('Lock should set readOnly to true', async () => {
|
||||
await findCommand('Comfy.Canvas.Lock').function()
|
||||
expect(app.canvas.state.readOnly).toBe(true)
|
||||
})
|
||||
|
||||
it('Unlock should set readOnly to false', async () => {
|
||||
app.canvas.state.readOnly = true
|
||||
await findCommand('Comfy.Canvas.Unlock').function()
|
||||
expect(app.canvas.state.readOnly).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Canvas delete command', () => {
|
||||
it('should delete selected items when selection exists', async () => {
|
||||
app.canvas.selectedItems = new Set([
|
||||
{}
|
||||
]) as typeof app.canvas.selectedItems
|
||||
|
||||
await findCommand('Comfy.Canvas.DeleteSelectedItems').function()
|
||||
|
||||
expect(app.canvas.deleteSelected).toHaveBeenCalled()
|
||||
expect(app.canvas.setDirty).toHaveBeenCalledWith(true, true)
|
||||
})
|
||||
|
||||
it('should dispatch no-items-selected event when nothing selected', async () => {
|
||||
app.canvas.selectedItems = new Set()
|
||||
|
||||
await findCommand('Comfy.Canvas.DeleteSelectedItems').function()
|
||||
|
||||
expect(app.canvas.canvas.dispatchEvent).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ type: 'litegraph:no-items-selected' })
|
||||
)
|
||||
expect(app.canvas.deleteSelected).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('ToggleLinkVisibility command', () => {
|
||||
it('should hide links when currently visible', async () => {
|
||||
const mockStore = createMockSettingStore(false)
|
||||
mockStore.get = vi.fn().mockReturnValue(LiteGraph.SPLINE_LINK)
|
||||
vi.mocked(useSettingStore).mockReturnValue(mockStore)
|
||||
|
||||
await findCommand('Comfy.Canvas.ToggleLinkVisibility').function()
|
||||
|
||||
expect(mockStore.set).toHaveBeenCalledWith(
|
||||
'Comfy.LinkRenderMode',
|
||||
LiteGraph.HIDDEN_LINK
|
||||
)
|
||||
})
|
||||
|
||||
it('should restore links when currently hidden', async () => {
|
||||
const mockStore = createMockSettingStore(false)
|
||||
mockStore.get = vi.fn().mockReturnValue(LiteGraph.HIDDEN_LINK)
|
||||
vi.mocked(useSettingStore).mockReturnValue(mockStore)
|
||||
|
||||
await findCommand('Comfy.Canvas.ToggleLinkVisibility').function()
|
||||
|
||||
const lastSetCall = vi.mocked(mockStore.set).mock.calls.at(-1)
|
||||
expect(lastSetCall?.[0]).toBe('Comfy.LinkRenderMode')
|
||||
expect(lastSetCall?.[1]).not.toBe(LiteGraph.HIDDEN_LINK)
|
||||
})
|
||||
})
|
||||
|
||||
describe('ToggleMinimap command', () => {
|
||||
it('should toggle minimap visibility setting', async () => {
|
||||
const mockStore = createMockSettingStore(false)
|
||||
mockStore.get = vi.fn().mockReturnValue(false)
|
||||
vi.mocked(useSettingStore).mockReturnValue(mockStore)
|
||||
|
||||
await findCommand('Comfy.Canvas.ToggleMinimap').function()
|
||||
|
||||
expect(mockStore.set).toHaveBeenCalledWith('Comfy.Minimap.Visible', true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('QueuePrompt commands', () => {
|
||||
it('should show subscription dialog when not subscribed', async () => {
|
||||
mockIsActiveSubscription.value = false
|
||||
|
||||
await findCommand('Comfy.QueuePrompt').function()
|
||||
|
||||
expect(mockShowSubscriptionDialog).toHaveBeenCalled()
|
||||
expect(app.queuePrompt).not.toHaveBeenCalled()
|
||||
|
||||
mockIsActiveSubscription.value = true
|
||||
})
|
||||
|
||||
it('should queue prompt when subscribed', async () => {
|
||||
await findCommand('Comfy.QueuePrompt').function()
|
||||
|
||||
expect(app.queuePrompt).toHaveBeenCalledWith(0, 1)
|
||||
expect(mockTelemetry.trackRunButton).toHaveBeenCalled()
|
||||
expect(mockTelemetry.trackWorkflowExecution).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should queue prompt at front', async () => {
|
||||
await findCommand('Comfy.QueuePromptFront').function()
|
||||
|
||||
expect(app.queuePrompt).toHaveBeenCalledWith(-1, 1)
|
||||
})
|
||||
})
|
||||
|
||||
describe('QueueSelectedOutputNodes command', () => {
|
||||
it('should show error toast when no output nodes selected', async () => {
|
||||
await findCommand('Comfy.QueueSelectedOutputNodes').function()
|
||||
|
||||
expect(mockToastStore.add).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ severity: 'error' })
|
||||
)
|
||||
expect(app.queuePrompt).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('MoveSelectedNodes commands', () => {
|
||||
function setupMoveTest() {
|
||||
const mockNode = createMockLGraphNode({ id: 1 })
|
||||
mockNode.pos = [100, 200] as [number, number]
|
||||
mockSelectedItems.getSelectedNodes.mockReturnValue([mockNode])
|
||||
|
||||
const mockStore = createMockSettingStore(false)
|
||||
mockStore.get = vi.fn().mockReturnValue(10)
|
||||
vi.mocked(useSettingStore).mockReturnValue(mockStore)
|
||||
|
||||
return mockNode
|
||||
}
|
||||
|
||||
it('should move nodes up by grid size', async () => {
|
||||
const mockNode = setupMoveTest()
|
||||
|
||||
await findCommand('Comfy.Canvas.MoveSelectedNodes.Up').function()
|
||||
|
||||
expect(mockNode.pos).toEqual([100, 190])
|
||||
expect(app.canvas.setDirty).toHaveBeenCalledWith(true, true)
|
||||
})
|
||||
|
||||
it('should move nodes down by grid size', async () => {
|
||||
const mockNode = setupMoveTest()
|
||||
|
||||
await findCommand('Comfy.Canvas.MoveSelectedNodes.Down').function()
|
||||
|
||||
expect(mockNode.pos).toEqual([100, 210])
|
||||
})
|
||||
|
||||
it('should move nodes left by grid size', async () => {
|
||||
const mockNode = setupMoveTest()
|
||||
|
||||
await findCommand('Comfy.Canvas.MoveSelectedNodes.Left').function()
|
||||
|
||||
expect(mockNode.pos).toEqual([90, 200])
|
||||
})
|
||||
|
||||
it('should move nodes right by grid size', async () => {
|
||||
const mockNode = setupMoveTest()
|
||||
|
||||
await findCommand('Comfy.Canvas.MoveSelectedNodes.Right').function()
|
||||
|
||||
expect(mockNode.pos).toEqual([110, 200])
|
||||
})
|
||||
|
||||
it('should not move when no nodes selected', async () => {
|
||||
mockSelectedItems.getSelectedNodes.mockReturnValue([])
|
||||
|
||||
await findCommand('Comfy.Canvas.MoveSelectedNodes.Up').function()
|
||||
|
||||
expect(app.canvas.setDirty).not.toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('ToggleLinear command', () => {
|
||||
it('should toggle linear mode and track telemetry when entering', async () => {
|
||||
mockCanvasStore.linearMode = false
|
||||
|
||||
await findCommand('Comfy.ToggleLinear').function()
|
||||
|
||||
expect(mockCanvasStore.linearMode).toBe(true)
|
||||
expect(mockTelemetry.trackEnterLinear).toHaveBeenCalledWith({
|
||||
source: 'keybind'
|
||||
})
|
||||
})
|
||||
|
||||
it('should use provided source metadata', async () => {
|
||||
mockCanvasStore.linearMode = false
|
||||
|
||||
await findCommand('Comfy.ToggleLinear').function({
|
||||
source: 'menu'
|
||||
})
|
||||
|
||||
expect(mockTelemetry.trackEnterLinear).toHaveBeenCalledWith({
|
||||
source: 'menu'
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('ToggleQPOV2 command', () => {
|
||||
it('should toggle queue panel v2 setting', async () => {
|
||||
const mockStore = createMockSettingStore(false)
|
||||
mockStore.get = vi.fn().mockReturnValue(false)
|
||||
vi.mocked(useSettingStore).mockReturnValue(mockStore)
|
||||
|
||||
await findCommand('Comfy.ToggleQPOV2').function()
|
||||
|
||||
expect(mockStore.set).toHaveBeenCalledWith('Comfy.Queue.QPOV2', true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Memory commands', () => {
|
||||
it('UnloadModels should show error when setting is disabled', async () => {
|
||||
const mockStore = createMockSettingStore(false)
|
||||
mockStore.get = vi.fn().mockReturnValue(false)
|
||||
vi.mocked(useSettingStore).mockReturnValue(mockStore)
|
||||
|
||||
await findCommand('Comfy.Memory.UnloadModels').function()
|
||||
|
||||
expect(mockToastStore.add).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ severity: 'error' })
|
||||
)
|
||||
expect(api.freeMemory).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('UnloadModels should call api.freeMemory when setting is enabled', async () => {
|
||||
const mockStore = createMockSettingStore(false)
|
||||
mockStore.get = vi.fn().mockReturnValue(true)
|
||||
vi.mocked(useSettingStore).mockReturnValue(mockStore)
|
||||
|
||||
await findCommand('Comfy.Memory.UnloadModels').function()
|
||||
|
||||
expect(api.freeMemory).toHaveBeenCalledWith({
|
||||
freeExecutionCache: false
|
||||
})
|
||||
})
|
||||
|
||||
it('UnloadModelsAndExecutionCache should call api.freeMemory with cache flag', async () => {
|
||||
const mockStore = createMockSettingStore(false)
|
||||
mockStore.get = vi.fn().mockReturnValue(true)
|
||||
vi.mocked(useSettingStore).mockReturnValue(mockStore)
|
||||
|
||||
await findCommand('Comfy.Memory.UnloadModelsAndExecutionCache').function()
|
||||
|
||||
expect(api.freeMemory).toHaveBeenCalledWith({
|
||||
freeExecutionCache: true
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe('FitView command', () => {
|
||||
it('should show error toast when canvas is empty', async () => {
|
||||
Object.defineProperty(app.canvas, 'empty', {
|
||||
value: true,
|
||||
writable: true
|
||||
})
|
||||
|
||||
await findCommand('Comfy.Canvas.FitView').function()
|
||||
|
||||
expect(mockToastStore.add).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ severity: 'error' })
|
||||
)
|
||||
expect(app.canvas.fitViewToSelectionAnimated).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('should fit view when canvas has content', async () => {
|
||||
Object.defineProperty(app.canvas, 'empty', {
|
||||
value: false,
|
||||
writable: true
|
||||
})
|
||||
|
||||
await findCommand('Comfy.Canvas.FitView').function()
|
||||
|
||||
expect(app.canvas.fitViewToSelectionAnimated).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('Interrupt command', () => {
|
||||
it('should call api.interrupt and show toast', async () => {
|
||||
await findCommand('Comfy.Interrupt').function()
|
||||
|
||||
expect(api.interrupt).toHaveBeenCalled()
|
||||
expect(mockToastStore.add).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ severity: 'info' })
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('OpenWorkflow command', () => {
|
||||
it('should call app.ui.loadFile', async () => {
|
||||
await findCommand('Comfy.OpenWorkflow').function()
|
||||
|
||||
expect(app.ui.loadFile).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('RefreshNodeDefinitions command', () => {
|
||||
it('should call app.refreshComboInNodes', async () => {
|
||||
await findCommand('Comfy.RefreshNodeDefinitions').function()
|
||||
|
||||
expect(app.refreshComboInNodes).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('OpenClipspace command', () => {
|
||||
it('should call app.openClipspace', async () => {
|
||||
await findCommand('Comfy.OpenClipspace').function()
|
||||
|
||||
expect(app.openClipspace).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('ToggleTheme command', () => {
|
||||
it('should switch from dark to light theme', async () => {
|
||||
const mockStore = createMockSettingStore(false)
|
||||
vi.mocked(useSettingStore).mockReturnValue(mockStore)
|
||||
|
||||
await findCommand('Comfy.ToggleTheme').function()
|
||||
|
||||
expect(mockStore.set).toHaveBeenCalledWith(
|
||||
'Comfy.ColorPalette',
|
||||
expect.any(String)
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('ToggleSelectedNodes commands', () => {
|
||||
it('Mute should toggle selected nodes mode and mark dirty', async () => {
|
||||
await findCommand('Comfy.Canvas.ToggleSelectedNodes.Mute').function()
|
||||
|
||||
expect(mockSelectedItems.toggleSelectedNodesMode).toHaveBeenCalled()
|
||||
expect(app.canvas.setDirty).toHaveBeenCalledWith(true, true)
|
||||
})
|
||||
|
||||
it('Bypass should toggle selected nodes mode and mark dirty', async () => {
|
||||
await findCommand('Comfy.Canvas.ToggleSelectedNodes.Bypass').function()
|
||||
|
||||
expect(mockSelectedItems.toggleSelectedNodesMode).toHaveBeenCalled()
|
||||
expect(app.canvas.setDirty).toHaveBeenCalledWith(true, true)
|
||||
})
|
||||
|
||||
it('Pin should toggle pin state on each selected node', async () => {
|
||||
const mockNode = createMockLGraphNode({ id: 1 })
|
||||
Object.defineProperty(mockNode, 'pinned', {
|
||||
value: false,
|
||||
writable: true
|
||||
})
|
||||
mockNode.pin = vi.fn()
|
||||
mockSelectedItems.getSelectedNodes.mockReturnValue([mockNode])
|
||||
|
||||
await findCommand('Comfy.Canvas.ToggleSelectedNodes.Pin').function()
|
||||
|
||||
expect(mockNode.pin).toHaveBeenCalledWith(true)
|
||||
expect(app.canvas.setDirty).toHaveBeenCalledWith(true, true)
|
||||
})
|
||||
|
||||
it('Collapse should collapse each selected node', async () => {
|
||||
const mockNode = createMockLGraphNode({ id: 1 })
|
||||
mockNode.collapse = vi.fn()
|
||||
mockSelectedItems.getSelectedNodes.mockReturnValue([mockNode])
|
||||
|
||||
await findCommand('Comfy.Canvas.ToggleSelectedNodes.Collapse').function()
|
||||
|
||||
expect(mockNode.collapse).toHaveBeenCalled()
|
||||
expect(app.canvas.setDirty).toHaveBeenCalledWith(true, true)
|
||||
})
|
||||
|
||||
it('Resize should compute and set optimal size', async () => {
|
||||
const mockNode = createMockLGraphNode({ id: 1 })
|
||||
mockNode.computeSize = vi.fn().mockReturnValue([200, 100])
|
||||
mockNode.setSize = vi.fn()
|
||||
mockSelectedItems.getSelectedNodes.mockReturnValue([mockNode])
|
||||
|
||||
await findCommand('Comfy.Canvas.Resize').function()
|
||||
|
||||
expect(mockNode.computeSize).toHaveBeenCalled()
|
||||
expect(mockNode.setSize).toHaveBeenCalledWith([200, 100])
|
||||
expect(app.canvas.setDirty).toHaveBeenCalledWith(true, true)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Help commands', () => {
|
||||
it('OpenComfyUIIssues should open GitHub issues and track telemetry', async () => {
|
||||
await findCommand('Comfy.Help.OpenComfyUIIssues').function()
|
||||
|
||||
expect(mockTelemetry.trackHelpResourceClicked).toHaveBeenCalledWith({
|
||||
resource_type: 'github',
|
||||
is_external: true,
|
||||
source: 'menu'
|
||||
})
|
||||
expect(window.open).toHaveBeenCalledWith(
|
||||
'https://github.com/issues',
|
||||
'_blank'
|
||||
)
|
||||
})
|
||||
|
||||
it('OpenComfyUIDocs should open docs and track telemetry', async () => {
|
||||
await findCommand('Comfy.Help.OpenComfyUIDocs').function()
|
||||
|
||||
expect(mockTelemetry.trackHelpResourceClicked).toHaveBeenCalledWith({
|
||||
resource_type: 'docs',
|
||||
is_external: true,
|
||||
source: 'menu'
|
||||
})
|
||||
expect(window.open).toHaveBeenCalledWith(
|
||||
'https://docs.test.com',
|
||||
'_blank'
|
||||
)
|
||||
})
|
||||
|
||||
it('OpenComfyOrgDiscord should open Discord and track telemetry', async () => {
|
||||
await findCommand('Comfy.Help.OpenComfyOrgDiscord').function()
|
||||
|
||||
expect(mockTelemetry.trackHelpResourceClicked).toHaveBeenCalledWith({
|
||||
resource_type: 'discord',
|
||||
is_external: true,
|
||||
source: 'menu'
|
||||
})
|
||||
expect(window.open).toHaveBeenCalledWith(
|
||||
'https://discord.gg/test',
|
||||
'_blank'
|
||||
)
|
||||
})
|
||||
|
||||
it('OpenComfyUIForum should open forum and track telemetry', async () => {
|
||||
await findCommand('Comfy.Help.OpenComfyUIForum').function()
|
||||
|
||||
expect(mockTelemetry.trackHelpResourceClicked).toHaveBeenCalledWith({
|
||||
resource_type: 'help_feedback',
|
||||
is_external: true,
|
||||
source: 'menu'
|
||||
})
|
||||
expect(window.open).toHaveBeenCalledWith(
|
||||
'https://forum.test.com',
|
||||
'_blank'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('GroupSelectedNodes command', () => {
|
||||
it('should show error toast when nothing selected', async () => {
|
||||
app.canvas.selectedItems = new Set()
|
||||
|
||||
await findCommand('Comfy.Graph.GroupSelectedNodes').function()
|
||||
|
||||
expect(mockToastStore.add).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ severity: 'error' })
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('ConvertToSubgraph command', () => {
|
||||
it('should show error toast when conversion fails', async () => {
|
||||
app.canvas.graph!.convertToSubgraph = vi.fn().mockReturnValue(null)
|
||||
|
||||
await findCommand('Comfy.Graph.ConvertToSubgraph').function()
|
||||
|
||||
expect(mockToastStore.add).toHaveBeenCalledWith(
|
||||
expect.objectContaining({ severity: 'error' })
|
||||
)
|
||||
})
|
||||
|
||||
it('should select the new subgraph node on success', async () => {
|
||||
const mockNode = createMockLGraphNode({ id: 1 })
|
||||
app.canvas.graph!.convertToSubgraph = vi
|
||||
.fn()
|
||||
.mockReturnValue({ node: mockNode })
|
||||
|
||||
await findCommand('Comfy.Graph.ConvertToSubgraph').function()
|
||||
|
||||
expect(app.canvas.select).toHaveBeenCalledWith(mockNode)
|
||||
expect(mockCanvasStore.updateSelectedItems).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('ContactSupport command', () => {
|
||||
it('should open support URL in new window', async () => {
|
||||
await findCommand('Comfy.ContactSupport').function()
|
||||
|
||||
expect(window.open).toHaveBeenCalledWith(
|
||||
'https://support.test.com',
|
||||
'_blank',
|
||||
'noopener,noreferrer'
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Subgraph metadata commands', () => {
|
||||
beforeEach(() => {
|
||||
mockSubgraph.extra = {}
|
||||
@@ -1182,7 +580,7 @@ describe('useCoreCommands', () => {
|
||||
it('Comfy.Help.OpenComfyUIIssues opens the GitHub issues URL and tracks telemetry', async () => {
|
||||
await findCmd('Comfy.Help.OpenComfyUIIssues').function()
|
||||
|
||||
expect(mockTelemetry.trackHelpResourceClicked).toHaveBeenCalledWith(
|
||||
expect(mockTrackHelpResourceClicked).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
resource_type: 'github',
|
||||
is_external: true,
|
||||
@@ -1195,7 +593,7 @@ describe('useCoreCommands', () => {
|
||||
it('Comfy.Help.OpenComfyOrgDiscord opens the Discord URL and tracks telemetry', async () => {
|
||||
await findCmd('Comfy.Help.OpenComfyOrgDiscord').function()
|
||||
|
||||
expect(mockTelemetry.trackHelpResourceClicked).toHaveBeenCalledWith(
|
||||
expect(mockTrackHelpResourceClicked).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
resource_type: 'discord'
|
||||
})
|
||||
|
||||
@@ -334,6 +334,22 @@ describe('hasUnpromotedWidgets', () => {
|
||||
|
||||
expect(hasUnpromotedWidgets(subgraphNode)).toBe(false)
|
||||
})
|
||||
|
||||
it('returns false (does not throw) when SubgraphNode is detached', () => {
|
||||
const subgraph = createTestSubgraph()
|
||||
const subgraphNode = createTestSubgraphNode(subgraph)
|
||||
const parentGraph = subgraphNode.graph!
|
||||
parentGraph.add(subgraphNode)
|
||||
const interiorNode = new LGraphNode('InnerNode')
|
||||
subgraph.add(interiorNode)
|
||||
interiorNode.addWidget('text', 'seed', '123', () => {})
|
||||
|
||||
parentGraph.remove(subgraphNode)
|
||||
|
||||
expect(subgraphNode.graph).toBeNull()
|
||||
expect(() => hasUnpromotedWidgets(subgraphNode)).not.toThrow()
|
||||
expect(hasUnpromotedWidgets(subgraphNode)).toBe(false)
|
||||
})
|
||||
})
|
||||
|
||||
describe('isLinkedPromotion', () => {
|
||||
|
||||
@@ -360,6 +360,7 @@ export function pruneDisconnected(subgraphNode: SubgraphNode) {
|
||||
}
|
||||
|
||||
export function hasUnpromotedWidgets(subgraphNode: SubgraphNode): boolean {
|
||||
if (!subgraphNode.graph) return false
|
||||
const promotionStore = usePromotionStore()
|
||||
const { id: subgraphNodeId, rootGraph, subgraph } = subgraphNode
|
||||
|
||||
|
||||
@@ -497,8 +497,7 @@ useExtensionService().registerExtension({
|
||||
const settings = {
|
||||
loadFolder: 'output',
|
||||
modelWidget: modelWidget,
|
||||
cameraState: cameraState,
|
||||
silentOnNotFound: true
|
||||
cameraState: cameraState
|
||||
}
|
||||
|
||||
config.configure(settings)
|
||||
@@ -529,8 +528,7 @@ useExtensionService().registerExtension({
|
||||
loadFolder: 'output',
|
||||
modelWidget: modelWidget,
|
||||
cameraState: cameraState,
|
||||
bgImagePath: bgImagePath,
|
||||
silentOnNotFound: true
|
||||
bgImagePath: bgImagePath
|
||||
}
|
||||
|
||||
config.configure(settings)
|
||||
|
||||
@@ -1,13 +1,11 @@
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type Load3d from '@/extensions/core/load3d/Load3d'
|
||||
import Load3DConfiguration from '@/extensions/core/load3d/Load3DConfiguration'
|
||||
import Load3dUtils from '@/extensions/core/load3d/Load3dUtils'
|
||||
import type {
|
||||
GizmoConfig,
|
||||
ModelConfig
|
||||
} from '@/extensions/core/load3d/interfaces'
|
||||
import type { IBaseWidget } from '@/lib/litegraph/src/types/widgets'
|
||||
import type { Dictionary } from '@/lib/litegraph/src/interfaces'
|
||||
import type { NodeProperty } from '@/lib/litegraph/src/LGraphNode'
|
||||
|
||||
@@ -164,88 +162,3 @@ describe('Load3DConfiguration.loadModelConfig', () => {
|
||||
expect(result.gizmo).toEqual(fullGizmo)
|
||||
})
|
||||
})
|
||||
|
||||
describe('Load3DConfiguration.silentOnNotFound propagation', () => {
|
||||
let loadModelSpy: ReturnType<typeof vi.fn>
|
||||
|
||||
function makeLoad3dMock(): Load3d {
|
||||
loadModelSpy = vi.fn().mockResolvedValue(undefined)
|
||||
return {
|
||||
loadModel: loadModelSpy,
|
||||
setUpDirection: vi.fn(),
|
||||
setMaterialMode: vi.fn(),
|
||||
setTargetSize: vi.fn(),
|
||||
setCameraState: vi.fn(),
|
||||
toggleGrid: vi.fn(),
|
||||
setBackgroundColor: vi.fn(),
|
||||
setBackgroundImage: vi.fn().mockResolvedValue(undefined),
|
||||
setBackgroundRenderMode: vi.fn(),
|
||||
toggleCamera: vi.fn(),
|
||||
setFOV: vi.fn(),
|
||||
setLightIntensity: vi.fn(),
|
||||
setHDRIIntensity: vi.fn(),
|
||||
setHDRIAsBackground: vi.fn(),
|
||||
setHDRIEnabled: vi.fn()
|
||||
} as unknown as Load3d
|
||||
}
|
||||
|
||||
async function flush() {
|
||||
await new Promise<void>((resolve) => setTimeout(resolve, 0))
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
vi.mocked(Load3dUtils.splitFilePath).mockReturnValue(['', 'model.glb'])
|
||||
vi.mocked(Load3dUtils.getResourceURL).mockReturnValue(
|
||||
'/view?filename=model.glb'
|
||||
)
|
||||
})
|
||||
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks()
|
||||
})
|
||||
|
||||
it('configureForSaveMesh forwards silentOnNotFound: true to loadModel', async () => {
|
||||
const config = new Load3DConfiguration(makeLoad3dMock())
|
||||
config.configureForSaveMesh('output', 'model.glb', {
|
||||
silentOnNotFound: true
|
||||
})
|
||||
await flush()
|
||||
expect(loadModelSpy).toHaveBeenCalledWith(expect.any(String), 'model.glb', {
|
||||
silentOnNotFound: true
|
||||
})
|
||||
})
|
||||
|
||||
it('configureForSaveMesh uses silentOnNotFound: false when option is omitted', async () => {
|
||||
const config = new Load3DConfiguration(makeLoad3dMock())
|
||||
config.configureForSaveMesh('output', 'model.glb')
|
||||
await flush()
|
||||
expect(loadModelSpy).toHaveBeenCalledWith(expect.any(String), 'model.glb', {
|
||||
silentOnNotFound: false
|
||||
})
|
||||
})
|
||||
|
||||
it('configure forwards silentOnNotFound: true from settings to loadModel', async () => {
|
||||
const config = new Load3DConfiguration(makeLoad3dMock())
|
||||
config.configure({
|
||||
modelWidget: { value: 'model.glb' } as unknown as IBaseWidget,
|
||||
loadFolder: 'output',
|
||||
silentOnNotFound: true
|
||||
})
|
||||
await flush()
|
||||
expect(loadModelSpy).toHaveBeenCalledWith(expect.any(String), 'model.glb', {
|
||||
silentOnNotFound: true
|
||||
})
|
||||
})
|
||||
|
||||
it('configure uses silentOnNotFound: false when setting is omitted', async () => {
|
||||
const config = new Load3DConfiguration(makeLoad3dMock())
|
||||
config.configure({
|
||||
modelWidget: { value: 'model.glb' } as unknown as IBaseWidget,
|
||||
loadFolder: 'output'
|
||||
})
|
||||
await flush()
|
||||
expect(loadModelSpy).toHaveBeenCalledWith(expect.any(String), 'model.glb', {
|
||||
silentOnNotFound: false
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -21,7 +21,6 @@ type Load3DConfigurationSettings = {
|
||||
width?: IBaseWidget
|
||||
height?: IBaseWidget
|
||||
bgImagePath?: string
|
||||
silentOnNotFound?: boolean
|
||||
}
|
||||
|
||||
class Load3DConfiguration {
|
||||
@@ -30,16 +29,8 @@ class Load3DConfiguration {
|
||||
private properties?: Dictionary<NodeProperty | undefined>
|
||||
) {}
|
||||
|
||||
configureForSaveMesh(
|
||||
loadFolder: 'input' | 'output',
|
||||
filePath: string,
|
||||
options?: { silentOnNotFound?: boolean }
|
||||
) {
|
||||
this.setupModelHandlingForSaveMesh(
|
||||
filePath,
|
||||
loadFolder,
|
||||
options?.silentOnNotFound ?? false
|
||||
)
|
||||
configureForSaveMesh(loadFolder: 'input' | 'output', filePath: string) {
|
||||
this.setupModelHandlingForSaveMesh(filePath, loadFolder)
|
||||
this.setupDefaultProperties()
|
||||
}
|
||||
|
||||
@@ -47,8 +38,7 @@ class Load3DConfiguration {
|
||||
this.setupModelHandling(
|
||||
setting.modelWidget,
|
||||
setting.loadFolder,
|
||||
setting.cameraState,
|
||||
setting.silentOnNotFound ?? false
|
||||
setting.cameraState
|
||||
)
|
||||
this.setupTargetSize(setting.width, setting.height)
|
||||
this.setupDefaultProperties(setting.bgImagePath)
|
||||
@@ -68,16 +58,8 @@ class Load3DConfiguration {
|
||||
}
|
||||
}
|
||||
|
||||
private setupModelHandlingForSaveMesh(
|
||||
filePath: string,
|
||||
loadFolder: string,
|
||||
silentOnNotFound: boolean
|
||||
) {
|
||||
const onModelWidgetUpdate = this.createModelUpdateHandler(
|
||||
loadFolder,
|
||||
undefined,
|
||||
silentOnNotFound
|
||||
)
|
||||
private setupModelHandlingForSaveMesh(filePath: string, loadFolder: string) {
|
||||
const onModelWidgetUpdate = this.createModelUpdateHandler(loadFolder)
|
||||
|
||||
if (filePath) {
|
||||
onModelWidgetUpdate(filePath)
|
||||
@@ -87,13 +69,11 @@ class Load3DConfiguration {
|
||||
private setupModelHandling(
|
||||
modelWidget: IBaseWidget,
|
||||
loadFolder: string,
|
||||
cameraState?: CameraState,
|
||||
silentOnNotFound: boolean = false
|
||||
cameraState?: CameraState
|
||||
) {
|
||||
const onModelWidgetUpdate = this.createModelUpdateHandler(
|
||||
loadFolder,
|
||||
cameraState,
|
||||
silentOnNotFound
|
||||
cameraState
|
||||
)
|
||||
if (modelWidget.value) {
|
||||
onModelWidgetUpdate(modelWidget.value)
|
||||
@@ -261,8 +241,7 @@ class Load3DConfiguration {
|
||||
|
||||
private createModelUpdateHandler(
|
||||
loadFolder: string,
|
||||
cameraState?: CameraState,
|
||||
silentOnNotFound: boolean = false
|
||||
cameraState?: CameraState
|
||||
) {
|
||||
let isFirstLoad = true
|
||||
return async (value: string | number | boolean | object) => {
|
||||
@@ -279,7 +258,7 @@ class Load3DConfiguration {
|
||||
)
|
||||
)
|
||||
|
||||
await this.load3d.loadModel(modelUrl, filename, { silentOnNotFound })
|
||||
await this.load3d.loadModel(modelUrl, filename)
|
||||
|
||||
const modelConfig = this.loadModelConfig()
|
||||
this.applyModelConfig(modelConfig)
|
||||
|
||||
@@ -22,7 +22,6 @@ import type {
|
||||
EventCallback,
|
||||
GizmoMode,
|
||||
Load3DOptions,
|
||||
LoadModelOptions,
|
||||
MaterialMode,
|
||||
UpDirection
|
||||
} from './interfaces'
|
||||
@@ -501,11 +500,7 @@ class Load3d {
|
||||
return this._loadGeneration
|
||||
}
|
||||
|
||||
async loadModel(
|
||||
url: string,
|
||||
originalFileName?: string,
|
||||
options?: LoadModelOptions
|
||||
): Promise<void> {
|
||||
async loadModel(url: string, originalFileName?: string): Promise<void> {
|
||||
this._loadGeneration += 1
|
||||
|
||||
if (this.loadingPromise) {
|
||||
@@ -514,11 +509,7 @@ class Load3d {
|
||||
} catch (e) {}
|
||||
}
|
||||
|
||||
this.loadingPromise = this._loadModelInternal(
|
||||
url,
|
||||
originalFileName,
|
||||
options
|
||||
)
|
||||
this.loadingPromise = this._loadModelInternal(url, originalFileName)
|
||||
return this.loadingPromise
|
||||
}
|
||||
|
||||
@@ -534,8 +525,7 @@ class Load3d {
|
||||
|
||||
private async _loadModelInternal(
|
||||
url: string,
|
||||
originalFileName?: string,
|
||||
options?: LoadModelOptions
|
||||
originalFileName?: string
|
||||
): Promise<void> {
|
||||
this.cameraManager.reset()
|
||||
this.controlsManager.reset()
|
||||
@@ -543,7 +533,7 @@ class Load3d {
|
||||
this.modelManager.clearModel()
|
||||
this.animationManager.dispose()
|
||||
|
||||
await this.loaderManager.loadModel(url, originalFileName, options)
|
||||
await this.loaderManager.loadModel(url, originalFileName)
|
||||
|
||||
// Auto-detect and setup animations if present
|
||||
if (this.modelManager.currentModel) {
|
||||
|
||||
@@ -436,55 +436,6 @@ describe('LoaderManager', () => {
|
||||
expect(consoleError).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('suppresses the alert on a 404 when silentOnNotFound is set', async () => {
|
||||
const { lm } = makeLoaderManager()
|
||||
const notFound = new Error(
|
||||
'fetch for "..." responded with 404: Not Found'
|
||||
)
|
||||
meshLoad.mockRejectedValueOnce(notFound)
|
||||
const consoleError = vi
|
||||
.spyOn(console, 'error')
|
||||
.mockImplementation(() => {})
|
||||
|
||||
await lm.loadModel('api/view?filename=cube.glb', undefined, {
|
||||
silentOnNotFound: true
|
||||
})
|
||||
|
||||
expect(consoleError).toHaveBeenCalled()
|
||||
expect(addAlert).not.toHaveBeenCalledWith(
|
||||
'toastMessages.errorLoadingModel'
|
||||
)
|
||||
})
|
||||
|
||||
it('detects a 404 from the response status field on three.js HttpError', async () => {
|
||||
const { lm } = makeLoaderManager()
|
||||
const httpError = Object.assign(new Error('not found'), {
|
||||
response: { status: 404 }
|
||||
})
|
||||
meshLoad.mockRejectedValueOnce(httpError)
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
|
||||
await lm.loadModel('api/view?filename=cube.glb', undefined, {
|
||||
silentOnNotFound: true
|
||||
})
|
||||
|
||||
expect(addAlert).not.toHaveBeenCalledWith(
|
||||
'toastMessages.errorLoadingModel'
|
||||
)
|
||||
})
|
||||
|
||||
it('still alerts on non-404 errors when silentOnNotFound is set', async () => {
|
||||
const { lm } = makeLoaderManager()
|
||||
meshLoad.mockRejectedValueOnce(new Error('parse failure: bad header'))
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
|
||||
await lm.loadModel('api/view?filename=cube.glb', undefined, {
|
||||
silentOnNotFound: true
|
||||
})
|
||||
|
||||
expect(addAlert).toHaveBeenCalledWith('toastMessages.errorLoadingModel')
|
||||
})
|
||||
|
||||
it('discards the result of a stale load when a newer one has started', async () => {
|
||||
const { lm, modelManager, eventManager } = makeLoaderManager()
|
||||
|
||||
|
||||
@@ -10,24 +10,10 @@ import { PointCloudModelAdapter, getPLYEngine } from './PointCloudModelAdapter'
|
||||
import { SplatModelAdapter } from './SplatModelAdapter'
|
||||
import type {
|
||||
EventManagerInterface,
|
||||
LoadModelOptions,
|
||||
LoaderManagerInterface,
|
||||
ModelManagerInterface
|
||||
} from './interfaces'
|
||||
|
||||
/**
|
||||
* three.js's HttpError attaches the failed `Response` to the thrown Error.
|
||||
* fetchModelData throws a plain Error whose message embeds the status code.
|
||||
* Detect both forms so we can keep the toast for parse / network failures
|
||||
* but stay silent on 404 when the caller opted in.
|
||||
*/
|
||||
function isNotFoundError(error: unknown): boolean {
|
||||
if (!(error instanceof Error)) return false
|
||||
const withResponse = error as Error & { response?: { status?: number } }
|
||||
if (withResponse.response?.status === 404) return true
|
||||
return /\b404\b/.test(error.message)
|
||||
}
|
||||
|
||||
/**
|
||||
* Default adapter set: mesh + pointCloud + splat. Each adapter declares the
|
||||
* file extensions it owns; LoaderManager picks one by extension.
|
||||
@@ -67,11 +53,7 @@ export class LoaderManager implements LoaderManagerInterface {
|
||||
|
||||
dispose(): void {}
|
||||
|
||||
async loadModel(
|
||||
url: string,
|
||||
originalFileName?: string,
|
||||
options?: LoadModelOptions
|
||||
): Promise<void> {
|
||||
async loadModel(url: string, originalFileName?: string): Promise<void> {
|
||||
const loadId = ++this.currentLoadId
|
||||
|
||||
try {
|
||||
@@ -123,9 +105,7 @@ export class LoaderManager implements LoaderManagerInterface {
|
||||
if (loadId === this.currentLoadId) {
|
||||
this.eventManager.emitEvent('modelLoadingEnd', null)
|
||||
console.error('Error loading model:', error)
|
||||
if (!(options?.silentOnNotFound && isNotFoundError(error))) {
|
||||
useToastStore().addAlert(t('toastMessages.errorLoadingModel'))
|
||||
}
|
||||
useToastStore().addAlert(t('toastMessages.errorLoadingModel'))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -102,16 +102,6 @@ function createMeshModel(name = 'TestModel'): THREE.Group {
|
||||
return group
|
||||
}
|
||||
|
||||
function createPointsModel(name = 'TestModel'): THREE.Group {
|
||||
const geometry = new THREE.BufferGeometry()
|
||||
const material = new THREE.PointsMaterial({ color: 0xff0000 })
|
||||
const points = new THREE.Points(geometry, material)
|
||||
const group = new THREE.Group()
|
||||
group.name = name
|
||||
group.add(points)
|
||||
return group
|
||||
}
|
||||
|
||||
describe('SceneModelManager', () => {
|
||||
describe('constructor', () => {
|
||||
it('initializes default state', () => {
|
||||
@@ -321,20 +311,6 @@ describe('SceneModelManager', () => {
|
||||
expect(geoDispose).toHaveBeenCalled()
|
||||
expect(matDispose).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('disposes points geometry and materials', async () => {
|
||||
const { manager } = createManager()
|
||||
const model = createPointsModel()
|
||||
const points = model.children[0] as THREE.Points
|
||||
const geoDispose = vi.spyOn(points.geometry, 'dispose')
|
||||
const matDispose = vi.spyOn(points.material as THREE.Material, 'dispose')
|
||||
|
||||
await manager.setupModel(model)
|
||||
manager.clearModel()
|
||||
|
||||
expect(geoDispose).toHaveBeenCalled()
|
||||
expect(matDispose).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
|
||||
describe('reset', () => {
|
||||
|
||||
@@ -328,7 +328,7 @@ export class SceneModelManager implements ModelManagerInterface {
|
||||
this.scene.remove(obj)
|
||||
|
||||
obj.traverse((child) => {
|
||||
if (child instanceof THREE.Mesh || child instanceof THREE.Points) {
|
||||
if (child instanceof THREE.Mesh) {
|
||||
child.geometry?.dispose()
|
||||
if (Array.isArray(child.material)) {
|
||||
child.material.forEach((material) => material.dispose())
|
||||
|
||||
@@ -198,23 +198,8 @@ export interface ModelManagerInterface {
|
||||
setupModelMaterials(model: THREE.Object3D): void
|
||||
}
|
||||
|
||||
export interface LoadModelOptions {
|
||||
/**
|
||||
* When true, suppress the user-facing toast for file-not-found
|
||||
* (HTTP 404) errors. Other errors (parse failures, network drops)
|
||||
* still surface a toast. Use for "preview" surfaces whose model
|
||||
* file is server-produced and may legitimately be absent locally
|
||||
* (e.g. shared workflows on a fresh machine).
|
||||
*/
|
||||
silentOnNotFound?: boolean
|
||||
}
|
||||
|
||||
export interface LoaderManagerInterface {
|
||||
init(): void
|
||||
dispose(): void
|
||||
loadModel(
|
||||
url: string,
|
||||
originalFileName?: string,
|
||||
options?: LoadModelOptions
|
||||
): Promise<void>
|
||||
loadModel(url: string, originalFileName?: string): Promise<void>
|
||||
}
|
||||
|
||||
@@ -103,9 +103,7 @@ useExtensionService().registerExtension({
|
||||
|
||||
const loadFolder = fileInfo.type as 'input' | 'output'
|
||||
|
||||
config.configureForSaveMesh(loadFolder, filePath, {
|
||||
silentOnNotFound: true
|
||||
})
|
||||
config.configureForSaveMesh(loadFolder, filePath)
|
||||
|
||||
if (isAssetPreviewSupported()) {
|
||||
const filename = fileInfo.filename ?? ''
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { createTestingPinia } from '@pinia/testing'
|
||||
import { setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it } from 'vitest'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type { NodeId, Subgraph } from '@/lib/litegraph/src/litegraph'
|
||||
import {
|
||||
LGraph,
|
||||
LGraphGroup,
|
||||
LGraphNode,
|
||||
LiteGraph,
|
||||
LLink,
|
||||
@@ -329,6 +330,96 @@ describe('Graph Clearing and Callbacks', () => {
|
||||
})
|
||||
})
|
||||
|
||||
describe('node:before-removed event', () => {
|
||||
it('fires node:before-removed for a successful node removal', () => {
|
||||
const graph = new LGraph()
|
||||
const node = new LGraphNode('test')
|
||||
graph.add(node)
|
||||
|
||||
const events: { node: LGraphNode; graphAtDispatch: unknown }[] = []
|
||||
graph.events.addEventListener('node:before-removed', (e) => {
|
||||
events.push({
|
||||
node: e.detail.node,
|
||||
graphAtDispatch: e.detail.node.graph
|
||||
})
|
||||
})
|
||||
|
||||
graph.remove(node)
|
||||
|
||||
expect(events).toHaveLength(1)
|
||||
expect(events[0].node).toBe(node)
|
||||
expect(events[0].graphAtDispatch).toBe(graph)
|
||||
expect(node.graph).toBeNull()
|
||||
})
|
||||
|
||||
it('does not fire node:before-removed for a node not in the graph', () => {
|
||||
const graph = new LGraph()
|
||||
const node = new LGraphNode('test')
|
||||
|
||||
const fired = vi.fn()
|
||||
graph.events.addEventListener('node:before-removed', fired)
|
||||
|
||||
graph.remove(node)
|
||||
|
||||
expect(fired).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does not fire node:before-removed when removing an LGraphGroup', () => {
|
||||
const graph = new LGraph()
|
||||
const group = new LGraphGroup('test-group')
|
||||
graph.add(group)
|
||||
|
||||
const fired = vi.fn()
|
||||
graph.events.addEventListener('node:before-removed', fired)
|
||||
|
||||
graph.remove(group)
|
||||
|
||||
expect(fired).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does not fire node:before-removed when ignore_remove is set', () => {
|
||||
const graph = new LGraph()
|
||||
const node = new LGraphNode('test')
|
||||
graph.add(node)
|
||||
node.ignore_remove = true
|
||||
|
||||
const fired = vi.fn()
|
||||
graph.events.addEventListener('node:before-removed', fired)
|
||||
|
||||
graph.remove(node)
|
||||
|
||||
expect(fired).not.toHaveBeenCalled()
|
||||
expect(graph.nodes).toContain(node)
|
||||
})
|
||||
|
||||
it('fires node:before-removed before node.onRemoved and detach', () => {
|
||||
const graph = new LGraph()
|
||||
const node = new LGraphNode('test')
|
||||
graph.add(node)
|
||||
|
||||
const order: string[] = []
|
||||
graph.events.addEventListener('node:before-removed', () => {
|
||||
order.push(
|
||||
`before-removed(graph=${node.graph === graph ? 'set' : 'null'})`
|
||||
)
|
||||
})
|
||||
node.onRemoved = () => {
|
||||
order.push(`onRemoved(graph=${node.graph === graph ? 'set' : 'null'})`)
|
||||
}
|
||||
graph.onNodeRemoved = (n) => {
|
||||
order.push(`onNodeRemoved(graph=${n.graph === null ? 'null' : 'set'})`)
|
||||
}
|
||||
|
||||
graph.remove(node)
|
||||
|
||||
expect(order).toEqual([
|
||||
'before-removed(graph=set)',
|
||||
'onRemoved(graph=set)',
|
||||
'onNodeRemoved(graph=null)'
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
describe('Subgraph Definition Garbage Collection', () => {
|
||||
beforeEach(() => {
|
||||
setActivePinia(createTestingPinia({ stubActions: false }))
|
||||
@@ -381,6 +472,53 @@ describe('Subgraph Definition Garbage Collection', () => {
|
||||
expect(graphRemovedNodeIds.size).toBe(2)
|
||||
})
|
||||
|
||||
it('subgraph-definition GC dispatches node:before-removed on the inner subgraph for each inner node', () => {
|
||||
const rootGraph = new LGraph()
|
||||
const { subgraph, innerNodes } = createSubgraphWithNodes(rootGraph, 2)
|
||||
|
||||
const dispatched: { node: LGraphNode; graphAtDispatch: unknown }[] = []
|
||||
subgraph.events.addEventListener('node:before-removed', (e) => {
|
||||
dispatched.push({
|
||||
node: e.detail.node,
|
||||
graphAtDispatch: e.detail.node.graph
|
||||
})
|
||||
})
|
||||
|
||||
const subgraphNode = createTestSubgraphNode(subgraph, { pos: [100, 100] })
|
||||
rootGraph.add(subgraphNode)
|
||||
|
||||
rootGraph.remove(subgraphNode)
|
||||
|
||||
expect(dispatched.map((e) => e.node)).toEqual(innerNodes)
|
||||
for (const entry of dispatched) {
|
||||
expect(entry.graphAtDispatch).toBe(subgraph)
|
||||
}
|
||||
})
|
||||
|
||||
it('subgraph-definition GC dispatches node:before-removed before each inner node onRemoved', () => {
|
||||
const rootGraph = new LGraph()
|
||||
const { subgraph, innerNodes } = createSubgraphWithNodes(rootGraph, 1)
|
||||
const innerNode = innerNodes[0]
|
||||
|
||||
const order: string[] = []
|
||||
subgraph.events.addEventListener('node:before-removed', () => {
|
||||
order.push('before-removed')
|
||||
})
|
||||
innerNode.onRemoved = () => {
|
||||
order.push('onRemoved')
|
||||
}
|
||||
subgraph.onNodeRemoved = () => {
|
||||
order.push('onNodeRemoved')
|
||||
}
|
||||
|
||||
const subgraphNode = createTestSubgraphNode(subgraph, { pos: [100, 100] })
|
||||
rootGraph.add(subgraphNode)
|
||||
|
||||
rootGraph.remove(subgraphNode)
|
||||
|
||||
expect(order).toEqual(['before-removed', 'onRemoved', 'onNodeRemoved'])
|
||||
})
|
||||
|
||||
it('subgraph definition is removed when SubgraphNode is removed', () => {
|
||||
const rootGraph = new LGraph()
|
||||
const { subgraph } = createSubgraphWithNodes(rootGraph, 1)
|
||||
|
||||
@@ -1079,6 +1079,8 @@ export class LGraph
|
||||
// sure? - almost sure is wrong
|
||||
this.beforeChange()
|
||||
|
||||
this.events.dispatch('node:before-removed', { node })
|
||||
|
||||
const { inputs, outputs } = node
|
||||
|
||||
// disconnect inputs
|
||||
@@ -1115,6 +1117,11 @@ export class LGraph
|
||||
|
||||
if (!hasRemainingReferences) {
|
||||
forEachNode(node.subgraph, (innerNode) => {
|
||||
if (innerNode.graph) {
|
||||
;(
|
||||
innerNode.graph.events as CustomEventTarget<LGraphEventMap>
|
||||
).dispatch('node:before-removed', { node: innerNode })
|
||||
}
|
||||
innerNode.onRemoved?.()
|
||||
innerNode.graph?.onNodeRemoved?.(innerNode)
|
||||
})
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import type { LGraph } from '@/lib/litegraph/src/LGraph'
|
||||
import type { LGraphNode } from '@/lib/litegraph/src/LGraphNode'
|
||||
import type { LLink, ResolvedConnection } from '@/lib/litegraph/src/LLink'
|
||||
import type { ReadOnlyRect } from '@/lib/litegraph/src/interfaces'
|
||||
import type { Subgraph } from '@/lib/litegraph/src/subgraph/Subgraph'
|
||||
@@ -48,4 +49,11 @@ export interface LGraphEventMap {
|
||||
subgraph: Subgraph
|
||||
closingGraph: LGraph | Subgraph
|
||||
}
|
||||
|
||||
/**
|
||||
* Fires on the owning graph before per-node teardown begins
|
||||
*/
|
||||
'node:before-removed': {
|
||||
node: LGraphNode
|
||||
}
|
||||
}
|
||||
|
||||
@@ -79,6 +79,19 @@ describe('SubgraphNode Construction', () => {
|
||||
expect(subgraphNode.graph).toBeNull()
|
||||
})
|
||||
|
||||
it('should return empty widgets array (not throw) after removal', () => {
|
||||
const subgraph = createTestSubgraph()
|
||||
const subgraphNode = createTestSubgraphNode(subgraph)
|
||||
const parentGraph = subgraphNode.graph!
|
||||
parentGraph.add(subgraphNode)
|
||||
|
||||
parentGraph.remove(subgraphNode)
|
||||
|
||||
expect(subgraphNode.graph).toBeNull()
|
||||
expect(() => subgraphNode.widgets).not.toThrow()
|
||||
expect(subgraphNode.widgets).toEqual([])
|
||||
})
|
||||
|
||||
subgraphTest(
|
||||
'should synchronize slots with subgraph definition',
|
||||
({ subgraphWithNode }) => {
|
||||
|
||||
@@ -257,6 +257,7 @@ export class SubgraphNode extends LGraphNode implements BaseLGraph {
|
||||
}
|
||||
|
||||
private _getPromotedViews(): PromotedWidgetView[] {
|
||||
if (!this.graph) return []
|
||||
const store = usePromotionStore()
|
||||
const entries = store.getPromotionsRef(this.rootGraph.id, this.id)
|
||||
const hasMissingBoundSourceWidget = this._hasMissingBoundSourceWidget()
|
||||
@@ -302,6 +303,7 @@ export class SubgraphNode extends LGraphNode implements BaseLGraph {
|
||||
|
||||
private _syncPromotions(): void {
|
||||
if (this.id === -1) return
|
||||
if (!this.graph) return
|
||||
|
||||
const store = usePromotionStore()
|
||||
const entries = store.getPromotionsRef(this.rootGraph.id, this.id)
|
||||
|
||||
@@ -2795,55 +2795,51 @@
|
||||
"survey": {
|
||||
"title": "Cloud Survey",
|
||||
"placeholder": "Survey questions placeholder",
|
||||
"intro": "Help us tailor your ComfyUI experience.",
|
||||
"errors": {
|
||||
"chooseAnOption": "Please choose an option.",
|
||||
"selectAtLeastOne": "Please select at least one option.",
|
||||
"describeAnswer": "Please describe your answer."
|
||||
},
|
||||
"steps": {
|
||||
"usage": "How do you plan to use ComfyUI?",
|
||||
"familiarity": "How familiar are you with ComfyUI?",
|
||||
"intent": "What do you want to create with ComfyUI?",
|
||||
"source": "Where did you hear about ComfyUI?"
|
||||
"purpose": "What will you primarily use ComfyUI for?",
|
||||
"industry": "What's your primary industry?",
|
||||
"making": "What do you plan on making?"
|
||||
},
|
||||
"questions": {
|
||||
"familiarity": "How familiar are you with ComfyUI?",
|
||||
"purpose": "What will you primarily use ComfyUI for?",
|
||||
"industry": "What's your primary industry?",
|
||||
"making": "What do you plan on making?"
|
||||
},
|
||||
"options": {
|
||||
"usage": {
|
||||
"personal": "Personal use",
|
||||
"work": "Work",
|
||||
"education": "Education (student or educator)"
|
||||
},
|
||||
"familiarity": {
|
||||
"new": "New — never used it",
|
||||
"starting": "Beginner — following tutorials",
|
||||
"basics": "Intermediate — comfortable with basics",
|
||||
"advanced": "Advanced — build and edit workflows",
|
||||
"expert": "Expert — I help others"
|
||||
"new": "New to ComfyUI (never used it before)",
|
||||
"starting": "Just getting started (following tutorials)",
|
||||
"basics": "Comfortable with basics",
|
||||
"advanced": "Advanced user (custom workflows)",
|
||||
"expert": "Expert (help others)"
|
||||
},
|
||||
"intent": {
|
||||
"workflows": "Custom workflows or pipelines",
|
||||
"custom_nodes": "Custom nodes",
|
||||
"videos": "Videos",
|
||||
"purpose": {
|
||||
"personal": "Personal projects / hobby",
|
||||
"community": "Community contributions (nodes, workflows, etc.)",
|
||||
"client": "Client work (freelance)",
|
||||
"inhouse": "My own workplace (in-house)",
|
||||
"research": "Academic research"
|
||||
},
|
||||
"industry": {
|
||||
"film_tv_animation": "Film, TV, & animation",
|
||||
"gaming": "Gaming",
|
||||
"marketing": "Marketing & advertising",
|
||||
"architecture": "Architecture",
|
||||
"product_design": "Product & graphic design",
|
||||
"fine_art": "Fine art & illustration",
|
||||
"software": "Software & technology",
|
||||
"education": "Education",
|
||||
"other": "Other",
|
||||
"otherPlaceholder": "Please specify"
|
||||
},
|
||||
"making": {
|
||||
"images": "Images",
|
||||
"3d_game": "3D assets / game assets",
|
||||
"video": "Video & animation",
|
||||
"3d": "3D assets",
|
||||
"audio": "Audio / music",
|
||||
"apps": "Simplified Apps from workflows",
|
||||
"api": "API endpoints to run workflows",
|
||||
"not_sure": "Not sure"
|
||||
},
|
||||
"source": {
|
||||
"youtube": "YouTube",
|
||||
"reddit": "Reddit",
|
||||
"twitter": "Twitter / X",
|
||||
"instagram": "Instagram",
|
||||
"linkedin": "LinkedIn",
|
||||
"friend": "Friend or colleague",
|
||||
"search": "Google / search",
|
||||
"newsletter": "Newsletter or blog",
|
||||
"conference": "Conference or event",
|
||||
"discord": "Discord / community",
|
||||
"github": "GitHub",
|
||||
"other": "Other"
|
||||
"custom_nodes": "Custom nodes & workflows"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -2913,10 +2909,10 @@
|
||||
"cloudForgotPassword_emailRequired": "Email is required",
|
||||
"cloudForgotPassword_passwordResetSent": "Password reset sent",
|
||||
"cloudForgotPassword_passwordResetError": "Failed to send password reset email",
|
||||
"cloudSurvey_steps_usage": "How do you plan to use ComfyUI?",
|
||||
"cloudSurvey_steps_familiarity": "How familiar are you with ComfyUI?",
|
||||
"cloudSurvey_steps_intent": "What do you want to create with ComfyUI?",
|
||||
"cloudSurvey_steps_source": "Where did you hear about ComfyUI?",
|
||||
"cloudSurvey_steps_purpose": "What will you primarily use ComfyUI for?",
|
||||
"cloudSurvey_steps_industry": "What's your primary industry?",
|
||||
"cloudSurvey_steps_making": "What do you plan on making?",
|
||||
"assetBrowser": {
|
||||
"allCategory": "All {category}",
|
||||
"allModels": "All Models",
|
||||
|
||||
@@ -1,40 +1,251 @@
|
||||
<template>
|
||||
<div class="flex h-[700px] max-h-[85vh] w-[320px] max-w-[90vw] flex-col">
|
||||
<DynamicSurveyForm
|
||||
:key="activeSurvey.version"
|
||||
:survey="activeSurvey"
|
||||
:is-submitting="isSubmitting"
|
||||
@submit="onSubmitSurvey"
|
||||
/>
|
||||
<div>
|
||||
<Stepper
|
||||
value="1"
|
||||
class="flex h-[638px] max-h-[80vh] w-[320px] max-w-[90vw] flex-col"
|
||||
>
|
||||
<ProgressBar
|
||||
:value="progressPercent"
|
||||
:show-value="false"
|
||||
class="mb-8 h-2"
|
||||
/>
|
||||
|
||||
<StepPanels class="flex flex-1 flex-col p-0">
|
||||
<StepPanel
|
||||
v-slot="{ activateCallback }"
|
||||
value="1"
|
||||
class="flex min-h-full flex-1 flex-col justify-between bg-transparent"
|
||||
>
|
||||
<div>
|
||||
<label class="mb-8 block text-lg font-medium">{{
|
||||
t('cloudSurvey_steps_familiarity')
|
||||
}}</label>
|
||||
<div class="flex flex-col gap-6">
|
||||
<div
|
||||
v-for="opt in familiarityOptions"
|
||||
:key="opt.value"
|
||||
class="flex items-center gap-3"
|
||||
>
|
||||
<RadioButton
|
||||
v-model="surveyData.familiarity"
|
||||
:input-id="`fam-${opt.value}`"
|
||||
name="familiarity"
|
||||
:value="opt.value"
|
||||
/>
|
||||
<label
|
||||
:for="`fam-${opt.value}`"
|
||||
class="cursor-pointer text-sm"
|
||||
>{{ opt.label }}</label
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex justify-between pt-4">
|
||||
<span />
|
||||
<Button
|
||||
:disabled="!validStep1"
|
||||
class="h-10 w-full border-none text-white"
|
||||
@click="goTo(2, activateCallback)"
|
||||
>
|
||||
{{ $t('g.next') }}
|
||||
</Button>
|
||||
</div>
|
||||
</StepPanel>
|
||||
|
||||
<StepPanel
|
||||
v-slot="{ activateCallback }"
|
||||
value="2"
|
||||
class="flex min-h-full flex-1 flex-col justify-between bg-transparent"
|
||||
>
|
||||
<div>
|
||||
<label class="mb-8 block text-lg font-medium">{{
|
||||
t('cloudSurvey_steps_purpose')
|
||||
}}</label>
|
||||
<div class="flex flex-col gap-6">
|
||||
<div
|
||||
v-for="opt in purposeOptions"
|
||||
:key="opt.value"
|
||||
class="flex items-center gap-3"
|
||||
>
|
||||
<RadioButton
|
||||
v-model="surveyData.useCase"
|
||||
:input-id="`purpose-${opt.value}`"
|
||||
name="purpose"
|
||||
:value="opt.value"
|
||||
/>
|
||||
<label
|
||||
:for="`purpose-${opt.value}`"
|
||||
class="cursor-pointer text-sm"
|
||||
>{{ opt.label }}</label
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="surveyData.useCase === 'other'" class="mt-4 ml-8">
|
||||
<InputText
|
||||
v-model="surveyData.useCaseOther"
|
||||
class="w-full"
|
||||
:placeholder="
|
||||
$t('cloudOnboarding.survey.options.industry.otherPlaceholder')
|
||||
"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex gap-6 pt-4">
|
||||
<Button
|
||||
variant="secondary"
|
||||
class="flex-1 text-white"
|
||||
@click="goTo(1, activateCallback)"
|
||||
>
|
||||
{{ $t('g.back') }}
|
||||
</Button>
|
||||
<Button
|
||||
:disabled="!validStep2"
|
||||
class="h-10 flex-1 text-white"
|
||||
@click="goTo(3, activateCallback)"
|
||||
>
|
||||
{{ $t('g.next') }}
|
||||
</Button>
|
||||
</div>
|
||||
</StepPanel>
|
||||
|
||||
<StepPanel
|
||||
v-slot="{ activateCallback }"
|
||||
value="3"
|
||||
class="flex min-h-full flex-1 flex-col justify-between bg-transparent"
|
||||
>
|
||||
<div>
|
||||
<label class="mb-8 block text-lg font-medium">{{
|
||||
t('cloudSurvey_steps_industry')
|
||||
}}</label>
|
||||
<div class="flex flex-col gap-6">
|
||||
<div
|
||||
v-for="opt in industryOptions"
|
||||
:key="opt.value"
|
||||
class="flex items-center gap-3"
|
||||
>
|
||||
<RadioButton
|
||||
v-model="surveyData.industry"
|
||||
:input-id="`industry-${opt.value}`"
|
||||
name="industry"
|
||||
:value="opt.value"
|
||||
/>
|
||||
<label
|
||||
:for="`industry-${opt.value}`"
|
||||
class="cursor-pointer text-sm"
|
||||
>{{ opt.label }}</label
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="surveyData.industry === 'other'" class="mt-4 ml-8">
|
||||
<InputText
|
||||
v-model="surveyData.industryOther"
|
||||
class="w-full"
|
||||
:placeholder="
|
||||
$t('cloudOnboarding.survey.options.industry.otherPlaceholder')
|
||||
"
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex gap-6 pt-4">
|
||||
<Button
|
||||
variant="secondary"
|
||||
class="flex-1 text-white"
|
||||
@click="goTo(2, activateCallback)"
|
||||
>
|
||||
{{ $t('g.back') }}
|
||||
</Button>
|
||||
<Button
|
||||
:disabled="!validStep3"
|
||||
class="h-10 flex-1 border-none text-white"
|
||||
@click="goTo(4, activateCallback)"
|
||||
>
|
||||
{{ $t('g.next') }}
|
||||
</Button>
|
||||
</div>
|
||||
</StepPanel>
|
||||
|
||||
<StepPanel
|
||||
v-slot="{ activateCallback }"
|
||||
value="4"
|
||||
class="flex min-h-full flex-1 flex-col justify-between bg-transparent"
|
||||
>
|
||||
<div>
|
||||
<label class="mb-8 block text-lg font-medium">{{
|
||||
t('cloudSurvey_steps_making')
|
||||
}}</label>
|
||||
<div class="flex flex-col gap-6">
|
||||
<div
|
||||
v-for="opt in makingOptions"
|
||||
:key="opt.value"
|
||||
class="flex items-center gap-3"
|
||||
>
|
||||
<Checkbox
|
||||
v-model="surveyData.making"
|
||||
:input-id="`making-${opt.value}`"
|
||||
:value="opt.value"
|
||||
/>
|
||||
<label
|
||||
:for="`making-${opt.value}`"
|
||||
class="cursor-pointer text-sm"
|
||||
>{{ opt.label }}</label
|
||||
>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="flex gap-6 pt-4">
|
||||
<Button
|
||||
variant="secondary"
|
||||
class="flex-1 text-white"
|
||||
@click="goTo(3, activateCallback)"
|
||||
>
|
||||
{{ $t('g.back') }}
|
||||
</Button>
|
||||
<Button
|
||||
:disabled="!validStep4 || isSubmitting"
|
||||
:loading="isSubmitting"
|
||||
class="h-10 flex-1 border-none text-white"
|
||||
@click="onSubmitSurvey"
|
||||
>
|
||||
{{ $t('g.submit') }}
|
||||
</Button>
|
||||
</div>
|
||||
</StepPanel>
|
||||
</StepPanels>
|
||||
</Stepper>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import Checkbox from 'primevue/checkbox'
|
||||
import InputText from 'primevue/inputtext'
|
||||
import ProgressBar from 'primevue/progressbar'
|
||||
import RadioButton from 'primevue/radiobutton'
|
||||
import StepPanel from 'primevue/steppanel'
|
||||
import StepPanels from 'primevue/steppanels'
|
||||
import Stepper from 'primevue/stepper'
|
||||
import { computed, onMounted, ref } from 'vue'
|
||||
import { useI18n } from 'vue-i18n'
|
||||
import { useRouter } from 'vue-router'
|
||||
|
||||
import Button from '@/components/ui/button/Button.vue'
|
||||
import { useFeatureFlags } from '@/composables/useFeatureFlags'
|
||||
import {
|
||||
getSurveyCompletedStatus,
|
||||
submitSurvey
|
||||
} from '@/platform/cloud/onboarding/auth'
|
||||
import { isCloud } from '@/platform/distribution/types'
|
||||
import { remoteConfig } from '@/platform/remoteConfig/remoteConfig'
|
||||
import { useTelemetry } from '@/platform/telemetry'
|
||||
|
||||
import DynamicSurveyForm from './survey/DynamicSurveyForm.vue'
|
||||
import { defaultOnboardingSurvey } from './survey/defaultSurveySchema'
|
||||
|
||||
const { t } = useI18n()
|
||||
const router = useRouter()
|
||||
const { flags } = useFeatureFlags()
|
||||
const onboardingSurveyEnabled = computed(() => flags.onboardingSurveyEnabled)
|
||||
|
||||
const activeSurvey = computed(
|
||||
() => remoteConfig.value.onboarding_survey ?? defaultOnboardingSurvey
|
||||
)
|
||||
|
||||
const isSubmitting = ref(false)
|
||||
|
||||
// Check if survey is already completed on mount
|
||||
onMounted(async () => {
|
||||
if (!onboardingSurveyEnabled.value) {
|
||||
await router.replace({ name: 'cloud-user-check' })
|
||||
@@ -43,31 +254,156 @@ onMounted(async () => {
|
||||
try {
|
||||
const surveyCompleted = await getSurveyCompletedStatus()
|
||||
if (surveyCompleted) {
|
||||
// User already completed survey, return to onboarding flow
|
||||
await router.replace({ name: 'cloud-user-check' })
|
||||
return
|
||||
}
|
||||
if (isCloud) {
|
||||
useTelemetry()?.trackSurvey('opened')
|
||||
} else {
|
||||
// Track survey opened event
|
||||
if (isCloud) {
|
||||
useTelemetry()?.trackSurvey('opened')
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to check survey status:', error)
|
||||
}
|
||||
})
|
||||
|
||||
const onSubmitSurvey = async (payload: Record<string, unknown>) => {
|
||||
if (!onboardingSurveyEnabled.value) {
|
||||
await router.replace({ name: 'cloud-user-check' })
|
||||
return
|
||||
const activeStep = ref(1)
|
||||
const totalSteps = 4
|
||||
const progressPercent = computed(() =>
|
||||
Math.max(20, Math.min(100, ((activeStep.value - 1) / (totalSteps - 1)) * 100))
|
||||
)
|
||||
|
||||
const isSubmitting = ref(false)
|
||||
|
||||
const surveyData = ref({
|
||||
familiarity: '',
|
||||
useCase: '',
|
||||
useCaseOther: '',
|
||||
industry: '',
|
||||
industryOther: '',
|
||||
making: [] as string[]
|
||||
})
|
||||
|
||||
// Options
|
||||
const familiarityOptions = [
|
||||
{ label: 'New to ComfyUI (never used it before)', value: 'new' },
|
||||
{ label: 'Just getting started (following tutorials)', value: 'starting' },
|
||||
{ label: 'Comfortable with basics', value: 'basics' },
|
||||
{ label: 'Advanced user (custom workflows)', value: 'advanced' },
|
||||
{ label: 'Expert (help others)', value: 'expert' }
|
||||
]
|
||||
|
||||
const purposeOptions = [
|
||||
{ label: 'Personal projects/hobby', value: 'personal' },
|
||||
{
|
||||
label: 'Community contributions (nodes, workflows, etc.)',
|
||||
value: 'community'
|
||||
},
|
||||
{ label: 'Client work (freelance)', value: 'client' },
|
||||
{ label: 'My own workplace (in-house)', value: 'inhouse' },
|
||||
{ label: 'Academic research', value: 'research' },
|
||||
{ label: 'Other', value: 'other' }
|
||||
]
|
||||
|
||||
const industryOptions = [
|
||||
{ label: 'Film, TV, & animation', value: 'film_tv_animation' },
|
||||
{ label: 'Gaming', value: 'gaming' },
|
||||
{ label: 'Marketing & advertising', value: 'marketing' },
|
||||
{ label: 'Architecture', value: 'architecture' },
|
||||
{ label: 'Product & graphic design', value: 'product_design' },
|
||||
{ label: 'Fine art & illustration', value: 'fine_art' },
|
||||
{ label: 'Software & technology', value: 'software' },
|
||||
{ label: 'Education', value: 'education' },
|
||||
{ label: 'Other', value: 'other' }
|
||||
]
|
||||
|
||||
const makingOptions = [
|
||||
{ label: 'Images', value: 'images' },
|
||||
{ label: 'Video & animation', value: 'video' },
|
||||
{ label: '3D assets', value: '3d' },
|
||||
{ label: 'Audio/music', value: 'audio' },
|
||||
{ label: 'Custom nodes & workflows', value: 'custom_nodes' }
|
||||
]
|
||||
|
||||
// Validation per step
|
||||
const validStep1 = computed(() => !!surveyData.value.familiarity)
|
||||
const validStep2 = computed(() => {
|
||||
if (!surveyData.value.useCase) return false
|
||||
if (surveyData.value.useCase === 'other') {
|
||||
return !!surveyData.value.useCaseOther?.trim()
|
||||
}
|
||||
isSubmitting.value = true
|
||||
return true
|
||||
})
|
||||
const validStep3 = computed(() => {
|
||||
if (!surveyData.value.industry) return false
|
||||
if (surveyData.value.industry === 'other') {
|
||||
return !!surveyData.value.industryOther?.trim()
|
||||
}
|
||||
return true
|
||||
})
|
||||
const validStep4 = computed(() => surveyData.value.making.length > 0)
|
||||
|
||||
const changeActiveStep = (step: number) => {
|
||||
activeStep.value = step
|
||||
}
|
||||
|
||||
const goTo = (step: number, activate: (val: string | number) => void) => {
|
||||
// keep Stepper panel and progress bar in sync; Stepper values are strings
|
||||
changeActiveStep(step)
|
||||
activate(String(step))
|
||||
}
|
||||
|
||||
// Submit
|
||||
const onSubmitSurvey = async () => {
|
||||
try {
|
||||
await submitSurvey(payload)
|
||||
if (isCloud) {
|
||||
useTelemetry()?.trackSurvey('submitted', payload)
|
||||
if (!onboardingSurveyEnabled.value) {
|
||||
await router.replace({ name: 'cloud-user-check' })
|
||||
return
|
||||
}
|
||||
isSubmitting.value = true
|
||||
// prepare payload with consistent structure
|
||||
const payload = {
|
||||
familiarity: surveyData.value.familiarity,
|
||||
useCase:
|
||||
surveyData.value.useCase === 'other'
|
||||
? surveyData.value.useCaseOther?.trim() || 'other'
|
||||
: surveyData.value.useCase,
|
||||
industry:
|
||||
surveyData.value.industry === 'other'
|
||||
? surveyData.value.industryOther?.trim() || 'other'
|
||||
: surveyData.value.industry,
|
||||
making: surveyData.value.making
|
||||
}
|
||||
|
||||
await submitSurvey(payload)
|
||||
|
||||
// Track survey submitted event with responses
|
||||
if (isCloud) {
|
||||
useTelemetry()?.trackSurvey('submitted', {
|
||||
industry: payload.industry,
|
||||
useCase: payload.useCase,
|
||||
familiarity: payload.familiarity,
|
||||
making: payload.making
|
||||
})
|
||||
}
|
||||
|
||||
await router.push({ name: 'cloud-user-check' })
|
||||
} finally {
|
||||
isSubmitting.value = false
|
||||
}
|
||||
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
:deep(.p-progressbar .p-progressbar-value) {
|
||||
background-color: #f0ff41 !important;
|
||||
}
|
||||
:deep(.p-radiobutton-checked .p-radiobutton-box) {
|
||||
background-color: #f0ff41 !important;
|
||||
border-color: #f0ff41 !important;
|
||||
}
|
||||
:deep(.p-checkbox-checked .p-checkbox-box) {
|
||||
background-color: #f0ff41 !important;
|
||||
border-color: #f0ff41 !important;
|
||||
}
|
||||
</style>
|
||||
|
||||
@@ -1,161 +0,0 @@
|
||||
<template>
|
||||
<fieldset
|
||||
v-if="field.type !== 'text'"
|
||||
:aria-invalid="Boolean(errorMessage)"
|
||||
class="flex flex-col gap-4 border-0 p-0"
|
||||
>
|
||||
<legend class="mb-2 block text-lg font-medium text-base-foreground">
|
||||
{{ resolvedLabel }}
|
||||
</legend>
|
||||
<template v-if="field.type === 'single'">
|
||||
<div
|
||||
v-for="option in field.options"
|
||||
:key="option.value"
|
||||
class="flex items-center gap-3"
|
||||
>
|
||||
<RadioButton
|
||||
:model-value="(modelValue as string) ?? ''"
|
||||
:input-id="`${field.id}-${option.value}`"
|
||||
:name="field.id"
|
||||
:value="option.value"
|
||||
:dt="checkedTokens"
|
||||
@update:model-value="onSingleChange"
|
||||
/>
|
||||
<label
|
||||
:for="`${field.id}-${option.value}`"
|
||||
class="cursor-pointer text-sm"
|
||||
>{{ resolveOptionLabel(option) }}</label
|
||||
>
|
||||
</div>
|
||||
</template>
|
||||
<template v-else>
|
||||
<div
|
||||
v-for="option in field.options"
|
||||
:key="option.value"
|
||||
class="flex items-center gap-3"
|
||||
>
|
||||
<Checkbox
|
||||
:model-value="(modelValue as string[]) ?? []"
|
||||
:input-id="`${field.id}-${option.value}`"
|
||||
:value="option.value"
|
||||
:dt="checkedTokens"
|
||||
@update:model-value="onMultiChange"
|
||||
/>
|
||||
<label
|
||||
:for="`${field.id}-${option.value}`"
|
||||
class="cursor-pointer text-sm"
|
||||
>{{ resolveOptionLabel(option) }}</label
|
||||
>
|
||||
</div>
|
||||
</template>
|
||||
<Input
|
||||
v-if="field.allowOther && field.otherFieldId && modelValue === 'other'"
|
||||
:model-value="(otherValue as string) ?? ''"
|
||||
:placeholder="
|
||||
$t(
|
||||
`cloudOnboarding.survey.options.${field.id}.otherPlaceholder`,
|
||||
$t('cloudOnboarding.survey.otherPlaceholder')
|
||||
)
|
||||
"
|
||||
class="ml-1"
|
||||
@update:model-value="onOtherChange"
|
||||
/>
|
||||
<p v-if="errorMessage" class="text-danger text-xs">{{ errorMessage }}</p>
|
||||
</fieldset>
|
||||
<div v-else class="flex flex-col gap-3">
|
||||
<label
|
||||
:for="controlId"
|
||||
class="block text-lg font-medium text-base-foreground"
|
||||
>
|
||||
{{ resolvedLabel }}
|
||||
</label>
|
||||
<Input
|
||||
:id="controlId"
|
||||
:model-value="(modelValue as string) ?? ''"
|
||||
:placeholder="field.placeholder"
|
||||
:aria-invalid="Boolean(errorMessage)"
|
||||
@update:model-value="onTextChange"
|
||||
/>
|
||||
<p v-if="errorMessage" class="text-danger text-xs">{{ errorMessage }}</p>
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
import Checkbox from 'primevue/checkbox'
import RadioButton from 'primevue/radiobutton'
import { computed, useId } from 'vue'
import { useI18n } from 'vue-i18n'

import Input from '@/components/ui/input/Input.vue'
import type {
  LocalizedString,
  OnboardingSurveyField,
  OnboardingSurveyOption
} from '@/platform/remoteConfig/types'

// Renders one server-driven survey field (single-select, multi-select, or
// free text) and bridges PrimeVue controls to narrow v-model payloads.
const {
  field,
  modelValue,
  otherValue,
  errorMessage = ''
} = defineProps<{
  field: OnboardingSurveyField
  modelValue: string | string[] | undefined
  otherValue?: string
  errorMessage?: string
}>()

const emit = defineEmits<{
  'update:modelValue': [value: string | string[]]
  'update:otherValue': [value: string]
}>()

const { t, te, locale } = useI18n()
// Stable id tying the text <Input> to its <label for="..."> pair.
const controlId = useId()

/**
 * Resolve a LocalizedString (plain literal or locale → text map) for the
 * active locale, falling back to `en`, then to any entry in the map, then ''.
 */
const resolveLocalized = (value: LocalizedString): string => {
  if (typeof value === 'string') return value
  return value[locale.value] ?? value.en ?? Object.values(value)[0] ?? ''
}

// PrimeVue design-token overrides: electric accent for checked controls.
const checkedTokens = {
  checked: {
    background: 'var(--color-electric-400)',
    borderColor: 'var(--color-electric-400)',
    hoverBackground: 'var(--color-electric-400)',
    hoverBorderColor: 'var(--color-electric-400)'
  }
}

// Fix: this was a one-shot IIFE, so the field label never re-resolved when
// the locale (or the `field` prop) changed after setup — unlike option
// labels, which are resolved per render. `computed` keeps it live; the
// template auto-unwraps it, so template usage is unchanged.
const resolvedLabel = computed(() => {
  if (field.labelKey && te(field.labelKey)) return t(field.labelKey)
  if (field.label != null) return resolveLocalized(field.label)
  return field.id
})

/** Label precedence for an option: i18n labelKey → inline label → raw value. */
const resolveOptionLabel = (option: OnboardingSurveyOption): string => {
  if (option.labelKey && te(option.labelKey)) return t(option.labelKey)
  if (option.label != null) return resolveLocalized(option.label)
  return option.value
}

// The handlers below accept `unknown` (or loose input values) and coerce
// defensively before re-emitting with our narrower payload types.
const onSingleChange = (value: unknown) => {
  emit('update:modelValue', typeof value === 'string' ? value : '')
}
const onMultiChange = (value: unknown) => {
  if (!Array.isArray(value)) {
    emit('update:modelValue', [])
    return
  }
  emit(
    'update:modelValue',
    value.filter((v): v is string => typeof v === 'string')
  )
}
const onTextChange = (value: string | number | undefined) => {
  emit('update:modelValue', String(value ?? ''))
}
const onOtherChange = (value: string | number | undefined) => {
  emit('update:otherValue', String(value ?? ''))
}
</script>
|
||||
@@ -1,320 +0,0 @@
|
||||
// Component tests for DynamicSurveyForm: step navigation, Next/Submit
// validity gating, and label resolution (labelKey lookup, server-supplied
// locale maps, English fallback).
import userEvent from '@testing-library/user-event'
import { render, screen } from '@testing-library/vue'
import PrimeVue from 'primevue/config'
import { describe, expect, it } from 'vitest'
import { createI18n } from 'vue-i18n'

import type { OnboardingSurvey } from '@/platform/remoteConfig/types'

import DynamicSurveyForm from './DynamicSurveyForm.vue'

// Let queued micro/macrotasks (step transitions) settle before asserting.
const flushPromises = () => new Promise((resolve) => setTimeout(resolve, 0))

const i18n = createI18n({
  legacy: false,
  locale: 'en',
  messages: {
    en: {
      g: { back: 'Back', next: 'Next', submit: 'Submit' },
      cloudOnboarding: {
        survey: {
          intro: 'Help us tailor your ComfyUI experience.',
          errors: {
            chooseAnOption: 'Please choose an option.',
            selectAtLeastOne: 'Please select at least one option.',
            describeAnswer: 'Please describe your answer.'
          }
        }
      }
    }
  }
})

const renderForm = (survey: OnboardingSurvey) =>
  render(DynamicSurveyForm, {
    global: { plugins: [PrimeVue, i18n] },
    props: { survey }
  })

// Fixture: one required single-select step, then one required multi-select.
const twoStepSurvey: OnboardingSurvey = {
  version: 1,
  introKey: 'cloudOnboarding.survey.intro',
  fields: [
    {
      id: 'usage',
      type: 'single',
      label: 'How do you plan to use ComfyUI?',
      required: true,
      options: [
        { value: 'personal', label: 'Personal use' },
        { value: 'work', label: 'Work' }
      ]
    },
    {
      id: 'intent',
      type: 'multi',
      label: 'What do you want to create with ComfyUI?',
      required: true,
      options: [
        { value: 'images', label: 'Images' },
        { value: 'videos', label: 'Videos' }
      ]
    }
  ]
}

describe('DynamicSurveyForm', () => {
  it('renders the intro text and the first field options', () => {
    renderForm(twoStepSurvey)

    expect(
      screen.getByText('Help us tailor your ComfyUI experience.')
    ).toBeInTheDocument()
    expect(screen.getByText('How do you plan to use ComfyUI?')).toBeVisible()
    expect(screen.getByLabelText('Personal use')).toBeInTheDocument()
    expect(screen.getByLabelText('Work')).toBeInTheDocument()
  })

  it('disables Next until the user selects an option, then advances', async () => {
    const user = userEvent.setup()
    renderForm(twoStepSurvey)

    const next = screen.getByRole('button', { name: 'Next' })
    expect(next).toBeDisabled()

    await user.click(screen.getByLabelText('Personal use'))
    expect(next).toBeEnabled()

    await user.click(next)
    await flushPromises()

    expect(
      screen.getByText('What do you want to create with ComfyUI?')
    ).toBeVisible()
    expect(screen.getByLabelText('Images')).toBeInTheDocument()
    expect(screen.getByRole('button', { name: 'Back' })).toBeInTheDocument()
  })

  it('navigates back to the previous step', async () => {
    const user = userEvent.setup()
    renderForm(twoStepSurvey)

    await user.click(screen.getByLabelText('Personal use'))
    await user.click(screen.getByRole('button', { name: 'Next' }))
    await flushPromises()
    expect(
      screen.getByText('What do you want to create with ComfyUI?')
    ).toBeVisible()

    await user.click(screen.getByRole('button', { name: 'Back' }))
    await flushPromises()
    expect(screen.getByText('How do you plan to use ComfyUI?')).toBeVisible()
  })

  it('resolves option and field labels via labelKey when provided', () => {
    const localizedI18n = createI18n({
      legacy: false,
      locale: 'en',
      messages: {
        en: {
          g: { back: 'Back', next: 'Next', submit: 'Submit' },
          cloudOnboarding: {
            survey: {
              intro: 'Help us tailor your ComfyUI experience.',
              errors: {
                chooseAnOption: '',
                selectAtLeastOne: '',
                describeAnswer: ''
              }
            }
          },
          survey_label: 'Localized question?',
          survey_a: 'Localized A',
          survey_b: 'Localized B'
        }
      }
    })

    render(DynamicSurveyForm, {
      global: { plugins: [PrimeVue, localizedI18n] },
      props: {
        survey: {
          version: 1,
          fields: [
            {
              id: 'q',
              type: 'single',
              labelKey: 'survey_label',
              required: true,
              options: [
                { value: 'a', labelKey: 'survey_a' },
                { value: 'b', labelKey: 'survey_b' }
              ]
            }
          ]
        }
      }
    })

    expect(screen.getByText('Localized question?')).toBeVisible()
    expect(screen.getByLabelText('Localized A')).toBeInTheDocument()
    expect(screen.getByLabelText('Localized B')).toBeInTheDocument()
  })

  it('renders server-supplied translations from a label locale map', () => {
    const koreanI18n = createI18n({
      legacy: false,
      locale: 'ko',
      fallbackLocale: 'en',
      messages: {
        en: {
          g: { back: 'Back', next: 'Next', submit: 'Submit' },
          cloudOnboarding: {
            survey: {
              intro: '',
              errors: {
                chooseAnOption: '',
                selectAtLeastOne: '',
                describeAnswer: ''
              }
            }
          }
        },
        ko: { g: { back: '뒤로', next: '다음', submit: '제출' } }
      }
    })

    render(DynamicSurveyForm, {
      global: { plugins: [PrimeVue, koreanI18n] },
      props: {
        survey: {
          version: 1,
          fields: [
            {
              id: 'usage',
              type: 'single',
              label: {
                en: 'How will you use it?',
                ko: '어떻게 사용하시겠어요?'
              },
              required: true,
              options: [
                {
                  value: 'personal',
                  label: { en: 'Personal use', ko: '개인 용도' }
                },
                { value: 'work', label: { en: 'Work', ko: '업무' } }
              ]
            }
          ]
        }
      }
    })

    expect(screen.getByText('어떻게 사용하시겠어요?')).toBeVisible()
    expect(screen.getByLabelText('개인 용도')).toBeInTheDocument()
    expect(screen.getByLabelText('업무')).toBeInTheDocument()
  })

  it('falls back to English when current locale missing from label map', () => {
    const fallbackI18n = createI18n({
      legacy: false,
      locale: 'fr',
      fallbackLocale: 'en',
      messages: {
        en: {
          g: { back: 'Back', next: 'Next', submit: 'Submit' },
          cloudOnboarding: {
            survey: {
              intro: '',
              errors: {
                chooseAnOption: '',
                selectAtLeastOne: '',
                describeAnswer: ''
              }
            }
          }
        },
        fr: {}
      }
    })

    render(DynamicSurveyForm, {
      global: { plugins: [PrimeVue, fallbackI18n] },
      props: {
        survey: {
          version: 1,
          fields: [
            {
              id: 'q',
              type: 'single',
              label: { en: 'English question', ko: '한국어' },
              required: true,
              options: [
                { value: 'a', label: { en: 'English A', ko: '한국어 A' } }
              ]
            }
          ]
        }
      }
    })

    // fr is not in the map → falls back to en
    expect(screen.getByText('English question')).toBeVisible()
    expect(screen.getByLabelText('English A')).toBeInTheDocument()
  })

  it('allows advancing past an optional field while still empty', async () => {
    const user = userEvent.setup()
    render(DynamicSurveyForm, {
      global: { plugins: [PrimeVue, i18n] },
      props: {
        survey: {
          version: 1,
          fields: [
            {
              id: 'q1',
              type: 'single',
              label: 'Optional question?',
              options: [
                { value: 'a', label: 'A' },
                { value: 'b', label: 'B' }
              ]
              // no required: true — should be skippable
            },
            {
              id: 'q2',
              type: 'single',
              label: 'Required question?',
              required: true,
              options: [{ value: 'c', label: 'C' }]
            }
          ]
        }
      }
    })

    const next = screen.getByRole('button', { name: 'Next' })
    expect(next).toBeEnabled()

    await user.click(next)
    await flushPromises()
    expect(screen.getByText('Required question?')).toBeVisible()
  })

  it('enables Submit only after the multi-select field has at least one choice', async () => {
    const user = userEvent.setup()
    renderForm(twoStepSurvey)

    await user.click(screen.getByLabelText('Work'))
    await user.click(screen.getByRole('button', { name: 'Next' }))
    await flushPromises()

    const submitBtn = screen.getByRole('button', { name: 'Submit' })
    expect(submitBtn).toBeDisabled()

    await user.click(screen.getByRole('checkbox', { name: /Images/i }))
    await flushPromises()
    expect(submitBtn).toBeEnabled()
  })
})
|
||||
@@ -1,212 +0,0 @@
|
||||
<template>
  <form class="flex size-full flex-col" @submit.prevent="onSubmit">
    <p v-if="introText" class="mb-4 text-sm text-muted">
      {{ introText }}
    </p>
    <!-- Step progress bar -->
    <div
      class="mb-8 h-2 w-full overflow-hidden rounded-full bg-secondary-background"
    >
      <div
        class="h-full bg-electric-400 transition-[width] duration-300 ease-out"
        :style="{ width: `${progressPercent}%` }"
      />
    </div>

    <!-- Current step; :key remounts the field component per question -->
    <div class="flex flex-1 flex-col overflow-hidden">
      <div
        v-if="currentField"
        :key="currentField.id"
        class="flex flex-1 flex-col gap-4 overflow-y-auto pr-1"
      >
        <DynamicSurveyField
          :field="currentField"
          :model-value="values[currentField.id]"
          :other-value="
            currentField.otherFieldId
              ? (values[currentField.otherFieldId] as string)
              : undefined
          "
          :error-message="
            errors[currentField.id] ??
            (currentField.otherFieldId
              ? errors[currentField.otherFieldId]
              : undefined)
          "
          @update:model-value="(value) => onFieldChange(currentField.id, value)"
          @update:other-value="
            (value) =>
              currentField.otherFieldId &&
              onFieldChange(currentField.otherFieldId, value)
          "
        />
      </div>
    </div>

    <!-- Wizard navigation: Back / Next, or Submit on the last step -->
    <div class="flex gap-6 pt-4">
      <Button
        v-if="!isFirst"
        type="button"
        variant="secondary"
        class="h-10 flex-1 text-white"
        @click="goPrevious"
      >
        {{ $t('g.back') }}
      </Button>
      <span v-else class="flex-1" />
      <Button
        v-if="!isLast"
        type="button"
        :disabled="!isCurrentValid"
        :class="
          cn(
            'h-10 flex-1 border-none',
            isCurrentValid
              ? 'bg-electric-400 text-black hover:bg-electric-400/85'
              : 'bg-zinc-800 text-zinc-500'
          )
        "
        @click="goNext"
      >
        {{ $t('g.next') }}
      </Button>
      <Button
        v-else
        type="submit"
        :disabled="!isCurrentValid || isSubmitting"
        :loading="isSubmitting"
        :class="
          cn(
            'h-10 flex-1 border-none',
            isCurrentValid && !isSubmitting
              ? 'bg-electric-400 text-black hover:bg-electric-400/85'
              : 'bg-zinc-800 text-zinc-500'
          )
        "
      >
        {{ $t('g.submit') }}
      </Button>
    </div>
  </form>
</template>

<script setup lang="ts">
import { cn } from '@comfyorg/tailwind-utils'
import { toTypedSchema } from '@vee-validate/zod'
import { useForm } from 'vee-validate'
import { computed, ref, watch } from 'vue'
import { useI18n } from 'vue-i18n'

import Button from '@/components/ui/button/Button.vue'
import type { OnboardingSurvey } from '@/platform/remoteConfig/types'

import DynamicSurveyField from './DynamicSurveyField.vue'
import {
  buildInitialValues,
  buildSubmissionPayload,
  buildZodSchema,
  prepareSurvey,
  visibleFields
} from './surveySchema'
import type { SurveyValues } from './surveySchema'

// Wizard over a server-driven survey: one visible field per step, with
// per-step validity gating the Next/Submit buttons.
const { survey } = defineProps<{
  survey: OnboardingSurvey
  isSubmitting?: boolean
}>()

const emit = defineEmits<{
  submit: [payload: Record<string, unknown>]
}>()

const { t, te } = useI18n()

// Applies per-field option shuffling (randomize flag) to the raw survey.
const preparedSurvey = computed(() => prepareSurvey(survey))

// Intro line rendered only when the i18n key actually resolves.
const introText = computed(() => {
  const key = preparedSurvey.value.introKey
  if (!key) return ''
  return te(key) ? t(key) : ''
})

// Mirror of the answers kept outside vee-validate so the zod schema —
// which depends on current answers (e.g. the 'other' free-text gating) —
// is rebuilt reactively on every change.
// NOTE(review): presumably needed because `values` from useForm is not a
// tracked dependency of the schema computed — confirm.
const liveValues = ref<SurveyValues>(buildInitialValues(preparedSurvey.value))

const validationSchema = computed(() =>
  toTypedSchema(buildZodSchema(preparedSurvey.value, liveValues.value, t))
)

const { values, errors, setFieldValue, validate, resetForm } =
  useForm<SurveyValues>({
    initialValues: liveValues.value,
    validationSchema
  })

// Reset answers and jump back to step 0 when the survey definition changes.
watch(
  () => survey,
  () => {
    const fresh = buildInitialValues(preparedSurvey.value)
    liveValues.value = { ...fresh }
    resetForm({ values: fresh })
    stepIndex.value = 0
  }
)

// Fields whose showWhen gate currently passes; answers can grow/shrink this.
const visible = computed(() =>
  visibleFields(preparedSurvey.value, values as SurveyValues)
)
const stepIndex = ref(0)

const currentField = computed(() => visible.value[stepIndex.value])
const isFirst = computed(() => stepIndex.value === 0)
const isLast = computed(() => stepIndex.value === visible.value.length - 1)

const totalSteps = computed(() => Math.max(visible.value.length, 1))
// Floor at one step's worth so the bar is visibly started on step 1.
const progressPercent = computed(() =>
  Math.max(
    100 / totalSteps.value,
    ((stepIndex.value + 1) / totalSteps.value) * 100
  )
)

// Manual validity for the CURRENT step only (full-form schema validation
// still runs at submit): an empty answer passes iff the field is optional,
// and selecting 'other' additionally requires non-blank free text.
const isCurrentValid = computed(() => {
  const field = currentField.value
  if (!field) return false

  const value = values[field.id]
  const isEmpty =
    field.type === 'multi'
      ? !Array.isArray(value) || value.length === 0
      : typeof value !== 'string' || value.length === 0

  if (isEmpty) return !field.required

  if (field.allowOther && field.otherFieldId && value === 'other') {
    const other = values[field.otherFieldId]
    return typeof other === 'string' && other.trim().length > 0
  }
  return true
})

const onFieldChange = (id: string, value: string | string[]) => {
  setFieldValue(id, value)
  liveValues.value = { ...liveValues.value, [id]: value }
  // An answer change can hide later fields; clamp the step back in range.
  if (stepIndex.value > visible.value.length - 1) {
    stepIndex.value = Math.max(0, visible.value.length - 1)
  }
}

const goNext = () => {
  if (stepIndex.value < visible.value.length - 1) stepIndex.value += 1
}
const goPrevious = () => {
  if (stepIndex.value > 0) stepIndex.value -= 1
}

// Validate the whole form, then hand the flattened payload to the parent.
const onSubmit = async () => {
  const result = await validate()
  if (!result.valid) return
  emit(
    'submit',
    buildSubmissionPayload(preparedSurvey.value, values as SurveyValues)
  )
}
</script>
|
||||
@@ -1,76 +0,0 @@
|
||||
import type { OnboardingSurvey } from '@/platform/remoteConfig/types'
|
||||
|
||||
const optionsFor = (
|
||||
fieldId: string,
|
||||
values: string[]
|
||||
): { value: string; labelKey: string }[] =>
|
||||
values.map((value) => ({
|
||||
value,
|
||||
labelKey: `cloudOnboarding.survey.options.${fieldId}.${value}`
|
||||
}))
|
||||
|
||||
// Built-in onboarding survey definition (version 2). All labels resolve
// through frontend i18n keys (`labelKey`), so this object carries no copy.
// NOTE(review): presumably the fallback used when remote config ships no
// `onboarding_survey` — confirm against the remote-config consumer.
export const defaultOnboardingSurvey: OnboardingSurvey = {
  version: 2,
  introKey: 'cloudOnboarding.survey.intro',
  fields: [
    {
      id: 'usage',
      type: 'single',
      labelKey: 'cloudSurvey_steps_usage',
      required: true,
      options: optionsFor('usage', ['personal', 'work', 'education'])
    },
    {
      id: 'familiarity',
      type: 'single',
      labelKey: 'cloudSurvey_steps_familiarity',
      required: true,
      options: optionsFor('familiarity', [
        'new',
        'starting',
        'basics',
        'advanced',
        'expert'
      ])
    },
    {
      id: 'intent',
      type: 'multi',
      labelKey: 'cloudSurvey_steps_intent',
      required: true,
      // randomize shuffles options at render; 'not_sure' stays pinned last
      // (see PIN_LAST_VALUES in surveySchema).
      randomize: true,
      options: optionsFor('intent', [
        'workflows',
        'custom_nodes',
        'videos',
        'images',
        '3d_game',
        'audio',
        'apps',
        'api',
        'not_sure'
      ])
    },
    {
      id: 'source',
      type: 'single',
      labelKey: 'cloudSurvey_steps_source',
      required: true,
      // 'other' is likewise pinned to the end of the shuffled list.
      randomize: true,
      options: optionsFor('source', [
        'youtube',
        'reddit',
        'twitter',
        'instagram',
        'linkedin',
        'friend',
        'search',
        'newsletter',
        'conference',
        'discord',
        'github',
        'other'
      ])
    }
  ]
}
|
||||
@@ -1,248 +0,0 @@
|
||||
// Unit tests for the survey schema helpers: visibility gating, initial
// values, zod validation, submission flattening, and option randomization.
import { describe, expect, it } from 'vitest'

import type { OnboardingSurvey } from '@/platform/remoteConfig/types'

import {
  buildInitialValues,
  buildSubmissionPayload,
  buildZodSchema,
  prepareSurvey,
  visibleFields
} from './surveySchema'

// Shared fixture: 'role'/'industry' are gated on usage === 'work';
// 'industry' has an "other" free-text companion field ('industryOther').
const baseSurvey: OnboardingSurvey = {
  version: 1,
  fields: [
    {
      id: 'usage',
      type: 'single',
      required: true,
      options: [
        { value: 'work', label: 'Work' },
        { value: 'personal', label: 'Personal' }
      ]
    },
    {
      id: 'role',
      type: 'single',
      required: true,
      showWhen: { field: 'usage', equals: 'work' },
      options: [{ value: 'engineer', label: 'Engineer' }]
    },
    {
      id: 'industry',
      type: 'single',
      required: true,
      allowOther: true,
      otherFieldId: 'industryOther',
      showWhen: { field: 'usage', equals: 'work' },
      options: [
        { value: 'tech', label: 'Tech' },
        { value: 'other', label: 'Other' }
      ]
    },
    {
      id: 'making',
      type: 'multi',
      required: true,
      options: [
        { value: 'video', label: 'Video' },
        { value: 'images', label: 'Images' }
      ]
    }
  ]
}

describe('visibleFields', () => {
  it('hides fields when showWhen does not match', () => {
    const visible = visibleFields(baseSurvey, { usage: 'personal' })
    expect(visible.map((f) => f.id)).toEqual(['usage', 'making'])
  })

  it('shows gated fields when showWhen matches', () => {
    const visible = visibleFields(baseSurvey, { usage: 'work' })
    expect(visible.map((f) => f.id)).toEqual([
      'usage',
      'role',
      'industry',
      'making'
    ])
  })

  it('treats array equals as membership', () => {
    const survey: OnboardingSurvey = {
      version: 1,
      fields: [
        {
          id: 'role',
          type: 'single',
          showWhen: { field: 'usage', equals: ['work', 'education'] }
        }
      ]
    }
    expect(visibleFields(survey, { usage: 'education' })).toHaveLength(1)
    expect(visibleFields(survey, { usage: 'personal' })).toHaveLength(0)
  })

  it('intersects multi-select source values with expected set', () => {
    const survey: OnboardingSurvey = {
      version: 1,
      fields: [
        {
          id: 'follow_up',
          type: 'single',
          showWhen: { field: 'making', equals: ['video', '3d'] }
        }
      ]
    }
    expect(visibleFields(survey, { making: [] })).toHaveLength(0)
    expect(visibleFields(survey, { making: ['images'] })).toHaveLength(0)
    expect(visibleFields(survey, { making: ['images', 'video'] })).toHaveLength(
      1
    )
  })
})

describe('buildInitialValues', () => {
  it('initializes single fields to empty string and multi to empty array', () => {
    expect(buildInitialValues(baseSurvey)).toMatchObject({
      usage: '',
      role: '',
      industry: '',
      industryOther: '',
      making: []
    })
  })
})

describe('buildZodSchema', () => {
  it('omits hidden fields from validation', () => {
    const schema = buildZodSchema(baseSurvey, { usage: 'personal' })
    const result = schema.safeParse({ usage: 'personal', making: ['video'] })
    expect(result.success).toBe(true)
  })

  it('requires gated fields once visible', () => {
    const schema = buildZodSchema(baseSurvey, { usage: 'work' })
    const result = schema.safeParse({ usage: 'work', making: ['video'] })
    expect(result.success).toBe(false)
  })

  it('requires "other" detail when option is selected', () => {
    const schema = buildZodSchema(baseSurvey, {
      usage: 'work',
      role: 'engineer',
      industry: 'other',
      making: ['video']
    })
    expect(
      schema.safeParse({
        usage: 'work',
        role: 'engineer',
        industry: 'other',
        industryOther: '',
        making: ['video']
      }).success
    ).toBe(false)
    expect(
      schema.safeParse({
        usage: 'work',
        role: 'engineer',
        industry: 'other',
        industryOther: 'Aerospace',
        making: ['video']
      }).success
    ).toBe(true)
  })
})

describe('buildSubmissionPayload', () => {
  it('clears hidden fields and prefers free-text "other" detail', () => {
    const payload = buildSubmissionPayload(baseSurvey, {
      usage: 'work',
      role: 'engineer',
      industry: 'other',
      industryOther: ' Aerospace ',
      making: ['video']
    })
    expect(payload).toEqual({
      usage: 'work',
      role: 'engineer',
      industry: 'Aerospace',
      making: ['video']
    })
  })

  it('falls back to "other" when free-text is empty', () => {
    const payload = buildSubmissionPayload(baseSurvey, {
      usage: 'work',
      role: 'engineer',
      industry: 'other',
      industryOther: '',
      making: ['video']
    })
    expect(payload.industry).toBe('other')
  })

  it('zeroes out fields hidden by showWhen', () => {
    const payload = buildSubmissionPayload(baseSurvey, {
      usage: 'personal',
      role: 'engineer',
      making: ['video']
    })
    expect(payload).toMatchObject({
      usage: 'personal',
      role: '',
      industry: '',
      making: ['video']
    })
  })
})

describe('prepareSurvey', () => {
  it('preserves option contents but may reorder when randomize=true', () => {
    const survey: OnboardingSurvey = {
      version: 1,
      fields: [
        {
          id: 'making',
          type: 'multi',
          randomize: true,
          options: [
            { value: 'a', label: 'A' },
            { value: 'b', label: 'B' },
            { value: 'other', label: 'Other' }
          ]
        }
      ]
    }
    const prepared = prepareSurvey(survey)
    const values = prepared.fields[0]!.options!.map((o) => o.value)
    expect(values).toContain('a')
    expect(values).toContain('b')
    expect(values[values.length - 1]).toBe('other')
  })

  it('pins both "other" and "not_sure" at the end while randomizing the rest', () => {
    const survey: OnboardingSurvey = {
      version: 1,
      fields: [
        {
          id: 'intent',
          type: 'multi',
          randomize: true,
          options: [
            { value: 'a', label: 'A' },
            { value: 'b', label: 'B' },
            { value: 'other', label: 'Other' },
            { value: 'not_sure', label: 'Not sure' }
          ]
        }
      ]
    }
    const prepared = prepareSurvey(survey)
    const values = prepared.fields[0]!.options!.map((o) => o.value)
    expect(values.slice(-2).sort()).toEqual(['not_sure', 'other'])
    expect(values.slice(0, -2).sort()).toEqual(['a', 'b'])
  })
})
|
||||
@@ -1,137 +0,0 @@
|
||||
import { shuffle } from 'es-toolkit'
|
||||
import { z } from 'zod'
|
||||
|
||||
import type {
|
||||
OnboardingSurvey,
|
||||
OnboardingSurveyField,
|
||||
OnboardingSurveyFieldCondition
|
||||
} from '@/platform/remoteConfig/types'
|
||||
|
||||
export type SurveyValues = Record<string, string | string[] | undefined>
|
||||
|
||||
const hasNonEmptyValue = (current: string | string[] | undefined): boolean => {
|
||||
if (current === undefined || current === '') return false
|
||||
if (Array.isArray(current)) return current.length > 0
|
||||
return true
|
||||
}
|
||||
|
||||
const conditionMatches = (
|
||||
condition: OnboardingSurveyFieldCondition | undefined,
|
||||
values: SurveyValues
|
||||
): boolean => {
|
||||
if (!condition) return true
|
||||
const current = values[condition.field]
|
||||
if (!hasNonEmptyValue(current)) return false
|
||||
const expected = condition.equals
|
||||
if (expected === undefined) return true
|
||||
const expectedSet = Array.isArray(expected) ? expected : [expected]
|
||||
if (Array.isArray(current)) {
|
||||
return current.some((v) => expectedSet.includes(v))
|
||||
}
|
||||
return typeof current === 'string' && expectedSet.includes(current)
|
||||
}
|
||||
|
||||
export const visibleFields = (
|
||||
survey: OnboardingSurvey,
|
||||
values: SurveyValues
|
||||
): OnboardingSurveyField[] =>
|
||||
survey.fields.filter((field) => conditionMatches(field.showWhen, values))
|
||||
|
||||
const PIN_LAST_VALUES = new Set(['other', 'not_sure'])
|
||||
|
||||
const randomizeOptions = (field: OnboardingSurveyField) => {
|
||||
if (!field.randomize || !field.options) return field
|
||||
const pinned = field.options.filter((opt) => PIN_LAST_VALUES.has(opt.value))
|
||||
const rest = field.options.filter((opt) => !PIN_LAST_VALUES.has(opt.value))
|
||||
return {
|
||||
...field,
|
||||
options: [...shuffle(rest), ...pinned]
|
||||
}
|
||||
}
|
||||
|
||||
export const prepareSurvey = (survey: OnboardingSurvey): OnboardingSurvey => ({
|
||||
...survey,
|
||||
fields: survey.fields.map(randomizeOptions)
|
||||
})
|
||||
|
||||
type Translator = (key: string) => string
|
||||
|
||||
const identityTranslator: Translator = (key) => key
|
||||
|
||||
const fieldSchema = (field: OnboardingSurveyField, t: Translator) => {
|
||||
if (field.type === 'multi') {
|
||||
const arr = z.array(z.string())
|
||||
return field.required
|
||||
? arr.min(1, {
|
||||
message: t('cloudOnboarding.survey.errors.selectAtLeastOne')
|
||||
})
|
||||
: arr.optional()
|
||||
}
|
||||
if (field.required) {
|
||||
return z.string().min(1, {
|
||||
message: t('cloudOnboarding.survey.errors.chooseAnOption')
|
||||
})
|
||||
}
|
||||
return z.string().optional()
|
||||
}
|
||||
|
||||
export const buildZodSchema = (
|
||||
survey: OnboardingSurvey,
|
||||
values: SurveyValues,
|
||||
t: Translator = identityTranslator
|
||||
) => {
|
||||
const shape: Record<string, z.ZodTypeAny> = {}
|
||||
for (const field of survey.fields) {
|
||||
if (!conditionMatches(field.showWhen, values)) continue
|
||||
shape[field.id] = fieldSchema(field, t)
|
||||
if (
|
||||
field.allowOther &&
|
||||
field.otherFieldId &&
|
||||
values[field.id] === 'other'
|
||||
) {
|
||||
shape[field.otherFieldId] = z.string().min(1, {
|
||||
message: t('cloudOnboarding.survey.errors.describeAnswer')
|
||||
})
|
||||
} else if (field.otherFieldId) {
|
||||
shape[field.otherFieldId] = z.string().optional()
|
||||
}
|
||||
}
|
||||
return z.object(shape)
|
||||
}
|
||||
|
||||
export const buildInitialValues = (survey: OnboardingSurvey): SurveyValues => {
|
||||
const initial: SurveyValues = {}
|
||||
for (const field of survey.fields) {
|
||||
initial[field.id] = field.type === 'multi' ? [] : ''
|
||||
if (field.otherFieldId) initial[field.otherFieldId] = ''
|
||||
}
|
||||
return initial
|
||||
}
|
||||
|
||||
export const buildSubmissionPayload = (
|
||||
survey: OnboardingSurvey,
|
||||
values: SurveyValues
|
||||
): Record<string, unknown> => {
|
||||
const payload: Record<string, unknown> = {}
|
||||
for (const field of survey.fields) {
|
||||
const visible = conditionMatches(field.showWhen, values)
|
||||
if (!visible) {
|
||||
payload[field.id] = field.type === 'multi' ? [] : ''
|
||||
continue
|
||||
}
|
||||
const value = values[field.id]
|
||||
const otherRaw = field.otherFieldId ? values[field.otherFieldId] : undefined
|
||||
if (
|
||||
field.allowOther &&
|
||||
field.otherFieldId &&
|
||||
value === 'other' &&
|
||||
typeof otherRaw === 'string'
|
||||
) {
|
||||
const other = otherRaw.trim()
|
||||
payload[field.id] = other || 'other'
|
||||
} else {
|
||||
payload[field.id] = field.type === 'multi' ? (value ?? []) : (value ?? '')
|
||||
}
|
||||
}
|
||||
return payload
|
||||
}
|
||||
@@ -23,54 +23,6 @@ type FirebaseRuntimeConfig = {
|
||||
measurementId?: string
|
||||
}
|
||||
|
||||
/**
 * Server-driven onboarding survey schema.
 *
 * The backend ships the entire form definition so onboarding questions can
 * be tweaked without a frontend release. Field types map 1:1 to a component
 * in our internal UI library — see `DynamicSurveyField.vue`.
 */
export type OnboardingSurveyFieldType = 'single' | 'multi' | 'text'

/**
 * A translatable string. Either:
 * - a single literal (treated as the fallback in any locale), or
 * - a locale → text map, e.g. `{ en: 'Personal use', ko: '개인 용도' }`,
 *   so the backend can ship translations without a frontend release.
 */
export type LocalizedString = string | Record<string, string>

export type OnboardingSurveyOption = {
  value: string
  // Inline translatable label; a resolvable frontend i18n `labelKey` wins
  // over it (see resolveOptionLabel in DynamicSurveyField.vue).
  label?: LocalizedString
  labelKey?: string
}

// Visibility gate: the field is shown only when the referenced answer is
// non-empty and — if `equals` is given — matches it (any-of when an array;
// intersection for multi-select answers).
export type OnboardingSurveyFieldCondition = {
  field: string
  equals?: string | string[]
}

export type OnboardingSurveyField = {
  id: string
  type: OnboardingSurveyFieldType
  labelKey?: string
  label?: LocalizedString
  options?: OnboardingSurveyOption[]
  required?: boolean
  // Shuffle options at render time ('other'/'not_sure' stay pinned last).
  randomize?: boolean
  // When true, selecting 'other' reveals a free-text companion field whose
  // answer is stored under `otherFieldId`.
  allowOther?: boolean
  otherFieldId?: string
  placeholder?: string
  showWhen?: OnboardingSurveyFieldCondition
}

export type OnboardingSurvey = {
  // Schema revision; bumped by the backend when the questions change.
  version: number
  introKey?: string
  fields: OnboardingSurveyField[]
}
|
||||
|
||||
/**
|
||||
* Remote configuration type
|
||||
* Configuration fetched from the server at runtime
|
||||
@@ -93,7 +45,6 @@ export type RemoteConfig = {
|
||||
asset_rename_enabled?: boolean
|
||||
private_models_enabled?: boolean
|
||||
onboarding_survey_enabled?: boolean
|
||||
onboarding_survey?: OnboardingSurvey
|
||||
linear_toggle_enabled?: boolean
|
||||
team_workspaces_enabled?: boolean
|
||||
user_secrets_enabled?: boolean
|
||||
|
||||
@@ -40,11 +40,6 @@ export interface SurveyResponses {
|
||||
industry?: string
|
||||
useCase?: string
|
||||
making?: string[]
|
||||
role?: string
|
||||
teamSize?: string
|
||||
source?: string
|
||||
usage?: string
|
||||
intent?: string[]
|
||||
}
|
||||
|
||||
export interface SurveyResponsesNormalized extends SurveyResponses {
|
||||
|
||||
@@ -1,198 +0,0 @@
|
||||
import { createTestingPinia } from '@pinia/testing'
|
||||
import { render, screen } from '@testing-library/vue'
|
||||
import userEvent from '@testing-library/user-event'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { ref } from 'vue'
|
||||
import { createI18n } from 'vue-i18n'
|
||||
|
||||
import type { SubscriptionDialogReason } from '@/platform/cloud/subscription/composables/useSubscriptionDialog'
|
||||
|
||||
import SubscriptionRequiredDialogContentWorkspace from './SubscriptionRequiredDialogContentWorkspace.vue'
|
||||
|
||||
const mockHandleSubscribeClick = vi.fn()
|
||||
const mockHandleBackToPricing = vi.fn()
|
||||
const mockHandleAddCreditCard = vi.fn()
|
||||
const mockHandleConfirmTransition = vi.fn()
|
||||
const mockHandleResubscribe = vi.fn()
|
||||
const mockCheckoutStep = ref<'pricing' | 'preview'>('pricing')
|
||||
const mockPreviewData = ref<{ transition_type: string } | null>(null)
|
||||
|
||||
vi.mock('@/platform/workspace/composables/useSubscriptionCheckout', () => ({
|
||||
useSubscriptionCheckout: () => ({
|
||||
checkoutStep: mockCheckoutStep,
|
||||
isLoadingPreview: ref(false),
|
||||
loadingTier: ref(null),
|
||||
isSubscribing: ref(false),
|
||||
isResubscribing: ref(false),
|
||||
previewData: mockPreviewData,
|
||||
selectedTierKey: ref('standard'),
|
||||
selectedBillingCycle: ref('yearly'),
|
||||
isPolling: ref(false),
|
||||
handleSubscribeClick: mockHandleSubscribeClick,
|
||||
handleBackToPricing: mockHandleBackToPricing,
|
||||
handleAddCreditCard: mockHandleAddCreditCard,
|
||||
handleConfirmTransition: mockHandleConfirmTransition,
|
||||
handleResubscribe: mockHandleResubscribe
|
||||
})
|
||||
}))
|
||||
|
||||
const i18n = createI18n({
|
||||
legacy: false,
|
||||
locale: 'en',
|
||||
messages: {
|
||||
en: {
|
||||
g: { back: 'Back', close: 'Close' },
|
||||
subscription: {
|
||||
plansForWorkspace: 'Plans for {workspace}',
|
||||
teamWorkspace: 'Team'
|
||||
},
|
||||
credits: {
|
||||
topUp: {
|
||||
insufficientTitle: 'Insufficient Credits',
|
||||
insufficientMessage: 'You have run out of credits.'
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
const PricingTableStub = {
|
||||
name: 'PricingTableWorkspace',
|
||||
template: `<div data-testid="pricing-table">
|
||||
<button data-testid="subscribe-btn" @click="$emit('subscribe', { tierKey: 'standard', billingCycle: 'yearly' })">Subscribe</button>
|
||||
<button data-testid="resubscribe-btn" @click="$emit('resubscribe')">Resubscribe</button>
|
||||
</div>`
|
||||
}
|
||||
|
||||
const AddPaymentPreviewStub = {
|
||||
name: 'SubscriptionAddPaymentPreviewWorkspace',
|
||||
template: `<div data-testid="add-payment-preview">
|
||||
<button data-testid="add-card-btn" @click="$emit('addCreditCard')">Add Card</button>
|
||||
</div>`
|
||||
}
|
||||
|
||||
const TransitionPreviewStub = {
|
||||
name: 'SubscriptionTransitionPreviewWorkspace',
|
||||
template: `<div data-testid="transition-preview">
|
||||
<button data-testid="confirm-btn" @click="$emit('confirm')">Confirm</button>
|
||||
</div>`
|
||||
}
|
||||
|
||||
function renderComponent(
|
||||
props: { onClose?: () => void; reason?: SubscriptionDialogReason } = {}
|
||||
) {
|
||||
return render(SubscriptionRequiredDialogContentWorkspace, {
|
||||
props: {
|
||||
onClose: props.onClose ?? vi.fn(),
|
||||
...(props.reason ? { reason: props.reason } : {})
|
||||
},
|
||||
global: {
|
||||
plugins: [
|
||||
createTestingPinia({ createSpy: vi.fn, stubActions: false }),
|
||||
i18n
|
||||
],
|
||||
stubs: {
|
||||
PricingTableWorkspace: PricingTableStub,
|
||||
SubscriptionAddPaymentPreviewWorkspace: AddPaymentPreviewStub,
|
||||
SubscriptionTransitionPreviewWorkspace: TransitionPreviewStub
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
describe('SubscriptionRequiredDialogContentWorkspace', () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks()
|
||||
mockCheckoutStep.value = 'pricing'
|
||||
mockPreviewData.value = null
|
||||
})
|
||||
|
||||
it('shows pricing table on pricing step', () => {
|
||||
renderComponent()
|
||||
expect(screen.getByTestId('pricing-table')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('add-payment-preview')).not.toBeInTheDocument()
|
||||
expect(screen.queryByTestId('transition-preview')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows close button and hides back button on pricing step', () => {
|
||||
renderComponent()
|
||||
expect(screen.getByLabelText('Close')).toBeInTheDocument()
|
||||
expect(screen.queryByLabelText('Back')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('calls onClose when close button is clicked', async () => {
|
||||
const user = userEvent.setup()
|
||||
const onClose = vi.fn()
|
||||
renderComponent({ onClose })
|
||||
|
||||
await user.click(screen.getByLabelText('Close'))
|
||||
|
||||
expect(onClose).toHaveBeenCalledOnce()
|
||||
})
|
||||
|
||||
it('shows back button on preview step', () => {
|
||||
mockCheckoutStep.value = 'preview'
|
||||
mockPreviewData.value = { transition_type: 'new_subscription' }
|
||||
renderComponent()
|
||||
expect(screen.getByLabelText('Back')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows insufficient credits message when reason is out_of_credits', () => {
|
||||
renderComponent({ reason: 'out_of_credits' })
|
||||
expect(screen.getByText('Insufficient Credits')).toBeInTheDocument()
|
||||
expect(screen.getByText('You have run out of credits.')).toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('does not show insufficient credits message without reason', () => {
|
||||
renderComponent()
|
||||
expect(screen.queryByText('Insufficient Credits')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows new subscription preview when transition_type is new_subscription', () => {
|
||||
mockCheckoutStep.value = 'preview'
|
||||
mockPreviewData.value = { transition_type: 'new_subscription' }
|
||||
renderComponent()
|
||||
expect(screen.getByTestId('add-payment-preview')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('transition-preview')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('shows transition preview when transition_type is upgrade', () => {
|
||||
mockCheckoutStep.value = 'preview'
|
||||
mockPreviewData.value = { transition_type: 'upgrade' }
|
||||
renderComponent()
|
||||
expect(screen.getByTestId('transition-preview')).toBeInTheDocument()
|
||||
expect(screen.queryByTestId('add-payment-preview')).not.toBeInTheDocument()
|
||||
})
|
||||
|
||||
it('wires subscribe event to handleSubscribeClick', async () => {
|
||||
const user = userEvent.setup()
|
||||
renderComponent()
|
||||
|
||||
await user.click(screen.getByTestId('subscribe-btn'))
|
||||
|
||||
expect(mockHandleSubscribeClick).toHaveBeenCalledWith({
|
||||
tierKey: 'standard',
|
||||
billingCycle: 'yearly'
|
||||
})
|
||||
})
|
||||
|
||||
it('wires resubscribe event to handleResubscribe', async () => {
|
||||
const user = userEvent.setup()
|
||||
renderComponent()
|
||||
|
||||
await user.click(screen.getByTestId('resubscribe-btn'))
|
||||
|
||||
expect(mockHandleResubscribe).toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('wires back button to handleBackToPricing', async () => {
|
||||
const user = userEvent.setup()
|
||||
mockCheckoutStep.value = 'preview'
|
||||
mockPreviewData.value = { transition_type: 'new_subscription' }
|
||||
renderComponent()
|
||||
|
||||
await user.click(screen.getByLabelText('Back'))
|
||||
|
||||
expect(mockHandleBackToPricing).toHaveBeenCalled()
|
||||
})
|
||||
})
|
||||
@@ -18,7 +18,7 @@
|
||||
variant="muted-textonly"
|
||||
class="absolute top-2.5 right-2.5 shrink-0 rounded-full text-text-secondary hover:bg-white/10"
|
||||
:aria-label="$t('g.close')"
|
||||
@click="onClose"
|
||||
@click="handleClose"
|
||||
>
|
||||
<i class="pi pi-times text-xl" />
|
||||
</Button>
|
||||
@@ -94,14 +94,28 @@
|
||||
</template>
|
||||
|
||||
<script setup lang="ts">
|
||||
import { useToast } from 'primevue/usetoast'
|
||||
import { computed, ref } from 'vue'
|
||||
import { useI18n } from 'vue-i18n'
|
||||
|
||||
import Button from '@/components/ui/button/Button.vue'
|
||||
import { useBillingContext } from '@/composables/billing/useBillingContext'
|
||||
import { getComfyPlatformBaseUrl } from '@/config/comfyApi'
|
||||
import type { TierKey } from '@/platform/cloud/subscription/constants/tierPricing'
|
||||
import { useTelemetry } from '@/platform/telemetry'
|
||||
import type { BillingCycle } from '@/platform/cloud/subscription/utils/subscriptionTierRank'
|
||||
import type { PreviewSubscribeResponse } from '@/platform/workspace/api/workspaceApi'
|
||||
import { workspaceApi } from '@/platform/workspace/api/workspaceApi'
|
||||
import { useBillingOperationStore } from '@/platform/workspace/stores/billingOperationStore'
|
||||
import type { SubscriptionDialogReason } from '@/platform/cloud/subscription/composables/useSubscriptionDialog'
|
||||
import { useSubscriptionCheckout } from '@/platform/workspace/composables/useSubscriptionCheckout'
|
||||
|
||||
import PricingTableWorkspace from './PricingTableWorkspace.vue'
|
||||
import SubscriptionAddPaymentPreviewWorkspace from './SubscriptionAddPaymentPreviewWorkspace.vue'
|
||||
import SubscriptionTransitionPreviewWorkspace from './SubscriptionTransitionPreviewWorkspace.vue'
|
||||
|
||||
type CheckoutStep = 'pricing' | 'preview'
|
||||
type CheckoutTierKey = Exclude<TierKey, 'free' | 'founder'>
|
||||
|
||||
const { onClose, reason } = defineProps<{
|
||||
onClose: () => void
|
||||
reason?: SubscriptionDialogReason
|
||||
@@ -111,22 +125,227 @@ const emit = defineEmits<{
|
||||
close: [subscribed: boolean]
|
||||
}>()
|
||||
|
||||
const {
|
||||
checkoutStep,
|
||||
isLoadingPreview,
|
||||
loadingTier,
|
||||
isSubscribing,
|
||||
isResubscribing,
|
||||
previewData,
|
||||
selectedTierKey,
|
||||
selectedBillingCycle,
|
||||
isPolling,
|
||||
handleSubscribeClick,
|
||||
handleBackToPricing,
|
||||
handleAddCreditCard,
|
||||
handleConfirmTransition,
|
||||
handleResubscribe
|
||||
} = useSubscriptionCheckout(emit)
|
||||
const { t } = useI18n()
|
||||
const toast = useToast()
|
||||
const { subscribe, previewSubscribe, plans, fetchStatus, fetchBalance } =
|
||||
useBillingContext()
|
||||
const telemetry = useTelemetry()
|
||||
const billingOperationStore = useBillingOperationStore()
|
||||
const isPolling = computed(() => billingOperationStore.hasPendingOperations)
|
||||
|
||||
const checkoutStep = ref<CheckoutStep>('pricing')
|
||||
const isLoadingPreview = ref(false)
|
||||
const loadingTier = ref<CheckoutTierKey | null>(null)
|
||||
const isSubscribing = ref(false)
|
||||
const isResubscribing = ref(false)
|
||||
const previewData = ref<PreviewSubscribeResponse | null>(null)
|
||||
const selectedTierKey = ref<CheckoutTierKey | null>(null)
|
||||
const selectedBillingCycle = ref<BillingCycle>('yearly')
|
||||
|
||||
function getApiPlanSlug(
|
||||
tierKey: CheckoutTierKey,
|
||||
billingCycle: BillingCycle
|
||||
): string | null {
|
||||
const apiDuration = billingCycle === 'yearly' ? 'ANNUAL' : 'MONTHLY'
|
||||
const apiTier = tierKey.toUpperCase()
|
||||
const plan = plans.value.find(
|
||||
(p) => p.tier === apiTier && p.duration === apiDuration
|
||||
)
|
||||
return plan?.slug ?? null
|
||||
}
|
||||
|
||||
async function handleSubscribeClick(payload: {
|
||||
tierKey: CheckoutTierKey
|
||||
billingCycle: BillingCycle
|
||||
}) {
|
||||
const { tierKey, billingCycle } = payload
|
||||
|
||||
isLoadingPreview.value = true
|
||||
loadingTier.value = tierKey
|
||||
selectedTierKey.value = tierKey
|
||||
selectedBillingCycle.value = billingCycle
|
||||
|
||||
try {
|
||||
const planSlug = getApiPlanSlug(tierKey, billingCycle)
|
||||
if (!planSlug) {
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Unable to subscribe',
|
||||
detail: 'This plan is not available'
|
||||
})
|
||||
return
|
||||
}
|
||||
const response = await previewSubscribe(planSlug)
|
||||
|
||||
if (!response || !response.allowed) {
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Unable to subscribe',
|
||||
detail: response?.reason || 'This plan is not available'
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
previewData.value = response
|
||||
checkoutStep.value = 'preview'
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to load subscription preview'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isLoadingPreview.value = false
|
||||
loadingTier.value = null
|
||||
}
|
||||
}
|
||||
|
||||
function handleBackToPricing() {
|
||||
checkoutStep.value = 'pricing'
|
||||
previewData.value = null
|
||||
}
|
||||
|
||||
async function handleAddCreditCard() {
|
||||
if (!selectedTierKey.value) return
|
||||
|
||||
isSubscribing.value = true
|
||||
try {
|
||||
const planSlug = getApiPlanSlug(
|
||||
selectedTierKey.value,
|
||||
selectedBillingCycle.value
|
||||
)
|
||||
if (!planSlug) return
|
||||
const response = await subscribe(
|
||||
planSlug,
|
||||
`${getComfyPlatformBaseUrl()}/payment/success`,
|
||||
`${getComfyPlatformBaseUrl()}/payment/failed`
|
||||
)
|
||||
|
||||
if (!response) return
|
||||
|
||||
if (response.status === 'subscribed') {
|
||||
telemetry?.trackMonthlySubscriptionSucceeded()
|
||||
toast.add({
|
||||
severity: 'success',
|
||||
summary: t('subscription.required.pollingSuccess'),
|
||||
life: 5000
|
||||
})
|
||||
await Promise.all([fetchStatus(), fetchBalance()])
|
||||
emit('close', true)
|
||||
} else if (
|
||||
response.status === 'needs_payment_method' &&
|
||||
response.payment_method_url
|
||||
) {
|
||||
window.open(response.payment_method_url, '_blank')
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
} else if (response.status === 'pending_payment') {
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : 'Failed to subscribe'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isSubscribing.value = false
|
||||
}
|
||||
}
|
||||
|
||||
async function handleConfirmTransition() {
|
||||
if (!selectedTierKey.value) return
|
||||
|
||||
isSubscribing.value = true
|
||||
try {
|
||||
const planSlug = getApiPlanSlug(
|
||||
selectedTierKey.value,
|
||||
selectedBillingCycle.value
|
||||
)
|
||||
if (!planSlug) return
|
||||
const response = await subscribe(
|
||||
planSlug,
|
||||
`${getComfyPlatformBaseUrl()}/payment/success`,
|
||||
`${getComfyPlatformBaseUrl()}/payment/failed`
|
||||
)
|
||||
|
||||
if (!response) return
|
||||
|
||||
if (response.status === 'subscribed') {
|
||||
telemetry?.trackMonthlySubscriptionSucceeded()
|
||||
toast.add({
|
||||
severity: 'success',
|
||||
summary: t('subscription.required.pollingSuccess'),
|
||||
life: 5000
|
||||
})
|
||||
await Promise.all([fetchStatus(), fetchBalance()])
|
||||
emit('close', true)
|
||||
} else if (
|
||||
response.status === 'needs_payment_method' &&
|
||||
response.payment_method_url
|
||||
) {
|
||||
window.open(response.payment_method_url, '_blank')
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
} else if (response.status === 'pending_payment') {
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : 'Failed to update subscription'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isSubscribing.value = false
|
||||
}
|
||||
}
|
||||
|
||||
async function handleResubscribe() {
|
||||
isResubscribing.value = true
|
||||
try {
|
||||
await workspaceApi.resubscribe()
|
||||
toast.add({
|
||||
severity: 'success',
|
||||
summary: t('subscription.resubscribeSuccess'),
|
||||
life: 5000
|
||||
})
|
||||
await Promise.all([fetchStatus(), fetchBalance()])
|
||||
emit('close', true)
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : 'Failed to resubscribe'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isResubscribing.value = false
|
||||
}
|
||||
}
|
||||
|
||||
function handleClose() {
|
||||
onClose()
|
||||
}
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
|
||||
@@ -1,369 +0,0 @@
|
||||
import { createTestingPinia } from '@pinia/testing'
|
||||
import { setActivePinia } from 'pinia'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { computed } from 'vue'
|
||||
|
||||
import type { Plan } from '@/platform/workspace/api/workspaceApi'
|
||||
|
||||
import { findPlanSlug } from './useSubscriptionCheckout'
|
||||
|
||||
function makeStandardYearly(): Plan {
|
||||
return {
|
||||
slug: 'standard-yearly',
|
||||
tier: 'STANDARD',
|
||||
duration: 'ANNUAL',
|
||||
price_cents: 1600,
|
||||
credits_cents: 4200,
|
||||
max_seats: 1,
|
||||
availability: { available: true },
|
||||
seat_summary: {
|
||||
seat_count: 1,
|
||||
total_cost_cents: 1600,
|
||||
total_credits_cents: 4200
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function makeCreatorMonthly(): Plan {
|
||||
return {
|
||||
slug: 'creator-monthly',
|
||||
tier: 'CREATOR',
|
||||
duration: 'MONTHLY',
|
||||
price_cents: 3500,
|
||||
credits_cents: 7400,
|
||||
max_seats: 5,
|
||||
availability: { available: true },
|
||||
seat_summary: {
|
||||
seat_count: 1,
|
||||
total_cost_cents: 3500,
|
||||
total_credits_cents: 7400
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function allPlans(): Plan[] {
|
||||
return [makeStandardYearly(), makeCreatorMonthly()]
|
||||
}
|
||||
|
||||
describe('findPlanSlug', () => {
|
||||
it('finds an annual plan by tier key and yearly billing cycle', () => {
|
||||
expect(findPlanSlug(allPlans(), 'standard', 'yearly')).toBe(
|
||||
'standard-yearly'
|
||||
)
|
||||
})
|
||||
|
||||
it('finds a monthly plan by tier key and monthly billing cycle', () => {
|
||||
expect(findPlanSlug(allPlans(), 'creator', 'monthly')).toBe(
|
||||
'creator-monthly'
|
||||
)
|
||||
})
|
||||
|
||||
it('returns null when no plan matches', () => {
|
||||
expect(findPlanSlug(allPlans(), 'standard', 'monthly')).toBeNull()
|
||||
})
|
||||
|
||||
it('returns null for empty plans', () => {
|
||||
expect(findPlanSlug([], 'standard', 'yearly')).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
const {
|
||||
mockSubscribe,
|
||||
mockPreviewSubscribe,
|
||||
mockFetchStatus,
|
||||
mockFetchBalance,
|
||||
mockPlans,
|
||||
mockResubscribe,
|
||||
mockToastAdd
|
||||
} = vi.hoisted(() => ({
|
||||
mockSubscribe: vi.fn(),
|
||||
mockPreviewSubscribe: vi.fn(),
|
||||
mockFetchStatus: vi.fn(),
|
||||
mockFetchBalance: vi.fn(),
|
||||
mockPlans: { value: [] as Plan[] },
|
||||
mockResubscribe: vi.fn(),
|
||||
mockToastAdd: vi.fn()
|
||||
}))
|
||||
|
||||
vi.mock('@/composables/billing/useBillingContext', () => ({
|
||||
useBillingContext: () => ({
|
||||
subscribe: mockSubscribe,
|
||||
previewSubscribe: mockPreviewSubscribe,
|
||||
plans: computed(() => mockPlans.value),
|
||||
fetchStatus: mockFetchStatus,
|
||||
fetchBalance: mockFetchBalance
|
||||
})
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/workspace/api/workspaceApi', () => ({
|
||||
workspaceApi: { resubscribe: mockResubscribe }
|
||||
}))
|
||||
|
||||
vi.mock('@/config/comfyApi', () => ({
|
||||
getComfyPlatformBaseUrl: () => 'https://platform.comfy.org'
|
||||
}))
|
||||
|
||||
vi.mock('primevue/usetoast', () => ({
|
||||
useToast: () => ({ add: mockToastAdd })
|
||||
}))
|
||||
|
||||
vi.mock('@/platform/telemetry', () => ({
|
||||
useTelemetry: () => ({ trackMonthlySubscriptionSucceeded: vi.fn() })
|
||||
}))
|
||||
|
||||
vi.mock('vue-i18n', async (importOriginal) => {
|
||||
const actual = await importOriginal()
|
||||
return {
|
||||
...(actual as Record<string, unknown>),
|
||||
useI18n: () => ({
|
||||
t: (key: string) => key
|
||||
})
|
||||
}
|
||||
})
|
||||
|
||||
describe('useSubscriptionCheckout', () => {
|
||||
let emit: ReturnType<typeof vi.fn>
|
||||
|
||||
async function setup() {
|
||||
const { useSubscriptionCheckout } =
|
||||
await import('./useSubscriptionCheckout')
|
||||
return useSubscriptionCheckout(emit as never)
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
setActivePinia(createTestingPinia({ stubActions: false }))
|
||||
vi.clearAllMocks()
|
||||
mockPlans.value = allPlans()
|
||||
emit = vi.fn()
|
||||
})
|
||||
|
||||
describe('handleSubscribeClick', () => {
|
||||
it('transitions to preview on successful preview', async () => {
|
||||
const checkout = await setup()
|
||||
const preview = {
|
||||
allowed: true,
|
||||
transition_type: 'new_subscription' as const,
|
||||
effective_at: '2025-01-01',
|
||||
is_immediate: true,
|
||||
cost_today_cents: 1600,
|
||||
cost_next_period_cents: 1600,
|
||||
credits_today_cents: 4200,
|
||||
credits_next_period_cents: 4200,
|
||||
new_plan: makeStandardYearly().seat_summary
|
||||
}
|
||||
mockPreviewSubscribe.mockResolvedValueOnce(preview)
|
||||
|
||||
await checkout.handleSubscribeClick({
|
||||
tierKey: 'standard',
|
||||
billingCycle: 'yearly'
|
||||
})
|
||||
|
||||
expect(checkout.checkoutStep.value).toBe('preview')
|
||||
expect(checkout.previewData.value).toStrictEqual(preview)
|
||||
})
|
||||
|
||||
it('shows error toast when preview is disallowed', async () => {
|
||||
const checkout = await setup()
|
||||
mockPreviewSubscribe.mockResolvedValueOnce({
|
||||
allowed: false,
|
||||
reason: 'Not allowed'
|
||||
})
|
||||
|
||||
await checkout.handleSubscribeClick({
|
||||
tierKey: 'standard',
|
||||
billingCycle: 'yearly'
|
||||
})
|
||||
|
||||
expect(checkout.checkoutStep.value).toBe('pricing')
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'Not allowed'
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('shows error toast when plan slug is not found', async () => {
|
||||
const checkout = await setup()
|
||||
mockPlans.value = []
|
||||
|
||||
await checkout.handleSubscribeClick({
|
||||
tierKey: 'standard',
|
||||
billingCycle: 'yearly'
|
||||
})
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'This plan is not available'
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('shows error toast on network failure', async () => {
|
||||
const checkout = await setup()
|
||||
mockPreviewSubscribe.mockRejectedValueOnce(new Error('Network error'))
|
||||
|
||||
await checkout.handleSubscribeClick({
|
||||
tierKey: 'standard',
|
||||
billingCycle: 'yearly'
|
||||
})
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'Network error'
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it('resolves monthly billing cycle to correct plan slug', async () => {
|
||||
const checkout = await setup()
|
||||
mockPreviewSubscribe.mockResolvedValueOnce({
|
||||
allowed: true,
|
||||
transition_type: 'new_subscription'
|
||||
})
|
||||
|
||||
await checkout.handleSubscribeClick({
|
||||
tierKey: 'creator',
|
||||
billingCycle: 'monthly'
|
||||
})
|
||||
|
||||
expect(mockPreviewSubscribe).toHaveBeenCalledWith('creator-monthly')
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleBackToPricing', () => {
|
||||
it('resets to pricing step and clears preview data', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.checkoutStep.value = 'preview'
|
||||
checkout.previewData.value = {} as never
|
||||
|
||||
checkout.handleBackToPricing()
|
||||
|
||||
expect(checkout.checkoutStep.value).toBe('pricing')
|
||||
expect(checkout.previewData.value).toBeNull()
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleAddCreditCard', () => {
|
||||
it('emits close on subscribed status', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.selectedTierKey.value = 'standard'
|
||||
checkout.selectedBillingCycle.value = 'yearly'
|
||||
mockSubscribe.mockResolvedValueOnce({
|
||||
status: 'subscribed',
|
||||
billing_op_id: 'op-1'
|
||||
})
|
||||
mockFetchStatus.mockResolvedValueOnce(undefined)
|
||||
mockFetchBalance.mockResolvedValueOnce(undefined)
|
||||
|
||||
await checkout.handleAddCreditCard()
|
||||
|
||||
expect(mockSubscribe).toHaveBeenCalledWith(
|
||||
'standard-yearly',
|
||||
'https://platform.comfy.org/payment/success',
|
||||
'https://platform.comfy.org/payment/failed'
|
||||
)
|
||||
expect(emit).toHaveBeenCalledWith('close', true)
|
||||
})
|
||||
|
||||
it('opens payment URL when needs_payment_method', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.selectedTierKey.value = 'standard'
|
||||
checkout.selectedBillingCycle.value = 'yearly'
|
||||
mockSubscribe.mockResolvedValueOnce({
|
||||
status: 'needs_payment_method',
|
||||
billing_op_id: 'op-2',
|
||||
payment_method_url: 'https://stripe.com/pay'
|
||||
})
|
||||
|
||||
const openSpy = vi.spyOn(window, 'open').mockImplementation(() => null)
|
||||
await checkout.handleAddCreditCard()
|
||||
|
||||
expect(openSpy).toHaveBeenCalledWith('https://stripe.com/pay', '_blank')
|
||||
openSpy.mockRestore()
|
||||
})
|
||||
|
||||
it('shows error toast on subscribe failure', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.selectedTierKey.value = 'standard'
|
||||
checkout.selectedBillingCycle.value = 'yearly'
|
||||
mockSubscribe.mockRejectedValueOnce(new Error('Payment failed'))
|
||||
|
||||
await checkout.handleAddCreditCard()
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'Payment failed'
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleConfirmTransition', () => {
|
||||
it('emits close on subscribed status', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.selectedTierKey.value = 'standard'
|
||||
checkout.selectedBillingCycle.value = 'yearly'
|
||||
mockSubscribe.mockResolvedValueOnce({
|
||||
status: 'subscribed',
|
||||
billing_op_id: 'op-3'
|
||||
})
|
||||
mockFetchStatus.mockResolvedValueOnce(undefined)
|
||||
mockFetchBalance.mockResolvedValueOnce(undefined)
|
||||
|
||||
await checkout.handleConfirmTransition()
|
||||
|
||||
expect(emit).toHaveBeenCalledWith('close', true)
|
||||
})
|
||||
|
||||
it('shows error toast on failure', async () => {
|
||||
const checkout = await setup()
|
||||
checkout.selectedTierKey.value = 'standard'
|
||||
checkout.selectedBillingCycle.value = 'yearly'
|
||||
mockSubscribe.mockRejectedValueOnce(new Error('Transition error'))
|
||||
|
||||
await checkout.handleConfirmTransition()
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'Transition error'
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe('handleResubscribe', () => {
|
||||
it('emits close on success', async () => {
|
||||
const checkout = await setup()
|
||||
mockResubscribe.mockResolvedValueOnce({
|
||||
billing_op_id: 'op-4',
|
||||
status: 'active'
|
||||
})
|
||||
mockFetchStatus.mockResolvedValueOnce(undefined)
|
||||
mockFetchBalance.mockResolvedValueOnce(undefined)
|
||||
|
||||
await checkout.handleResubscribe()
|
||||
|
||||
expect(mockResubscribe).toHaveBeenCalled()
|
||||
expect(emit).toHaveBeenCalledWith('close', true)
|
||||
})
|
||||
|
||||
it('shows error toast on failure', async () => {
|
||||
const checkout = await setup()
|
||||
mockResubscribe.mockRejectedValueOnce(new Error('Resubscribe failed'))
|
||||
|
||||
await checkout.handleResubscribe()
|
||||
|
||||
expect(mockToastAdd).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
severity: 'error',
|
||||
detail: 'Resubscribe failed'
|
||||
})
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,210 +0,0 @@
|
||||
import { useToast } from 'primevue/usetoast'
|
||||
import { computed, ref } from 'vue'
|
||||
import { useI18n } from 'vue-i18n'
|
||||
|
||||
import { useBillingContext } from '@/composables/billing/useBillingContext'
|
||||
import { getComfyPlatformBaseUrl } from '@/config/comfyApi'
|
||||
import type { TierKey } from '@/platform/cloud/subscription/constants/tierPricing'
|
||||
import type { BillingCycle } from '@/platform/cloud/subscription/utils/subscriptionTierRank'
|
||||
import { useTelemetry } from '@/platform/telemetry'
|
||||
import type {
|
||||
Plan,
|
||||
PreviewSubscribeResponse
|
||||
} from '@/platform/workspace/api/workspaceApi'
|
||||
import { workspaceApi } from '@/platform/workspace/api/workspaceApi'
|
||||
import { useBillingOperationStore } from '@/platform/workspace/stores/billingOperationStore'
|
||||
|
||||
type CheckoutStep = 'pricing' | 'preview'
|
||||
type CheckoutTierKey = Exclude<TierKey, 'free' | 'founder'>
|
||||
|
||||
export function findPlanSlug(
|
||||
plans: Plan[],
|
||||
tierKey: CheckoutTierKey,
|
||||
billingCycle: BillingCycle
|
||||
): string | null {
|
||||
const apiDuration = billingCycle === 'yearly' ? 'ANNUAL' : 'MONTHLY'
|
||||
const apiTier = tierKey.toUpperCase()
|
||||
const plan = plans.find(
|
||||
(p) => p.tier === apiTier && p.duration === apiDuration
|
||||
)
|
||||
return plan?.slug ?? null
|
||||
}
|
||||
|
||||
export function useSubscriptionCheckout(emit: {
|
||||
(e: 'close', subscribed: boolean): void
|
||||
}) {
|
||||
const { t } = useI18n()
|
||||
const toast = useToast()
|
||||
const { subscribe, previewSubscribe, plans, fetchStatus, fetchBalance } =
|
||||
useBillingContext()
|
||||
const telemetry = useTelemetry()
|
||||
const billingOperationStore = useBillingOperationStore()
|
||||
|
||||
const checkoutStep = ref<CheckoutStep>('pricing')
|
||||
const isLoadingPreview = ref(false)
|
||||
const loadingTier = ref<CheckoutTierKey | null>(null)
|
||||
const isSubscribing = ref(false)
|
||||
const isResubscribing = ref(false)
|
||||
const previewData = ref<PreviewSubscribeResponse | null>(null)
|
||||
const selectedTierKey = ref<CheckoutTierKey | null>(null)
|
||||
const selectedBillingCycle = ref<BillingCycle>('yearly')
|
||||
const isPolling = computed(() => billingOperationStore.hasPendingOperations)
|
||||
|
||||
function getApiPlanSlug(
|
||||
tierKey: CheckoutTierKey,
|
||||
billingCycle: BillingCycle
|
||||
): string | null {
|
||||
return findPlanSlug(plans.value, tierKey, billingCycle)
|
||||
}
|
||||
|
||||
async function handleSubscribeClick(payload: {
|
||||
tierKey: CheckoutTierKey
|
||||
billingCycle: BillingCycle
|
||||
}) {
|
||||
const { tierKey, billingCycle } = payload
|
||||
|
||||
isLoadingPreview.value = true
|
||||
loadingTier.value = tierKey
|
||||
selectedTierKey.value = tierKey
|
||||
selectedBillingCycle.value = billingCycle
|
||||
|
||||
try {
|
||||
const planSlug = getApiPlanSlug(tierKey, billingCycle)
|
||||
if (!planSlug) {
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Unable to subscribe',
|
||||
detail: 'This plan is not available'
|
||||
})
|
||||
return
|
||||
}
|
||||
const response = await previewSubscribe(planSlug)
|
||||
|
||||
if (!response || !response.allowed) {
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Unable to subscribe',
|
||||
detail: response?.reason || 'This plan is not available'
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
previewData.value = response
|
||||
checkoutStep.value = 'preview'
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: 'Failed to load subscription preview'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isLoadingPreview.value = false
|
||||
loadingTier.value = null
|
||||
}
|
||||
}
|
||||
|
||||
function handleBackToPricing() {
|
||||
checkoutStep.value = 'pricing'
|
||||
previewData.value = null
|
||||
}
|
||||
|
||||
async function handleSubscription() {
|
||||
if (!selectedTierKey.value) return
|
||||
|
||||
isSubscribing.value = true
|
||||
try {
|
||||
const planSlug = getApiPlanSlug(
|
||||
selectedTierKey.value,
|
||||
selectedBillingCycle.value
|
||||
)
|
||||
if (!planSlug) return
|
||||
const response = await subscribe(
|
||||
planSlug,
|
||||
`${getComfyPlatformBaseUrl()}/payment/success`,
|
||||
`${getComfyPlatformBaseUrl()}/payment/failed`
|
||||
)
|
||||
|
||||
if (!response) return
|
||||
|
||||
if (response.status === 'subscribed') {
|
||||
telemetry?.trackMonthlySubscriptionSucceeded()
|
||||
toast.add({
|
||||
severity: 'success',
|
||||
summary: t('subscription.required.pollingSuccess'),
|
||||
life: 5000
|
||||
})
|
||||
await Promise.all([fetchStatus(), fetchBalance()])
|
||||
emit('close', true)
|
||||
} else if (
|
||||
response.status === 'needs_payment_method' &&
|
||||
response.payment_method_url
|
||||
) {
|
||||
window.open(response.payment_method_url, '_blank')
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
} else if (response.status === 'pending_payment') {
|
||||
billingOperationStore.startOperation(
|
||||
response.billing_op_id,
|
||||
'subscription'
|
||||
)
|
||||
}
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : 'Failed to subscribe'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isSubscribing.value = false
|
||||
}
|
||||
}
|
||||
|
||||
async function handleResubscribe() {
|
||||
isResubscribing.value = true
|
||||
try {
|
||||
await workspaceApi.resubscribe()
|
||||
toast.add({
|
||||
severity: 'success',
|
||||
summary: t('subscription.resubscribeSuccess'),
|
||||
life: 5000
|
||||
})
|
||||
await Promise.all([fetchStatus(), fetchBalance()])
|
||||
emit('close', true)
|
||||
} catch (error) {
|
||||
const message =
|
||||
error instanceof Error ? error.message : 'Failed to resubscribe'
|
||||
toast.add({
|
||||
severity: 'error',
|
||||
summary: 'Error',
|
||||
detail: message
|
||||
})
|
||||
} finally {
|
||||
isResubscribing.value = false
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
checkoutStep,
|
||||
isLoadingPreview,
|
||||
loadingTier,
|
||||
isSubscribing,
|
||||
isResubscribing,
|
||||
previewData,
|
||||
selectedTierKey,
|
||||
selectedBillingCycle,
|
||||
isPolling,
|
||||
handleSubscribeClick,
|
||||
handleBackToPricing,
|
||||
handleAddCreditCard: handleSubscription,
|
||||
handleConfirmTransition: handleSubscription,
|
||||
handleResubscribe
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,10 @@
|
||||
import { createTestingPinia } from '@pinia/testing'
|
||||
import { setActivePinia } from 'pinia'
|
||||
import { nextTick } from 'vue'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import type { LGraphCanvas, Positionable } from '@/lib/litegraph/src/litegraph'
|
||||
import { LGraph, LGraphNode } from '@/lib/litegraph/src/litegraph'
|
||||
import { useCanvasStore } from '@/renderer/core/canvas/canvasStore'
|
||||
|
||||
vi.mock('@/composables/useAppMode', () => ({
|
||||
@@ -84,4 +87,39 @@ describe('useCanvasStore', () => {
|
||||
expect(originalHandler).toHaveBeenCalledWith(2.0, app.canvas.ds.offset)
|
||||
})
|
||||
})
|
||||
|
||||
describe('node:before-removed selection cleanup', () => {
|
||||
it('removes the node from store.selectedItems before its onRemoved fires', async () => {
|
||||
const graph = new LGraph()
|
||||
const node = new LGraphNode('test')
|
||||
graph.add(node)
|
||||
|
||||
const selectedItems = new Set<Positionable>([node])
|
||||
const fakeCanvas = {
|
||||
canvas: document.createElement('canvas'),
|
||||
graph,
|
||||
selectedItems,
|
||||
deselect: vi.fn((item: Positionable) => {
|
||||
selectedItems.delete(item)
|
||||
})
|
||||
}
|
||||
store.canvas = fakeCanvas as unknown as LGraphCanvas
|
||||
await nextTick()
|
||||
store.updateSelectedItems()
|
||||
expect(store.selectedItems).toContain(node)
|
||||
|
||||
let stillSelectedInOnRemoved: boolean | undefined
|
||||
node.onRemoved = () => {
|
||||
stillSelectedInOnRemoved = store.selectedItems.includes(node)
|
||||
}
|
||||
|
||||
graph.remove(node)
|
||||
|
||||
expect(
|
||||
stillSelectedInOnRemoved,
|
||||
'selectedItems must not contain the node when onRemoved fires'
|
||||
).toBe(false)
|
||||
expect(store.selectedItems).toEqual([])
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -131,6 +131,15 @@ export const useCanvasStore = defineStore('canvas', () => {
|
||||
whenever(
|
||||
() => canvas.value,
|
||||
(newCanvas) => {
|
||||
useEventListener(
|
||||
() => (currentGraph.value ?? newCanvas.graph)?.events,
|
||||
'node:before-removed',
|
||||
(e: CustomEvent<{ node: LGraphNode }>) => {
|
||||
newCanvas.deselect(e.detail.node)
|
||||
updateSelectedItems()
|
||||
}
|
||||
)
|
||||
|
||||
useEventListener(
|
||||
newCanvas.canvas,
|
||||
'litegraph:set-graph',
|
||||
|
||||
@@ -115,15 +115,7 @@ export const defaultGraph: ComfyWorkflowJSON = {
|
||||
{ name: 'CLIP', type: 'CLIP', links: [3, 5], slot_index: 1 },
|
||||
{ name: 'VAE', type: 'VAE', links: [8], slot_index: 2 }
|
||||
],
|
||||
properties: {
|
||||
models: [
|
||||
{
|
||||
name: 'v1-5-pruned-emaonly-fp16.safetensors',
|
||||
url: 'https://huggingface.co/Comfy-Org/stable-diffusion-v1-5-archive/resolve/main/v1-5-pruned-emaonly-fp16.safetensors',
|
||||
directory: 'checkpoints'
|
||||
}
|
||||
]
|
||||
},
|
||||
properties: {},
|
||||
widgets_values: ['v1-5-pruned-emaonly-fp16.safetensors']
|
||||
}
|
||||
],
|
||||
|
||||
@@ -1,48 +0,0 @@
|
||||
import { vi } from 'vitest'
|
||||
|
||||
export const EXPECTED_WORKFLOW = {
|
||||
nodes: [{ id: 1, type: 'KSampler', pos: [100, 100], size: [200, 200] }]
|
||||
}
|
||||
|
||||
export const EXPECTED_PROMPT = {
|
||||
'1': { class_type: 'KSampler', inputs: {} }
|
||||
}
|
||||
|
||||
type ReadMethod = 'readAsText' | 'readAsArrayBuffer'
|
||||
|
||||
export function mockFileReaderError(method: ReadMethod): void {
|
||||
vi.spyOn(FileReader.prototype, method).mockImplementation(
|
||||
function (this: FileReader) {
|
||||
queueMicrotask(() =>
|
||||
this.onerror?.(new ProgressEvent('error') as ProgressEvent<FileReader>)
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
export function mockFileReaderAbort(method: ReadMethod): void {
|
||||
vi.spyOn(FileReader.prototype, method).mockImplementation(
|
||||
function (this: FileReader) {
|
||||
queueMicrotask(() =>
|
||||
this.onabort?.(new ProgressEvent('abort') as ProgressEvent<FileReader>)
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
export function mockFileReaderResult(
|
||||
method: ReadMethod,
|
||||
result: string | ArrayBuffer | null
|
||||
): void {
|
||||
vi.spyOn(FileReader.prototype, method).mockImplementation(
|
||||
function (this: FileReader) {
|
||||
Object.defineProperty(this, 'result', {
|
||||
value: result,
|
||||
configurable: true
|
||||
})
|
||||
queueMicrotask(() =>
|
||||
this.onload?.(new ProgressEvent('load') as ProgressEvent<FileReader>)
|
||||
)
|
||||
}
|
||||
)
|
||||
}
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 552 B |
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
Before Width: | Height: | Size: 266 B |
Binary file not shown.
|
Before Width: | Height: | Size: 272 B |
@@ -1,76 +1,7 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
import { beforeEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getFromAvifFile } from './avif'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.avif')
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
describe('AVIF metadata', () => {
|
||||
it('extracts workflow and prompt from EXIF data in ISOBMFF boxes', async () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const file = new File([bytes], 'test.avif', { type: 'image/avif' })
|
||||
|
||||
const result = await getFromAvifFile(file)
|
||||
|
||||
expect(JSON.parse(result.workflow)).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(JSON.parse(result.prompt)).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
it('returns empty for non-AVIF data', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const file = new File([new Uint8Array(16)], 'fake.avif')
|
||||
|
||||
const result = await getFromAvifFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
expect(console.error).toHaveBeenCalledWith('Not a valid AVIF file')
|
||||
})
|
||||
|
||||
it('returns empty when AVIF has valid ftyp but corrupt internal boxes', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
|
||||
const buf = new Uint8Array(40)
|
||||
const dv = new DataView(buf.buffer)
|
||||
dv.setUint32(0, 16)
|
||||
buf.set(new TextEncoder().encode('ftypavif'), 4)
|
||||
dv.setUint32(16, 24)
|
||||
buf.set(new TextEncoder().encode('meta'), 20)
|
||||
|
||||
const file = new File([buf], 'corrupt.avif', { type: 'image/avif' })
|
||||
const result = await getFromAvifFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
expect(console.error).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Error parsing AVIF metadata'),
|
||||
expect.anything()
|
||||
)
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
const file = new File([new Uint8Array(16)], 'test.avif')
|
||||
|
||||
it('resolves empty when the FileReader fires error', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
expect(await getFromAvifFile(file)).toEqual({})
|
||||
})
|
||||
|
||||
it('resolves empty when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
expect(await getFromAvifFile(file)).toEqual({})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
const setU32BE = (dv: DataView, off: number, val: number) =>
|
||||
dv.setUint32(off, val, false)
|
||||
const setU16BE = (dv: DataView, off: number, val: number) =>
|
||||
|
||||
@@ -407,7 +407,6 @@ export function getFromAvifFile(file: File): Promise<Record<string, string>> {
|
||||
console.error('FileReader: Error reading AVIF file:', err)
|
||||
resolve({})
|
||||
}
|
||||
reader.onabort = () => resolve({})
|
||||
reader.readAsArrayBuffer(file)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,49 +0,0 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getFromWebmFile } from './ebml'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.webm')
|
||||
|
||||
describe('WebM/EBML metadata', () => {
|
||||
it('extracts workflow and prompt from EBML SimpleTag elements', async () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const file = new File([bytes], 'test.webm', { type: 'video/webm' })
|
||||
|
||||
const result = await getFromWebmFile(file)
|
||||
|
||||
expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(result.prompt).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
it('returns empty for non-WebM data', async () => {
|
||||
const file = new File([new Uint8Array(16)], 'fake.webm')
|
||||
|
||||
const result = await getFromWebmFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
const file = new File([new Uint8Array(16)], 'test.webm')
|
||||
|
||||
it('resolves empty when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
expect(await getFromWebmFile(file)).toEqual({})
|
||||
})
|
||||
|
||||
it('resolves empty when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
expect(await getFromWebmFile(file)).toEqual({})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -353,7 +353,6 @@ export function getFromWebmFile(file: File): Promise<ComfyMetadata> {
|
||||
const reader = new FileReader()
|
||||
reader.onload = (event) => handleFileLoad(event, resolve)
|
||||
reader.onerror = () => resolve({})
|
||||
reader.onabort = () => resolve({})
|
||||
reader.readAsArrayBuffer(file.slice(0, MAX_READ_BYTES))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getFromFlacBuffer, getFromFlacFile } from './flac'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.flac')
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
describe('FLAC metadata', () => {
|
||||
it('extracts workflow and prompt from Vorbis comments', () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const buffer = bytes.buffer.slice(
|
||||
bytes.byteOffset,
|
||||
bytes.byteOffset + bytes.byteLength
|
||||
)
|
||||
|
||||
const result = getFromFlacBuffer(buffer)
|
||||
|
||||
expect(result.workflow).toBe(JSON.stringify(EXPECTED_WORKFLOW))
|
||||
expect(result.prompt).toBe(JSON.stringify(EXPECTED_PROMPT))
|
||||
})
|
||||
|
||||
it('returns undefined for non-FLAC data', () => {
|
||||
const buf = new ArrayBuffer(16)
|
||||
const result = getFromFlacBuffer(buf)
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
const file = new File([new Uint8Array(16)], 'test.flac')
|
||||
|
||||
it('resolves empty when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
|
||||
const result = await getFromFlacFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it('resolves empty when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
|
||||
const result = await getFromFlacFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -42,8 +42,6 @@ export function getFromFlacFile(file: File): Promise<Record<string, string>> {
|
||||
const arrayBuffer = event.target.result as ArrayBuffer
|
||||
r(getFromFlacBuffer(arrayBuffer))
|
||||
}
|
||||
reader.onerror = () => r({})
|
||||
reader.onabort = () => r({})
|
||||
reader.readAsArrayBuffer(file)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,11 +1,7 @@
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
import { ASCII, GltfSizeBytes } from '@/types/metadataTypes'
|
||||
|
||||
import {
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getGltfBinaryMetadata } from './gltf'
|
||||
|
||||
describe('GLTF binary metadata parser', () => {
|
||||
@@ -164,20 +160,4 @@ describe('GLTF binary metadata parser', () => {
|
||||
const metadata = await getGltfBinaryMetadata(invalidEmptyFile)
|
||||
expect(metadata).toEqual({})
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
const file = new File([new Uint8Array(16)], 'test.glb')
|
||||
|
||||
it('resolves empty when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
expect(await getGltfBinaryMetadata(file)).toEqual({})
|
||||
})
|
||||
|
||||
it('resolves empty when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
expect(await getGltfBinaryMetadata(file)).toEqual({})
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -165,7 +165,6 @@ export function getGltfBinaryMetadata(file: File): Promise<ComfyMetadata> {
|
||||
}
|
||||
}
|
||||
reader.onerror = () => resolve({})
|
||||
reader.onabort = () => resolve({})
|
||||
reader.readAsArrayBuffer(file.slice(0, bytesToRead))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,52 +0,0 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getFromIsobmffFile } from './isobmff'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.mp4')
|
||||
|
||||
describe('ISOBMFF (MP4) metadata', () => {
|
||||
it('extracts workflow and prompt from QuickTime keys/ilst boxes', async () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const file = new File([bytes], 'test.mp4', { type: 'video/mp4' })
|
||||
|
||||
const result = await getFromIsobmffFile(file)
|
||||
|
||||
expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(result.prompt).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
it('returns empty for non-ISOBMFF data', async () => {
|
||||
const file = new File([new Uint8Array(16)], 'fake.mp4', {
|
||||
type: 'video/mp4'
|
||||
})
|
||||
|
||||
const result = await getFromIsobmffFile(file)
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
const file = new File([new Uint8Array(16)], 'test.mp4')
|
||||
|
||||
it('resolves empty when the FileReader fires error', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
expect(await getFromIsobmffFile(file)).toEqual({})
|
||||
})
|
||||
|
||||
it('resolves empty when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
expect(await getFromIsobmffFile(file)).toEqual({})
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -274,7 +274,6 @@ export function getFromIsobmffFile(file: File): Promise<ComfyMetadata> {
|
||||
console.error('FileReader: Error reading ISOBMFF file:', err)
|
||||
resolve({})
|
||||
}
|
||||
reader.onabort = () => resolve({})
|
||||
reader.readAsArrayBuffer(file.slice(0, MAX_READ_BYTES))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,91 +0,0 @@
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError,
|
||||
mockFileReaderResult
|
||||
} from './__fixtures__/helpers'
|
||||
import { getDataFromJSON } from './json'
|
||||
|
||||
function jsonFile(content: object): File {
|
||||
return new File([JSON.stringify(content)], 'test.json', {
|
||||
type: 'application/json'
|
||||
})
|
||||
}
|
||||
|
||||
describe('getDataFromJSON', () => {
|
||||
it('detects API-format workflows by class_type on every value', async () => {
|
||||
const apiData = {
|
||||
'1': { class_type: 'KSampler', inputs: {} },
|
||||
'2': { class_type: 'EmptyLatentImage', inputs: {} }
|
||||
}
|
||||
|
||||
const result = await getDataFromJSON(jsonFile(apiData))
|
||||
|
||||
expect(result).toEqual({ prompt: apiData })
|
||||
})
|
||||
|
||||
it('treats objects without universal class_type as a workflow', async () => {
|
||||
const workflow = { nodes: [], links: [], version: 1 }
|
||||
|
||||
const result = await getDataFromJSON(jsonFile(workflow))
|
||||
|
||||
expect(result).toEqual({ workflow })
|
||||
})
|
||||
|
||||
it('extracts templates when the root object has a templates key', async () => {
|
||||
const templates = [{ name: 'basic' }]
|
||||
|
||||
const result = await getDataFromJSON(jsonFile({ templates }))
|
||||
|
||||
expect(result).toEqual({ templates })
|
||||
})
|
||||
|
||||
it('returns undefined for non-JSON content', async () => {
|
||||
const file = new File(['not valid json'], 'bad.json', {
|
||||
type: 'application/json'
|
||||
})
|
||||
|
||||
const result = await getDataFromJSON(file)
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
afterEach(() => {
|
||||
vi.restoreAllMocks()
|
||||
})
|
||||
|
||||
it('resolves undefined when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsText')
|
||||
|
||||
const result = await getDataFromJSON(jsonFile({ nodes: [] }))
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
it('resolves undefined when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsText')
|
||||
|
||||
const result = await getDataFromJSON(jsonFile({ nodes: [] }))
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
it('resolves undefined when reader.result is not a string', async () => {
|
||||
mockFileReaderResult('readAsText', new ArrayBuffer(8))
|
||||
|
||||
const result = await getDataFromJSON(jsonFile({ nodes: [] }))
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
|
||||
it('resolves undefined when reader.result is null', async () => {
|
||||
mockFileReaderResult('readAsText', null)
|
||||
|
||||
const result = await getDataFromJSON(jsonFile({ nodes: [] }))
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -6,28 +6,21 @@ export function getDataFromJSON(
|
||||
return new Promise<Record<string, object> | undefined>((resolve) => {
|
||||
const reader = new FileReader()
|
||||
reader.onload = async () => {
|
||||
try {
|
||||
if (typeof reader.result !== 'string') {
|
||||
resolve(undefined)
|
||||
return
|
||||
}
|
||||
const jsonContent = JSON.parse(reader.result)
|
||||
if (jsonContent?.templates) {
|
||||
resolve({ templates: jsonContent.templates })
|
||||
return
|
||||
}
|
||||
if (isApiJson(jsonContent)) {
|
||||
resolve({ prompt: jsonContent })
|
||||
return
|
||||
}
|
||||
resolve({ workflow: jsonContent })
|
||||
} catch {
|
||||
resolve(undefined)
|
||||
const readerResult = reader.result as string
|
||||
const jsonContent = JSON.parse(readerResult)
|
||||
if (jsonContent?.templates) {
|
||||
resolve({ templates: jsonContent.templates })
|
||||
return
|
||||
}
|
||||
if (isApiJson(jsonContent)) {
|
||||
resolve({ prompt: jsonContent })
|
||||
return
|
||||
}
|
||||
resolve({ workflow: jsonContent })
|
||||
return
|
||||
}
|
||||
reader.onerror = () => resolve(undefined)
|
||||
reader.onabort = () => resolve(undefined)
|
||||
reader.readAsText(file)
|
||||
return
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -1,106 +0,0 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getMp3Metadata } from './mp3'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.mp3')
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
describe('MP3 metadata', () => {
|
||||
it('extracts workflow and prompt from ID3 tags', async () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const file = new File([bytes], 'test.mp3', { type: 'audio/mpeg' })
|
||||
|
||||
const result = await getMp3Metadata(file)
|
||||
|
||||
expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(result.prompt).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
it('returns undefined fields when file has no embedded metadata', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const file = new File([new Uint8Array(16)], 'empty.mp3', {
|
||||
type: 'audio/mpeg'
|
||||
})
|
||||
|
||||
const result = await getMp3Metadata(file)
|
||||
|
||||
expect(result.workflow).toBeUndefined()
|
||||
expect(result.prompt).toBeUndefined()
|
||||
expect(console.error).toHaveBeenCalledWith('Invalid file signature.')
|
||||
})
|
||||
|
||||
it('does not log an invalid signature for a valid MP3 sync header', async () => {
|
||||
const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const buf = new Uint8Array(16)
|
||||
buf[0] = 0xff
|
||||
buf[1] = 0xfb
|
||||
const file = new File([buf], 'valid.mp3', { type: 'audio/mpeg' })
|
||||
|
||||
await getMp3Metadata(file)
|
||||
|
||||
expect(errorSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('does not log an invalid signature for a valid ID3v2 header', async () => {
|
||||
const errorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const buf = new Uint8Array(16)
|
||||
buf[0] = 0x49
|
||||
buf[1] = 0x44
|
||||
buf[2] = 0x33
|
||||
const file = new File([buf], 'valid-id3.mp3', { type: 'audio/mpeg' })
|
||||
|
||||
await getMp3Metadata(file)
|
||||
|
||||
expect(errorSpy).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it('extracts metadata that spans the 4096-byte page boundary', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const metadata =
|
||||
`prompt\0${JSON.stringify(EXPECTED_PROMPT)}\0` +
|
||||
`workflow\0${JSON.stringify(EXPECTED_WORKFLOW)}\0`
|
||||
const metadataStart = 4090
|
||||
const size = metadataStart + metadata.length + 4
|
||||
const buf = new Uint8Array(size)
|
||||
for (let i = 0; i < metadata.length; i++) {
|
||||
buf[metadataStart + i] = metadata.charCodeAt(i)
|
||||
}
|
||||
buf[size - 2] = 0xff
|
||||
buf[size - 1] = 0xfb
|
||||
const file = new File([buf], 'large.mp3', { type: 'audio/mpeg' })
|
||||
|
||||
const result = await getMp3Metadata(file)
|
||||
|
||||
expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(result.prompt).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
const file = new File([new Uint8Array(16)], 'test.mp3')
|
||||
|
||||
it('resolves undefined fields when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
|
||||
const result = await getMp3Metadata(file)
|
||||
|
||||
expect(result).toEqual({ prompt: undefined, workflow: undefined })
|
||||
})
|
||||
|
||||
it('resolves undefined fields when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
|
||||
const result = await getMp3Metadata(file)
|
||||
|
||||
expect(result).toEqual({ prompt: undefined, workflow: undefined })
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,28 +1,21 @@
|
||||
export async function getMp3Metadata(file: File) {
|
||||
const reader = new FileReader()
|
||||
const read_process = new Promise<ArrayBuffer | null>((r) => {
|
||||
reader.onload = (event) => r((event?.target?.result as ArrayBuffer) ?? null)
|
||||
reader.onerror = () => r(null)
|
||||
reader.onabort = () => r(null)
|
||||
})
|
||||
const read_process = new Promise(
|
||||
(r) => (reader.onload = (event) => r(event?.target?.result))
|
||||
)
|
||||
reader.readAsArrayBuffer(file)
|
||||
const arrayBuffer = await read_process
|
||||
if (!arrayBuffer) return { prompt: undefined, workflow: undefined }
|
||||
const arrayBuffer = (await read_process) as ArrayBuffer
|
||||
//https://stackoverflow.com/questions/7302439/how-can-i-determine-that-a-particular-file-is-in-fact-an-mp3-file#7302482
|
||||
const sig_bytes = new Uint8Array(arrayBuffer, 0, 3)
|
||||
if (
|
||||
(sig_bytes[0] != 0xff || sig_bytes[1] != 0xfb) &&
|
||||
(sig_bytes[0] != 0x49 || sig_bytes[1] != 0x44 || sig_bytes[2] != 0x33)
|
||||
(sig_bytes[0] != 0xff && sig_bytes[1] != 0xfb) ||
|
||||
(sig_bytes[0] != 0x49 && sig_bytes[1] != 0x44 && sig_bytes[2] != 0x33)
|
||||
)
|
||||
console.error('Invalid file signature.')
|
||||
let header = ''
|
||||
while (header.length < arrayBuffer.byteLength) {
|
||||
const page = String.fromCharCode(
|
||||
...new Uint8Array(
|
||||
arrayBuffer,
|
||||
header.length,
|
||||
Math.min(4096, arrayBuffer.byteLength - header.length)
|
||||
)
|
||||
...new Uint8Array(arrayBuffer, header.length, header.length + 4096)
|
||||
)
|
||||
header += page
|
||||
if (page.match('\u00ff\u00fb')) break
|
||||
|
||||
@@ -1,74 +0,0 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import {
|
||||
EXPECTED_PROMPT,
|
||||
EXPECTED_WORKFLOW,
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getOggMetadata } from './ogg'
|
||||
|
||||
const fixturePath = path.resolve(__dirname, '__fixtures__/with_metadata.opus')
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
describe('OGG/Opus metadata', () => {
|
||||
it('extracts workflow and prompt from an Opus file', async () => {
|
||||
const bytes = fs.readFileSync(fixturePath)
|
||||
const file = new File([bytes], 'test.opus', { type: 'audio/ogg' })
|
||||
|
||||
const result = await getOggMetadata(file)
|
||||
|
||||
expect(result.workflow).toEqual(EXPECTED_WORKFLOW)
|
||||
expect(result.prompt).toEqual(EXPECTED_PROMPT)
|
||||
})
|
||||
|
||||
it('returns undefined fields for non-OGG data', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const file = new File([new Uint8Array(16)], 'fake.ogg', {
|
||||
type: 'audio/ogg'
|
||||
})
|
||||
|
||||
const result = await getOggMetadata(file)
|
||||
|
||||
expect(result.workflow).toBeUndefined()
|
||||
expect(result.prompt).toBeUndefined()
|
||||
expect(console.error).toHaveBeenCalledWith('Invalid file signature.')
|
||||
})
|
||||
|
||||
it('handles files larger than 4096 bytes without RangeError', async () => {
|
||||
const size = 5000
|
||||
const buf = new Uint8Array(size)
|
||||
const oggs = new TextEncoder().encode('OggS\0')
|
||||
buf.set(oggs, 0)
|
||||
buf.set(oggs, 4500)
|
||||
const file = new File([buf], 'large.ogg', { type: 'audio/ogg' })
|
||||
|
||||
const result = await getOggMetadata(file)
|
||||
|
||||
expect(result.workflow).toBeUndefined()
|
||||
expect(result.prompt).toBeUndefined()
|
||||
})
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
const file = new File([new Uint8Array(16)], 'test.ogg')
|
||||
|
||||
it('resolves undefined fields when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
|
||||
const result = await getOggMetadata(file)
|
||||
|
||||
expect(result).toEqual({ prompt: undefined, workflow: undefined })
|
||||
})
|
||||
|
||||
it('resolves undefined fields when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
|
||||
const result = await getOggMetadata(file)
|
||||
|
||||
expect(result).toEqual({ prompt: undefined, workflow: undefined })
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -1,24 +1,17 @@
|
||||
export async function getOggMetadata(file: File) {
|
||||
const reader = new FileReader()
|
||||
const read_process = new Promise<ArrayBuffer | null>((r) => {
|
||||
reader.onload = (event) => r((event?.target?.result as ArrayBuffer) ?? null)
|
||||
reader.onerror = () => r(null)
|
||||
reader.onabort = () => r(null)
|
||||
})
|
||||
const read_process = new Promise(
|
||||
(r) => (reader.onload = (event) => r(event?.target?.result))
|
||||
)
|
||||
reader.readAsArrayBuffer(file)
|
||||
const arrayBuffer = await read_process
|
||||
if (!arrayBuffer) return { prompt: undefined, workflow: undefined }
|
||||
const arrayBuffer = (await read_process) as ArrayBuffer
|
||||
const signature = String.fromCharCode(...new Uint8Array(arrayBuffer, 0, 4))
|
||||
if (signature !== 'OggS') console.error('Invalid file signature.')
|
||||
let oggs = 0
|
||||
let header = ''
|
||||
while (header.length < arrayBuffer.byteLength) {
|
||||
const page = String.fromCharCode(
|
||||
...new Uint8Array(
|
||||
arrayBuffer,
|
||||
header.length,
|
||||
Math.min(4096, arrayBuffer.byteLength - header.length)
|
||||
)
|
||||
...new Uint8Array(arrayBuffer, header.length, header.length + 4096)
|
||||
)
|
||||
if (page.match('OggS\u0000')) oggs++
|
||||
header += page
|
||||
|
||||
@@ -1,19 +1,11 @@
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
import {
|
||||
mockFileReaderAbort,
|
||||
mockFileReaderError
|
||||
} from './__fixtures__/helpers'
|
||||
import { getFromPngBuffer, getFromPngFile } from './png'
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
|
||||
const PNG_SIGNATURE = [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a]
|
||||
import { getFromPngBuffer } from './png'
|
||||
|
||||
function createPngWithChunk(
|
||||
chunkType: string,
|
||||
keyword: string,
|
||||
content: string | Uint8Array,
|
||||
content: string,
|
||||
options: {
|
||||
compressionFlag?: number
|
||||
compressionMethod?: number
|
||||
@@ -28,11 +20,12 @@ function createPngWithChunk(
|
||||
translatedKeyword = ''
|
||||
} = options
|
||||
|
||||
const signature = new Uint8Array(PNG_SIGNATURE)
|
||||
const signature = new Uint8Array([
|
||||
0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a
|
||||
])
|
||||
const typeBytes = new TextEncoder().encode(chunkType)
|
||||
const keywordBytes = new TextEncoder().encode(keyword)
|
||||
const contentBytes =
|
||||
content instanceof Uint8Array ? content : new TextEncoder().encode(content)
|
||||
const contentBytes = new TextEncoder().encode(content)
|
||||
|
||||
let chunkData: Uint8Array
|
||||
if (chunkType === 'iTXt') {
|
||||
@@ -73,11 +66,12 @@ function createPngWithChunk(
|
||||
new DataView(lengthBytes.buffer).setUint32(0, chunkData.length, false)
|
||||
|
||||
const crc = new Uint8Array(4)
|
||||
|
||||
const iendType = new TextEncoder().encode('IEND')
|
||||
const iendLength = new Uint8Array(4)
|
||||
const iendCrc = new Uint8Array(4)
|
||||
|
||||
const total = signature.length + (4 + 4 + chunkData.length + 4) + (4 + 4 + 4)
|
||||
const total = signature.length + 4 + 4 + chunkData.length + 4 + 4 + 4 + 0 + 4
|
||||
const result = new Uint8Array(total)
|
||||
|
||||
let offset = 0
|
||||
@@ -144,21 +138,6 @@ describe('getFromPngBuffer', () => {
|
||||
expect(result['workflow']).toBe(workflow)
|
||||
})
|
||||
|
||||
it('logs warning and skips iTXt chunk with unsupported compression method', async () => {
|
||||
vi.spyOn(console, 'warn').mockImplementation(() => {})
|
||||
const buffer = createPngWithChunk('iTXt', 'workflow', 'data', {
|
||||
compressionFlag: 1,
|
||||
compressionMethod: 99
|
||||
})
|
||||
|
||||
const result = await getFromPngBuffer(buffer)
|
||||
|
||||
expect(result['workflow']).toBeUndefined()
|
||||
expect(console.warn).toHaveBeenCalledWith(
|
||||
expect.stringContaining('Unsupported compression method 99')
|
||||
)
|
||||
})
|
||||
|
||||
it('parses compressed iTXt chunk', async () => {
|
||||
const workflow = '{"nodes":[{"id":1,"type":"KSampler"}]}'
|
||||
const contentBytes = new TextEncoder().encode(workflow)
|
||||
@@ -184,49 +163,83 @@ describe('getFromPngBuffer', () => {
|
||||
pos += chunk.length
|
||||
}
|
||||
|
||||
const buffer = createPngWithChunk('iTXt', 'workflow', compressedBytes, {
|
||||
compressionFlag: 1,
|
||||
compressionMethod: 0
|
||||
})
|
||||
const buffer = createPngWithCompressedITXt(
|
||||
'workflow',
|
||||
compressedBytes,
|
||||
'',
|
||||
''
|
||||
)
|
||||
const result = await getFromPngBuffer(buffer)
|
||||
expect(result['workflow']).toBe(workflow)
|
||||
})
|
||||
})
|
||||
|
||||
describe('getFromPngFile', () => {
|
||||
it('reads metadata from a File object', async () => {
|
||||
const workflow = '{"nodes":[]}'
|
||||
const buffer = createPngWithChunk('tEXt', 'workflow', workflow)
|
||||
const file = new File([buffer], 'test.png', { type: 'image/png' })
|
||||
function createPngWithCompressedITXt(
|
||||
keyword: string,
|
||||
compressedContent: Uint8Array,
|
||||
languageTag: string,
|
||||
translatedKeyword: string
|
||||
): ArrayBuffer {
|
||||
const signature = new Uint8Array([
|
||||
0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a
|
||||
])
|
||||
const typeBytes = new TextEncoder().encode('iTXt')
|
||||
const keywordBytes = new TextEncoder().encode(keyword)
|
||||
const langBytes = new TextEncoder().encode(languageTag)
|
||||
const transBytes = new TextEncoder().encode(translatedKeyword)
|
||||
|
||||
const result = await getFromPngFile(file)
|
||||
const totalLength =
|
||||
keywordBytes.length +
|
||||
1 +
|
||||
2 +
|
||||
langBytes.length +
|
||||
1 +
|
||||
transBytes.length +
|
||||
1 +
|
||||
compressedContent.length
|
||||
|
||||
expect(result['workflow']).toBe(workflow)
|
||||
})
|
||||
const chunkData = new Uint8Array(totalLength)
|
||||
let pos = 0
|
||||
chunkData.set(keywordBytes, pos)
|
||||
pos += keywordBytes.length
|
||||
chunkData[pos++] = 0
|
||||
chunkData[pos++] = 1
|
||||
chunkData[pos++] = 0
|
||||
chunkData.set(langBytes, pos)
|
||||
pos += langBytes.length
|
||||
chunkData[pos++] = 0
|
||||
chunkData.set(transBytes, pos)
|
||||
pos += transBytes.length
|
||||
chunkData[pos++] = 0
|
||||
chunkData.set(compressedContent, pos)
|
||||
|
||||
it('returns empty for an invalid PNG File', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const file = new File([new ArrayBuffer(8)], 'bad.png', {
|
||||
type: 'image/png'
|
||||
})
|
||||
const lengthBytes = new Uint8Array(4)
|
||||
new DataView(lengthBytes.buffer).setUint32(0, chunkData.length, false)
|
||||
|
||||
const result = await getFromPngFile(file)
|
||||
const crc = new Uint8Array(4)
|
||||
const iendType = new TextEncoder().encode('IEND')
|
||||
const iendLength = new Uint8Array(4)
|
||||
const iendCrc = new Uint8Array(4)
|
||||
|
||||
expect(result).toEqual({})
|
||||
expect(console.error).toHaveBeenCalledWith('Not a valid PNG file')
|
||||
})
|
||||
const total = signature.length + 4 + 4 + chunkData.length + 4 + 4 + 4 + 0 + 4
|
||||
const result = new Uint8Array(total)
|
||||
|
||||
describe('FileReader failure modes', () => {
|
||||
const file = new File([new Uint8Array(16)], 'test.png')
|
||||
let offset = 0
|
||||
result.set(signature, offset)
|
||||
offset += signature.length
|
||||
result.set(lengthBytes, offset)
|
||||
offset += 4
|
||||
result.set(typeBytes, offset)
|
||||
offset += 4
|
||||
result.set(chunkData, offset)
|
||||
offset += chunkData.length
|
||||
result.set(crc, offset)
|
||||
offset += 4
|
||||
result.set(iendLength, offset)
|
||||
offset += 4
|
||||
result.set(iendType, offset)
|
||||
offset += 4
|
||||
result.set(iendCrc, offset)
|
||||
|
||||
it('rejects when the FileReader fires error', async () => {
|
||||
mockFileReaderError('readAsArrayBuffer')
|
||||
await expect(getFromPngFile(file)).rejects.toBeDefined()
|
||||
})
|
||||
|
||||
it('rejects when the FileReader fires abort', async () => {
|
||||
mockFileReaderAbort('readAsArrayBuffer')
|
||||
await expect(getFromPngFile(file)).rejects.toThrow('FileReader aborted')
|
||||
})
|
||||
})
|
||||
})
|
||||
return result.buffer
|
||||
}
|
||||
|
||||
@@ -126,7 +126,6 @@ export async function getFromPngFile(
|
||||
resolve(result)
|
||||
}
|
||||
reader.onerror = () => reject(reader.error)
|
||||
reader.onabort = () => reject(new Error('FileReader aborted'))
|
||||
reader.readAsArrayBuffer(file)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,42 +0,0 @@
|
||||
import { describe, expect, it } from 'vitest'
|
||||
|
||||
import { getSvgMetadata } from './svg'
|
||||
|
||||
function svgFile(content: string): File {
|
||||
return new File([content], 'test.svg', { type: 'image/svg+xml' })
|
||||
}
|
||||
|
||||
describe('getSvgMetadata', () => {
|
||||
it('extracts workflow and prompt from CDATA in <metadata>', async () => {
|
||||
const svg = `<svg xmlns="http://www.w3.org/2000/svg">
|
||||
<metadata><![CDATA[${JSON.stringify({
|
||||
workflow: { nodes: [] },
|
||||
prompt: { '1': {} }
|
||||
})}]]></metadata>
|
||||
<rect width="1" height="1"/>
|
||||
</svg>`
|
||||
|
||||
const result = await getSvgMetadata(svgFile(svg))
|
||||
|
||||
expect(result).toEqual({
|
||||
workflow: { nodes: [] },
|
||||
prompt: { '1': {} }
|
||||
})
|
||||
})
|
||||
|
||||
it('returns empty when SVG has no metadata element', async () => {
|
||||
const svg = '<svg xmlns="http://www.w3.org/2000/svg"><rect/></svg>'
|
||||
|
||||
const result = await getSvgMetadata(svgFile(svg))
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
|
||||
it('returns empty when CDATA contains invalid JSON', async () => {
|
||||
const svg = `<svg><metadata><![CDATA[not valid json]]></metadata></svg>`
|
||||
|
||||
const result = await getSvgMetadata(svgFile(svg))
|
||||
|
||||
expect(result).toEqual({})
|
||||
})
|
||||
})
|
||||
@@ -1,6 +1,4 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import { afterEach, describe, expect, it, vi } from 'vitest'
|
||||
import { describe, expect, it, vi } from 'vitest'
|
||||
|
||||
import { getFromAvifFile } from './metadata/avif'
|
||||
import { getFromFlacFile } from './metadata/flac'
|
||||
@@ -23,183 +21,67 @@ vi.mock('./metadata/avif', () => ({
|
||||
getFromAvifFile: vi.fn()
|
||||
}))
|
||||
|
||||
afterEach(() => vi.restoreAllMocks())
|
||||
function buildExifPayload(workflowJson: string): Uint8Array {
|
||||
const fullStr = `workflow:${workflowJson}\0`
|
||||
const strBytes = new TextEncoder().encode(fullStr)
|
||||
|
||||
const fixturesDir = path.resolve(__dirname, 'metadata/__fixtures__')
|
||||
|
||||
type AsciiIfdEntry = { tag: number; value: string }
|
||||
|
||||
function encodeAsciiIfd(entries: AsciiIfdEntry[]): Uint8Array {
|
||||
const tableSize = 10 + 12 * entries.length
|
||||
const strings = entries.map((e) => new TextEncoder().encode(`${e.value}\0`))
|
||||
const totalStringBytes = strings.reduce((sum, s) => sum + s.length, 0)
|
||||
|
||||
const buf = new Uint8Array(tableSize + totalStringBytes)
|
||||
const headerSize = 22
|
||||
const buf = new Uint8Array(headerSize + strBytes.length)
|
||||
const dv = new DataView(buf.buffer)
|
||||
|
||||
buf.set([0x49, 0x49], 0)
|
||||
dv.setUint16(2, 0x002a, true)
|
||||
dv.setUint32(4, 8, true)
|
||||
dv.setUint16(8, entries.length, true)
|
||||
|
||||
let stringOffset = tableSize
|
||||
for (let i = 0; i < entries.length; i++) {
|
||||
const entryOffset = 10 + i * 12
|
||||
dv.setUint16(entryOffset, entries[i].tag, true)
|
||||
dv.setUint16(entryOffset + 2, 2, true)
|
||||
dv.setUint32(entryOffset + 4, strings[i].length, true)
|
||||
dv.setUint32(entryOffset + 8, stringOffset, true)
|
||||
buf.set(strings[i], stringOffset)
|
||||
stringOffset += strings[i].length
|
||||
}
|
||||
dv.setUint16(8, 1, true)
|
||||
dv.setUint16(10, 0, true)
|
||||
dv.setUint16(12, 2, true)
|
||||
dv.setUint32(14, strBytes.length, true)
|
||||
dv.setUint32(18, 22, true)
|
||||
buf.set(strBytes, 22)
|
||||
|
||||
return buf
|
||||
}
|
||||
|
||||
type WebpChunk = { type: string; payload: Uint8Array }
|
||||
function buildWebp(precedingChunkLength: number, workflowJson: string): File {
|
||||
const exifPayload = buildExifPayload(workflowJson)
|
||||
const precedingPadded = precedingChunkLength + (precedingChunkLength % 2)
|
||||
const totalSize = 12 + (8 + precedingPadded) + (8 + exifPayload.length)
|
||||
|
||||
function wrapInWebp(chunks: WebpChunk[]): File {
|
||||
let payloadSize = 0
|
||||
for (const c of chunks) {
|
||||
payloadSize += 8 + c.payload.length + (c.payload.length % 2)
|
||||
}
|
||||
const totalSize = 12 + payloadSize
|
||||
const buf = new Uint8Array(totalSize)
|
||||
const dv = new DataView(buf.buffer)
|
||||
const buffer = new Uint8Array(totalSize)
|
||||
const dv = new DataView(buffer.buffer)
|
||||
|
||||
buf.set([0x52, 0x49, 0x46, 0x46], 0)
|
||||
buffer.set([0x52, 0x49, 0x46, 0x46], 0)
|
||||
dv.setUint32(4, totalSize - 8, true)
|
||||
buf.set([0x57, 0x45, 0x42, 0x50], 8)
|
||||
buffer.set([0x57, 0x45, 0x42, 0x50], 8)
|
||||
|
||||
let offset = 12
|
||||
for (const c of chunks) {
|
||||
for (let i = 0; i < 4; i++) {
|
||||
buf[offset + i] = c.type.charCodeAt(i)
|
||||
}
|
||||
dv.setUint32(offset + 4, c.payload.length, true)
|
||||
buf.set(c.payload, offset + 8)
|
||||
offset += 8 + c.payload.length + (c.payload.length % 2)
|
||||
}
|
||||
buffer.set([0x56, 0x50, 0x38, 0x20], 12)
|
||||
dv.setUint32(16, precedingChunkLength, true)
|
||||
|
||||
return new File([buf], 'test.webp', { type: 'image/webp' })
|
||||
}
|
||||
const exifStart = 20 + precedingPadded
|
||||
buffer.set([0x45, 0x58, 0x49, 0x46], exifStart)
|
||||
dv.setUint32(exifStart + 4, exifPayload.length, true)
|
||||
buffer.set(exifPayload, exifStart + 8)
|
||||
|
||||
function exifChunk(
|
||||
entries: AsciiIfdEntry[],
|
||||
options: { withExifPrefix?: boolean } = {}
|
||||
): WebpChunk {
|
||||
const ifd = encodeAsciiIfd(entries)
|
||||
if (!options.withExifPrefix) {
|
||||
return { type: 'EXIF', payload: ifd }
|
||||
}
|
||||
const prefixed = new Uint8Array(6 + ifd.length)
|
||||
prefixed.set(new TextEncoder().encode('Exif\0\0'), 0)
|
||||
prefixed.set(ifd, 6)
|
||||
return { type: 'EXIF', payload: prefixed }
|
||||
return new File([buffer], 'test.webp', { type: 'image/webp' })
|
||||
}
|
||||
|
||||
describe('getWebpMetadata', () => {
|
||||
it('returns empty when the file is not a valid WEBP', async () => {
|
||||
vi.spyOn(console, 'error').mockImplementation(() => {})
|
||||
const file = new File([new Uint8Array(12)], 'fake.webp')
|
||||
it('finds workflow when a preceding chunk has odd length (RIFF padding)', async () => {
|
||||
const workflow = '{"nodes":[]}'
|
||||
const file = buildWebp(3, workflow)
|
||||
|
||||
const metadata = await getWebpMetadata(file)
|
||||
|
||||
expect(metadata).toEqual({})
|
||||
expect(console.error).toHaveBeenCalledWith('Not a valid WEBP file')
|
||||
expect(metadata.workflow).toBe(workflow)
|
||||
})
|
||||
|
||||
it('returns empty when a valid WEBP has no EXIF chunk', async () => {
|
||||
const file = wrapInWebp([
|
||||
{ type: 'VP8 ', payload: new Uint8Array([0, 0, 0, 0]) }
|
||||
])
|
||||
it('finds workflow when preceding chunk has even length (no padding)', async () => {
|
||||
const workflow = '{"nodes":[1]}'
|
||||
const file = buildWebp(4, workflow)
|
||||
|
||||
const metadata = await getWebpMetadata(file)
|
||||
|
||||
expect(metadata).toEqual({})
|
||||
})
|
||||
|
||||
it('extracts workflow and prompt from EXIF without prefix', async () => {
|
||||
const bytes = fs.readFileSync(path.join(fixturesDir, 'with_metadata.webp'))
|
||||
const file = new File([bytes], 'test.webp', { type: 'image/webp' })
|
||||
|
||||
const metadata = await getWebpMetadata(file)
|
||||
|
||||
expect(metadata).toEqual({
|
||||
workflow:
|
||||
'{"nodes":[{"id":1,"type":"KSampler","pos":[100,100],"size":[200,200]}]}',
|
||||
prompt: '{"1":{"class_type":"KSampler","inputs":{}}}'
|
||||
})
|
||||
})
|
||||
|
||||
it('extracts workflow and prompt from EXIF with Exif\\0\\0 prefix', async () => {
|
||||
const bytes = fs.readFileSync(
|
||||
path.join(fixturesDir, 'with_metadata_exif_prefix.webp')
|
||||
)
|
||||
const file = new File([bytes], 'test.webp', { type: 'image/webp' })
|
||||
|
||||
const metadata = await getWebpMetadata(file)
|
||||
|
||||
expect(metadata).toEqual({
|
||||
workflow:
|
||||
'{"nodes":[{"id":1,"type":"KSampler","pos":[100,100],"size":[200,200]}]}',
|
||||
prompt: '{"1":{"class_type":"KSampler","inputs":{}}}'
|
||||
})
|
||||
})
|
||||
|
||||
it('walks past odd-length preceding chunks (RIFF padding)', async () => {
|
||||
const file = wrapInWebp([
|
||||
{ type: 'VP8 ', payload: new Uint8Array(3) },
|
||||
exifChunk([{ tag: 0, value: 'workflow:{"a":1}' }])
|
||||
])
|
||||
|
||||
const metadata = await getWebpMetadata(file)
|
||||
|
||||
expect(metadata).toEqual({ workflow: '{"a":1}' })
|
||||
})
|
||||
})
|
||||
|
||||
describe('getLatentMetadata', () => {
|
||||
function buildSafetensors(headerObj: object): File {
|
||||
const headerBytes = new TextEncoder().encode(JSON.stringify(headerObj))
|
||||
const buf = new Uint8Array(8 + headerBytes.length)
|
||||
const dv = new DataView(buf.buffer)
|
||||
dv.setUint32(0, headerBytes.length, true)
|
||||
dv.setUint32(4, 0, true)
|
||||
buf.set(headerBytes, 8)
|
||||
return new File([buf], 'test.safetensors')
|
||||
}
|
||||
|
||||
it('extracts __metadata__ from a safetensors header', async () => {
|
||||
const workflow =
|
||||
'{"nodes":[{"id":1,"type":"KSampler","pos":[100,100],"size":[200,200]}]}'
|
||||
const prompt = '{"1":{"class_type":"KSampler","inputs":{}}}'
|
||||
const file = buildSafetensors({
|
||||
__metadata__: { workflow, prompt },
|
||||
'tensor.weight': { dtype: 'F32', shape: [1], data_offsets: [0, 4] }
|
||||
})
|
||||
|
||||
const metadata = await getLatentMetadata(file)
|
||||
|
||||
expect(metadata).toEqual({ workflow, prompt })
|
||||
})
|
||||
|
||||
it('returns undefined when the safetensors header has no __metadata__', async () => {
|
||||
const file = buildSafetensors({
|
||||
'tensor.weight': { dtype: 'F32', shape: [1], data_offsets: [0, 4] }
|
||||
})
|
||||
|
||||
const metadata = await getLatentMetadata(file)
|
||||
|
||||
expect(metadata).toBeUndefined()
|
||||
})
|
||||
|
||||
it('returns undefined for a truncated or malformed file', async () => {
|
||||
const file = new File([new Uint8Array(4)], 'bad.safetensors')
|
||||
|
||||
const metadata = await getLatentMetadata(file)
|
||||
|
||||
expect(metadata).toBeUndefined()
|
||||
expect(metadata.workflow).toBe(workflow)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -234,3 +116,37 @@ describe('format-specific metadata wrappers', () => {
|
||||
expect(result).toEqual({ workflow: '{"avif":1}' })
|
||||
})
|
||||
})
|
||||
|
||||
const buildSafetensors = (header: Record<string, unknown>): File => {
|
||||
const headerJson = JSON.stringify(header)
|
||||
const headerBytes = new TextEncoder().encode(headerJson)
|
||||
const buf = new ArrayBuffer(8 + headerBytes.length)
|
||||
const dv = new DataView(buf)
|
||||
dv.setUint32(0, headerBytes.length, true)
|
||||
dv.setUint32(4, 0, true)
|
||||
new Uint8Array(buf, 8).set(headerBytes)
|
||||
return new File([buf], 'x.safetensors')
|
||||
}
|
||||
|
||||
describe('getLatentMetadata', () => {
|
||||
it('returns the __metadata__ object from a safetensors header', async () => {
|
||||
const file = buildSafetensors({
|
||||
__metadata__: { workflow: '{"nodes":[]}', extra: 'value' },
|
||||
'tensor.weight': { dtype: 'F32', shape: [1], data_offsets: [0, 4] }
|
||||
})
|
||||
|
||||
const result = await getLatentMetadata(file)
|
||||
|
||||
expect(result).toEqual({ workflow: '{"nodes":[]}', extra: 'value' })
|
||||
})
|
||||
|
||||
it('resolves undefined when header has no __metadata__ entry', async () => {
|
||||
const file = buildSafetensors({
|
||||
'tensor.weight': { dtype: 'F32', shape: [1], data_offsets: [0, 4] }
|
||||
})
|
||||
|
||||
const result = await getLatentMetadata(file)
|
||||
|
||||
expect(result).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
@@ -105,17 +105,14 @@ export function getWebpMetadata(file: File) {
|
||||
...webp.slice(offset, offset + 4)
|
||||
)
|
||||
if (chunk_type === 'EXIF') {
|
||||
let exifOffset = offset + 8
|
||||
let exifLength = chunk_length
|
||||
if (
|
||||
String.fromCharCode(...webp.slice(exifOffset, exifOffset + 6)) ==
|
||||
String.fromCharCode(...webp.slice(offset + 8, offset + 8 + 6)) ==
|
||||
'Exif\0\0'
|
||||
) {
|
||||
exifOffset += 6
|
||||
exifLength -= 6
|
||||
offset += 6
|
||||
}
|
||||
const data = parseExifData(
|
||||
webp.slice(exifOffset, exifOffset + exifLength)
|
||||
let data = parseExifData(
|
||||
webp.slice(offset + 8, offset + 8 + chunk_length)
|
||||
)
|
||||
for (const key in data) {
|
||||
const value = data[Number(key)]
|
||||
@@ -134,38 +131,30 @@ export function getWebpMetadata(file: File) {
|
||||
|
||||
r(txt_chunks)
|
||||
}
|
||||
reader.onerror = () => r({})
|
||||
reader.onabort = () => r({})
|
||||
|
||||
reader.readAsArrayBuffer(file)
|
||||
})
|
||||
}
|
||||
|
||||
export function getLatentMetadata(
|
||||
file: File
|
||||
): Promise<Record<string, string> | undefined> {
|
||||
export function getLatentMetadata(file: File): Promise<Record<string, string>> {
|
||||
return new Promise((r) => {
|
||||
const reader = new FileReader()
|
||||
reader.onload = (event) => {
|
||||
try {
|
||||
const safetensorsData = new Uint8Array(
|
||||
event.target?.result as ArrayBuffer
|
||||
const safetensorsData = new Uint8Array(
|
||||
event.target?.result as ArrayBuffer
|
||||
)
|
||||
const dataView = new DataView(safetensorsData.buffer)
|
||||
let header_size = dataView.getUint32(0, true)
|
||||
let offset = 8
|
||||
let header = JSON.parse(
|
||||
new TextDecoder().decode(
|
||||
safetensorsData.slice(offset, offset + header_size)
|
||||
)
|
||||
const dataView = new DataView(safetensorsData.buffer)
|
||||
const headerSize = dataView.getUint32(0, true)
|
||||
const offset = 8
|
||||
const header = JSON.parse(
|
||||
new TextDecoder().decode(
|
||||
safetensorsData.slice(offset, offset + headerSize)
|
||||
)
|
||||
)
|
||||
r(header.__metadata__)
|
||||
} catch {
|
||||
r(undefined)
|
||||
}
|
||||
)
|
||||
r(header.__metadata__)
|
||||
}
|
||||
reader.onerror = () => r(undefined)
|
||||
reader.onabort = () => r(undefined)
|
||||
const slice = file.slice(0, 1024 * 1024 * 4)
|
||||
|
||||
var slice = file.slice(0, 1024 * 1024 * 4)
|
||||
reader.readAsArrayBuffer(slice)
|
||||
})
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user